muya 2.1.1 → 2.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/index.js +1 -1
- package/esm/create-state.js +1 -1
- package/esm/create.js +1 -1
- package/esm/scheduler.js +1 -1
- package/esm/select.js +1 -1
- package/esm/sqlite/__tests__/create-sqlite.test.js +1 -0
- package/esm/sqlite/__tests__/map-deque.test.js +1 -0
- package/esm/sqlite/__tests__/table.test.js +1 -0
- package/esm/sqlite/__tests__/use-sqlite.test.js +1 -0
- package/esm/sqlite/create-sqlite.js +1 -0
- package/esm/sqlite/select-sql.js +1 -0
- package/esm/sqlite/table/backend.js +1 -0
- package/esm/sqlite/table/bun-backend.js +1 -0
- package/esm/sqlite/table/map-deque.js +1 -0
- package/esm/sqlite/table/table.js +10 -0
- package/esm/sqlite/table/table.types.js +0 -0
- package/esm/sqlite/table/where.js +1 -0
- package/esm/sqlite/use-sqlite.js +1 -0
- package/esm/utils/common.js +1 -1
- package/package.json +1 -1
- package/src/__tests__/scheduler.test.tsx +2 -2
- package/src/create-state.ts +3 -2
- package/src/create.ts +22 -24
- package/src/scheduler.ts +15 -7
- package/src/select.ts +15 -17
- package/src/sqlite/__tests__/create-sqlite.test.ts +81 -0
- package/src/sqlite/__tests__/map-deque.test.ts +61 -0
- package/src/sqlite/__tests__/table.test.ts +142 -0
- package/src/sqlite/__tests__/use-sqlite.test.ts +274 -0
- package/src/sqlite/create-sqlite.ts +273 -0
- package/src/sqlite/select-sql.ts +55 -0
- package/src/sqlite/table/backend.ts +21 -0
- package/src/sqlite/table/bun-backend.ts +38 -0
- package/src/sqlite/table/map-deque.ts +29 -0
- package/src/sqlite/table/table.ts +200 -0
- package/src/sqlite/table/table.types.ts +55 -0
- package/src/sqlite/table/where.ts +267 -0
- package/src/sqlite/use-sqlite.ts +70 -0
- package/src/types.ts +1 -0
- package/src/utils/common.ts +6 -2
- package/types/create.d.ts +3 -3
- package/types/scheduler.d.ts +12 -3
- package/types/sqlite/create-sqlite.d.ts +28 -0
- package/types/sqlite/select-sql.d.ts +14 -0
- package/types/sqlite/table/backend.d.ts +20 -0
- package/types/sqlite/table/bun-backend.d.ts +2 -0
- package/types/sqlite/table/map-deque.d.ts +5 -0
- package/types/sqlite/table/table.d.ts +3 -0
- package/types/sqlite/table/table.types.d.ts +52 -0
- package/types/sqlite/table/where.d.ts +32 -0
- package/types/sqlite/use-sqlite.d.ts +15 -0
- package/types/types.d.ts +1 -0
- package/types/utils/common.d.ts +2 -2
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
/* eslint-disable sonarjs/redundant-type-aliases */
|
|
2
|
+
/* eslint-disable @typescript-eslint/no-shadow */
|
|
3
|
+
/* eslint-disable no-shadow */
|
|
4
|
+
import { createScheduler } from '../scheduler'
|
|
5
|
+
import { shallow } from '../utils/shallow'
|
|
6
|
+
import { selectSql, type CreateState } from './select-sql'
|
|
7
|
+
import { createTable, DEFAULT_STEP_SIZE } from './table/table'
|
|
8
|
+
import type { DbOptions, DocType, Key, MutationResult, SearchOptions, Table } from './table/table.types'
|
|
9
|
+
import type { Where } from './table/where'
|
|
10
|
+
|
|
11
|
+
// Identifier for one registered search within a sqlite state.
type SearchId = string
// Scheduler shared by every sqlite state instance; schedule ids embed the
// owning state's id (see getScheduleId), so entries never collide.
const STATE_SCHEDULER = createScheduler()
|
|
13
|
+
|
|
14
|
+
let stateId = 0
|
|
15
|
+
function getStateId() {
|
|
16
|
+
return stateId++
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
/**
 * Paging actions for a registered search.
 * NOTE(review): no implementation of this interface is visible in this file;
 * semantics (load next page / restart) are inferred from the member names and
 * from SyncTable.next — confirm at the call sites.
 */
export interface SqLiteActions {
  /** Advances the search by one page. */
  readonly next: () => Promise<boolean>
  /** Restarts the search from the beginning. */
  readonly reset: () => Promise<void>
}
|
|
23
|
+
/**
 * Reactive facade over a sqlite table: CRUD operations plus per-search
 * subscriptions with cached snapshots (implemented by createSqliteState).
 */
export interface SyncTable<Document extends DocType> {
  // readonly registerSearch: <Selected = Document>(searchId: SearchId, options: SearchOptions<Document, Selected>) => () => void
  /** Sets/replaces a search's options and schedules a refresh. */
  readonly updateSearchOptions: <Selected = Document>(searchId: SearchId, options: SearchOptions<Document, Selected>) => void
  /** Registers a change listener for a search; returns an unsubscribe fn. */
  readonly subscribe: (searchId: SearchId, listener: () => void) => () => void
  /** Returns the currently cached items for a search (no I/O). */
  readonly getSnapshot: (searchId: SearchId) => Document[]
  /** Re-runs a search from scratch and notifies its listener. */
  readonly refresh: (searchId: SearchId) => Promise<void>

  /** Upserts one document and invalidates affected searches. */
  readonly set: (document: Document) => Promise<MutationResult>
  /** Upserts several documents and invalidates affected searches. */
  readonly batchSet: (documents: Document[]) => Promise<MutationResult[]>
  /** Loads one document by key, optionally projected through selector. */
  readonly get: <Selected = Document>(key: Key, selector?: (document: Document) => Selected) => Promise<Selected | undefined>

  /** Deletes by key; resolves undefined when no row matched. */
  readonly delete: (key: Key) => Promise<MutationResult | undefined>
  /** Streams matching documents. */
  readonly search: <Selected = Document>(options?: SearchOptions<Document, Selected>) => AsyncIterableIterator<Selected>
  /** Counts rows matching the optional where clause. */
  readonly count: (options?: { where?: Where<Document> }) => Promise<number>
  /** Deletes all rows matching where; one MutationResult per deleted row. */
  readonly deleteBy: (where: Where<Document>) => Promise<MutationResult[]>
  /** Tears down schedulers, listeners, and cached search data. */
  readonly destroy: () => void
  /** Loads the next page for a search; true when new items were appended. */
  readonly next: (searchId: SearchId) => Promise<boolean>

  /** Builds a derived reactive state from parameterized search options. */
  readonly select: <Params extends unknown[]>(
    compute: (...args: Params) => SearchOptions<Document>,
  ) => CreateState<Document, Params>
}
|
|
45
|
+
|
|
46
|
+
/** Per-search cache: loaded items, their row keys, and the search options. */
interface DataItems<Document extends DocType> {
  // Items loaded so far, in iteration order.
  items: Document[]
  // Stringified rowIds of loaded items; used to match mutations to searches.
  keys: Set<Key>
  options?: SearchOptions<Document, unknown>
}
|
|
51
|
+
|
|
52
|
+
export function createSqliteState<Document extends DocType>(options: DbOptions<Document>): SyncTable<Document> {
|
|
53
|
+
// const table = await createTable<Document>(options)
|
|
54
|
+
|
|
55
|
+
const id = getStateId()
|
|
56
|
+
function getScheduleId(searchId: SearchId) {
|
|
57
|
+
return `state-${id}-search-${searchId}`
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
let cachedTable: Table<Document> | undefined
|
|
61
|
+
async function getTable() {
|
|
62
|
+
if (!cachedTable) {
|
|
63
|
+
cachedTable = await createTable<Document>(options)
|
|
64
|
+
}
|
|
65
|
+
return cachedTable
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
interface NextResult {
|
|
69
|
+
document: Document
|
|
70
|
+
rowId: number
|
|
71
|
+
}
|
|
72
|
+
// const emitter = createEmitter<Table<Document>>()
|
|
73
|
+
const cachedData = new Map<SearchId, DataItems<Document>>()
|
|
74
|
+
const listeners = new Map<SearchId, () => void>()
|
|
75
|
+
const iterators = new Map<SearchId, AsyncIterableIterator<NextResult>>()
|
|
76
|
+
|
|
77
|
+
async function next(searchId: SearchId, data: DataItems<Document>): Promise<boolean> {
|
|
78
|
+
const iterator = iterators.get(searchId)
|
|
79
|
+
const { options = {} } = data
|
|
80
|
+
const { stepSize = DEFAULT_STEP_SIZE } = options
|
|
81
|
+
if (!iterator) return false
|
|
82
|
+
const newItems: Document[] = []
|
|
83
|
+
|
|
84
|
+
for (let index = 0; index < stepSize; index++) {
|
|
85
|
+
const result = await iterator.next()
|
|
86
|
+
if (result.done) {
|
|
87
|
+
iterators.delete(searchId)
|
|
88
|
+
break
|
|
89
|
+
}
|
|
90
|
+
newItems.push(result.value.document)
|
|
91
|
+
data.keys.add(String(result.value.rowId))
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
if (newItems.length === 0) return false
|
|
95
|
+
if (shallow(data.items, newItems)) return false
|
|
96
|
+
data.items = [...data.items, ...newItems]
|
|
97
|
+
return true
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
function notifyListeners(searchId: SearchId) {
|
|
101
|
+
const searchListeners = listeners.get(searchId)
|
|
102
|
+
if (searchListeners) {
|
|
103
|
+
searchListeners()
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
async function refreshCache(searchId: SearchId) {
|
|
108
|
+
const table = await getTable()
|
|
109
|
+
const data = cachedData.get(searchId)
|
|
110
|
+
if (!data) return
|
|
111
|
+
const { options } = data
|
|
112
|
+
const iterator = table.search({ ...options, select: (document, { rowId }) => ({ document, rowId }) })
|
|
113
|
+
iterators.set(searchId, iterator)
|
|
114
|
+
data.keys = new Set()
|
|
115
|
+
data.items = []
|
|
116
|
+
await next(searchId, data)
|
|
117
|
+
}
|
|
118
|
+
async function refresh(searchId: SearchId) {
|
|
119
|
+
await refreshCache(searchId)
|
|
120
|
+
notifyListeners(searchId)
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
function handleChange(mutationResult: MutationResult) {
|
|
124
|
+
const { key, op } = mutationResult
|
|
125
|
+
// find all cached data with key
|
|
126
|
+
const searchIds = new Set<SearchId>()
|
|
127
|
+
for (const [searchId, { keys }] of cachedData) {
|
|
128
|
+
switch (op) {
|
|
129
|
+
case 'delete':
|
|
130
|
+
case 'update': {
|
|
131
|
+
if (keys.has(String(key))) {
|
|
132
|
+
searchIds.add(searchId)
|
|
133
|
+
}
|
|
134
|
+
break
|
|
135
|
+
}
|
|
136
|
+
case 'insert': {
|
|
137
|
+
// we do not know about the key
|
|
138
|
+
searchIds.add(searchId)
|
|
139
|
+
break
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
return searchIds
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
async function handleChanges(mutationResults: MutationResult[]) {
|
|
147
|
+
const updateSearchIds = new Set<SearchId>()
|
|
148
|
+
for (const mutationResult of mutationResults) {
|
|
149
|
+
const searchIds = handleChange(mutationResult)
|
|
150
|
+
for (const searchId of searchIds) {
|
|
151
|
+
updateSearchIds.add(searchId)
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
// const promises = []
|
|
156
|
+
for (const searchId of updateSearchIds) {
|
|
157
|
+
const scheduleId = getScheduleId(searchId)
|
|
158
|
+
STATE_SCHEDULER.schedule(scheduleId, { searchId })
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
const clearSchedulers = new Set<() => void>()
|
|
163
|
+
|
|
164
|
+
function registerData(searchId: SearchId, options?: SearchOptions<Document, unknown>) {
|
|
165
|
+
if (!cachedData.has(searchId)) {
|
|
166
|
+
cachedData.set(searchId, { items: [], options, keys: new Set() })
|
|
167
|
+
if (options) {
|
|
168
|
+
refresh(searchId)
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
const data = cachedData.get(searchId)!
|
|
172
|
+
if (options) {
|
|
173
|
+
data.options = options
|
|
174
|
+
}
|
|
175
|
+
return data
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
const state: SyncTable<Document> = {
|
|
179
|
+
async set(document) {
|
|
180
|
+
const table = await getTable()
|
|
181
|
+
const changes = await table.set(document)
|
|
182
|
+
await handleChanges([changes])
|
|
183
|
+
return changes
|
|
184
|
+
},
|
|
185
|
+
async batchSet(documents) {
|
|
186
|
+
const table = await getTable()
|
|
187
|
+
const changes = await table.batchSet(documents)
|
|
188
|
+
await handleChanges(changes)
|
|
189
|
+
return changes
|
|
190
|
+
},
|
|
191
|
+
async delete(key) {
|
|
192
|
+
const table = await getTable()
|
|
193
|
+
const changes = await table.delete(key)
|
|
194
|
+
if (changes) {
|
|
195
|
+
await handleChanges([changes])
|
|
196
|
+
}
|
|
197
|
+
return changes
|
|
198
|
+
},
|
|
199
|
+
async deleteBy(where) {
|
|
200
|
+
const table = await getTable()
|
|
201
|
+
const changes = await table.deleteBy(where)
|
|
202
|
+
await handleChanges(changes)
|
|
203
|
+
return changes
|
|
204
|
+
},
|
|
205
|
+
async get(key, selector) {
|
|
206
|
+
const table = await getTable()
|
|
207
|
+
return table.get(key, selector)
|
|
208
|
+
},
|
|
209
|
+
async *search(options = {}) {
|
|
210
|
+
const table = await getTable()
|
|
211
|
+
for await (const item of table.search(options)) {
|
|
212
|
+
yield item
|
|
213
|
+
}
|
|
214
|
+
},
|
|
215
|
+
async count(options) {
|
|
216
|
+
const table = await getTable()
|
|
217
|
+
return await table.count(options)
|
|
218
|
+
},
|
|
219
|
+
|
|
220
|
+
updateSearchOptions(searchId, options) {
|
|
221
|
+
const data = registerData(searchId, options)
|
|
222
|
+
data.options = options
|
|
223
|
+
const scheduleId = getScheduleId(searchId)
|
|
224
|
+
STATE_SCHEDULER.schedule(scheduleId, { searchId })
|
|
225
|
+
},
|
|
226
|
+
|
|
227
|
+
subscribe(searchId, listener) {
|
|
228
|
+
const scheduleId = getScheduleId(searchId)
|
|
229
|
+
const clear = STATE_SCHEDULER.add(scheduleId, {
|
|
230
|
+
onScheduleDone() {
|
|
231
|
+
refresh(searchId)
|
|
232
|
+
},
|
|
233
|
+
})
|
|
234
|
+
clearSchedulers.add(clear)
|
|
235
|
+
|
|
236
|
+
if (!listeners.has(searchId)) {
|
|
237
|
+
listeners.set(searchId, listener)
|
|
238
|
+
}
|
|
239
|
+
return () => {
|
|
240
|
+
listeners.delete(searchId)
|
|
241
|
+
clear()
|
|
242
|
+
cachedData.delete(searchId)
|
|
243
|
+
}
|
|
244
|
+
},
|
|
245
|
+
getSnapshot(searchId) {
|
|
246
|
+
const data = registerData(searchId)
|
|
247
|
+
return data.items
|
|
248
|
+
},
|
|
249
|
+
refresh,
|
|
250
|
+
destroy() {
|
|
251
|
+
for (const clear of clearSchedulers) clear()
|
|
252
|
+
cachedData.clear()
|
|
253
|
+
listeners.clear()
|
|
254
|
+
},
|
|
255
|
+
async next(searchId) {
|
|
256
|
+
const data = cachedData.get(searchId)
|
|
257
|
+
if (data) {
|
|
258
|
+
const hasNext = await next(searchId, data)
|
|
259
|
+
if (hasNext) {
|
|
260
|
+
notifyListeners(searchId)
|
|
261
|
+
}
|
|
262
|
+
return hasNext
|
|
263
|
+
}
|
|
264
|
+
return false
|
|
265
|
+
},
|
|
266
|
+
|
|
267
|
+
select(compute) {
|
|
268
|
+
return selectSql(state, compute)
|
|
269
|
+
},
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
return state
|
|
273
|
+
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { createState } from '../create-state'
|
|
2
|
+
import type { GetState } from '../types'
|
|
3
|
+
import type { SyncTable } from './create-sqlite'
|
|
4
|
+
import type { DocType } from './table/table.types'
|
|
5
|
+
import type { Where } from './table/where'
|
|
6
|
+
|
|
7
|
+
/** Factory that, given params, returns a reactive state of documents. */
export type CreateState<Document, Params extends unknown[]> = (...params: Params) => GetState<Document[]>

/**
 * Search options accepted by selectSql.
 * NOTE(review): "SqlSeachOptions" and "sorBy" look like typos ("Search",
 * "sortBy"), but both are exported API surface mirrored by SearchOptions in
 * table.types — renaming would be a breaking change.
 */
export interface SqlSeachOptions<Document extends DocType> {
  readonly sorBy?: keyof Document
  readonly order?: 'asc' | 'desc'
  /** Maximum total number of documents to load. */
  readonly limit?: number
  readonly offset?: number
  readonly where?: Where<Document>
  /** Batch size used when paging through results. */
  readonly stepSize?: number
}
|
|
17
|
+
|
|
18
|
+
let stateId = 0
|
|
19
|
+
function getStateId() {
|
|
20
|
+
stateId++
|
|
21
|
+
return `${stateId.toString(36)}-sql`
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
export function selectSql<Document extends DocType, Params extends unknown[] = []>(
|
|
25
|
+
state: SyncTable<Document>,
|
|
26
|
+
compute: (...args: Params) => SqlSeachOptions<Document>,
|
|
27
|
+
): CreateState<Document, Params> {
|
|
28
|
+
const { subscribe, updateSearchOptions } = state
|
|
29
|
+
|
|
30
|
+
const result: CreateState<Document, Params> = (...params) => {
|
|
31
|
+
const searchId = getStateId()
|
|
32
|
+
const destroy = subscribe(searchId, () => {
|
|
33
|
+
getState.emitter.emit()
|
|
34
|
+
})
|
|
35
|
+
|
|
36
|
+
const options = compute(...params)
|
|
37
|
+
const getState = createState<Document[]>({
|
|
38
|
+
destroy() {
|
|
39
|
+
destroy()
|
|
40
|
+
getState.emitter.clear()
|
|
41
|
+
getState.cache.current = undefined
|
|
42
|
+
},
|
|
43
|
+
get() {
|
|
44
|
+
return state.getSnapshot(searchId)
|
|
45
|
+
},
|
|
46
|
+
getSnapshot() {
|
|
47
|
+
return state.getSnapshot(searchId)
|
|
48
|
+
},
|
|
49
|
+
})
|
|
50
|
+
updateSearchOptions<Document>(searchId, options)
|
|
51
|
+
|
|
52
|
+
return getState
|
|
53
|
+
}
|
|
54
|
+
return result
|
|
55
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/** Path value that makes SQLite use an in-memory database. */
export const IN_MEMORY_DB = ':memory:'

/** Result of a write statement executed through a Backend. */
export interface QueryResult {
  /** The number of rows affected by the query. */
  rowsAffected: number
  /**
   * The last inserted `id`.
   *
   * This value is not set for Postgres databases. If the
   * last inserted id is required on Postgres, the `select` function
   * must be used, with a `RETURNING` clause
   * (`INSERT INTO todos (title) VALUES ($1) RETURNING id`).
   */
  lastInsertId?: number
}
/** Minimal driver abstraction over one SQL database connection. */
export interface Backend {
  /** Runs a statement (INSERT/UPDATE/DELETE/DDL) with optional bind values. */
  execute: (query: string, bindValues?: unknown[]) => Promise<QueryResult>
  /** Runs a query and returns the rows; T is the caller-asserted row shape. */
  select: <T>(query: string, bindValues?: unknown[]) => Promise<T>
  /** Runs callback with a handle that shares this Backend interface. */
  transaction: (callback: (tx: Backend) => Promise<void>) => Promise<void>
  /** Filesystem path of the database (or ':memory:'). */
  path: string
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { Database, type Statement } from 'bun:sqlite'
|
|
2
|
+
import type { Backend } from './backend'
|
|
3
|
+
import { MapDeque } from './map-deque'
|
|
4
|
+
|
|
5
|
+
export function bunMemoryBackend(): Backend {
|
|
6
|
+
const db = Database.open(':memory:')
|
|
7
|
+
const prepares = new MapDeque<string, Statement>(100)
|
|
8
|
+
function getStatement(query: string): Statement {
|
|
9
|
+
if (prepares.has(query)) {
|
|
10
|
+
return prepares.get(query)!
|
|
11
|
+
}
|
|
12
|
+
const stmt = db.prepare(query)
|
|
13
|
+
prepares.set(query, stmt)
|
|
14
|
+
return stmt
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
const backend: Backend = {
|
|
18
|
+
execute: async (query, params = []) => {
|
|
19
|
+
const q = getStatement(query)
|
|
20
|
+
const result = q.run(...(params as never[]))
|
|
21
|
+
return {
|
|
22
|
+
rowsAffected: result.changes,
|
|
23
|
+
changes: result.changes,
|
|
24
|
+
}
|
|
25
|
+
},
|
|
26
|
+
transaction: async (callback) => {
|
|
27
|
+
return db.transaction(() => callback(backend))()
|
|
28
|
+
},
|
|
29
|
+
path: db.filename,
|
|
30
|
+
select: async (query, params = []) => {
|
|
31
|
+
const q = getStatement(query)
|
|
32
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
33
|
+
const result = q.all(...(params as never[])) as Array<Record<string, any>>
|
|
34
|
+
return result as never
|
|
35
|
+
},
|
|
36
|
+
}
|
|
37
|
+
return backend
|
|
38
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
export class MapDeque<K, V> extends Map<K, V> {
|
|
2
|
+
constructor(
|
|
3
|
+
private maxSize: number,
|
|
4
|
+
entries?: ReadonlyArray<readonly [K, V]> | null,
|
|
5
|
+
) {
|
|
6
|
+
super(entries)
|
|
7
|
+
if (this.maxSize <= 0) {
|
|
8
|
+
throw new RangeError('maxSize must be greater than 0')
|
|
9
|
+
}
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
override set(key: K, value: V): this {
|
|
13
|
+
if (this.has(key)) {
|
|
14
|
+
super.set(key, value)
|
|
15
|
+
return this
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
if (this.size >= this.maxSize) {
|
|
19
|
+
const firstKey = this.keys().next().value
|
|
20
|
+
if (firstKey !== undefined) {
|
|
21
|
+
this.delete(firstKey)
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
super.set(key, value)
|
|
26
|
+
|
|
27
|
+
return this
|
|
28
|
+
}
|
|
29
|
+
}
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
// table.ts
|
|
2
|
+
/* eslint-disable sonarjs/different-types-comparison */
|
|
3
|
+
/* eslint-disable sonarjs/cognitive-complexity */
|
|
4
|
+
/* eslint-disable @typescript-eslint/no-shadow */
|
|
5
|
+
/* eslint-disable no-shadow */
|
|
6
|
+
import type { Table, DbOptions, DocType, Key, SearchOptions, MutationResult } from './table.types'
|
|
7
|
+
import { getWhereQuery, type Where } from './where'
|
|
8
|
+
|
|
9
|
+
// Chunk size for DELETE … IN (…) statements.
const DELETE_IN_CHUNK = 500 // keep well below SQLite's default 999 parameter limit
// Default number of rows fetched per batch while iterating search results.
export const DEFAULT_STEP_SIZE = 100
/**
 * Creates a document table stored as JSON text in SQLite.
 *
 * Rows hold the serialized document in a `data` column; when `options.key`
 * is set, a `key TEXT PRIMARY KEY` column is added, otherwise the implicit
 * SQLite ROWID acts as the key.
 *
 * NOTE(review): `tableName` and index names are interpolated directly into
 * SQL — they must come from trusted code, never from user input.
 */
export async function createTable<Document extends DocType>(options: DbOptions<Document>): Promise<Table<Document>> {
  const { backend, tableName, indexes, key } = options
  const hasUserKey = key !== undefined

  // Schema
  if (hasUserKey) {
    await backend.execute(`
      CREATE TABLE IF NOT EXISTS ${tableName} (
        key TEXT PRIMARY KEY,
        data TEXT NOT NULL
      );
    `)
  } else {
    await backend.execute(`
      CREATE TABLE IF NOT EXISTS ${tableName} (
        data TEXT NOT NULL
      );
    `)
  }

  // JSON expression indexes for fields under data
  for (const index of indexes ?? []) {
    const idx = String(index)
    await backend.execute(`CREATE INDEX IF NOT EXISTS idx_${tableName}_${idx} ON ${tableName} (json_extract(data, '$.${idx}'));`)
  }

  // Reads the configured key field off a document (undefined in ROWID mode).
  function getKeyFromDocument(document: Document): Key | undefined {
    return hasUserKey ? (document[key as keyof Document] as unknown as Key | undefined) : undefined
  }

  // Rows changed by the most recent statement on this connection.
  // NOTE(review): changes() is per-connection state — this assumes `conn` is
  // not shared with concurrent writers between the statement and this read.
  async function getChanges(conn: typeof backend): Promise<number> {
    const r = await conn.select<Array<{ c: number }>>(`SELECT changes() AS c`)
    return r[0]?.c ?? 0
  }

  const table: Table<Document> = {
    backend,

    // Upsert: try UPDATE first (common case); when nothing was updated, try
    // INSERT; a failed INSERT (e.g. a concurrent insert won the race on the
    // primary key) falls back to a second UPDATE.
    async set(document, backendOverride) {
      const db = backendOverride ?? backend
      const json = JSON.stringify(document)

      if (hasUserKey) {
        const id = getKeyFromDocument(document)
        if (id === undefined || id === null) {
          throw new Error(
            `Document is missing the configured key "${String(key)}". Provide it or create the table without "key".`,
          )
        }

        // Fast path: UPDATE first
        await db.execute(`UPDATE ${tableName} SET data = ? WHERE key = ?`, [json, id])
        const updated = await getChanges(db)
        if (updated === 1) return { key: id, op: 'update' }

        // No row updated => try INSERT
        try {
          await db.execute(`INSERT INTO ${tableName} (key, data) VALUES (?, ?)`, [id, json])
          return { key: id, op: 'insert' }
        } catch {
          await db.execute(`UPDATE ${tableName} SET data = ? WHERE key = ?`, [json, id])
          return { key: id, op: 'update' }
        }
      }

      // ROWID mode
      await db.execute(`INSERT INTO ${tableName} (data) VALUES (?)`, [json])
      const rows = await db.select<Array<{ id: number }>>(`SELECT last_insert_rowid() AS id`)
      const rowid = rows[0]?.id
      if (typeof rowid !== 'number') throw new Error('Failed to retrieve last_insert_rowid()')
      const result: MutationResult = { key: rowid, op: 'insert' }
      return result
    },

    // --- FIXED: include rowid ---
    // NOTE(review): the meta key here is `rowid` while SearchOptions.select
    // (and search below) use `rowId` — an inconsistency that callers passing
    // a two-argument selector need to be aware of.
    async get<Selected = Document>(
      keyValue: Key,
      selector: (document: Document, meta: { rowid: number }) => Selected = (d, _m) => d as unknown as Selected,
    ) {
      const whereKey = hasUserKey ? `key = ?` : `rowid = ?`
      const result = await backend.select<Array<{ data: string; rowid: number }>>(
        `SELECT rowid, data FROM ${tableName} WHERE ${whereKey}`,
        [keyValue],
      )
      if (result.length === 0) return
      const [item] = result
      const { data, rowid } = item
      const document = JSON.parse(data) as Document
      return selector(document, { rowid }) as Selected
    },

    async delete(keyValue: Key) {
      const whereKey = hasUserKey ? `key = ?` : `rowid = ?`
      await backend.execute(`DELETE FROM ${tableName} WHERE ${whereKey}`, [keyValue])
      // changes() tells us whether a row actually existed.
      const changed = await backend.select<Array<{ c: number }>>(`SELECT changes() AS c`)
      if ((changed[0]?.c ?? 0) > 0) {
        return { key: keyValue, op: 'delete' }
      }
      return
    },

    // --- FIXED: include rowid in search ---
    // Streams matching rows in LIMIT/OFFSET batches of `stepSize`.
    // NOTE(review): OFFSET paging over a table mutated mid-iteration can skip
    // or repeat rows — acceptable only if callers tolerate that.
    async *search<Selected = Document>(options: SearchOptions<Document, Selected> = {}): AsyncIterableIterator<Selected> {
      const {
        sorBy,
        order = 'asc',
        limit,
        offset = 0,
        where,
        select = (document, _meta) => document as unknown as Selected,
        stepSize = DEFAULT_STEP_SIZE,
      } = options

      let baseQuery = `SELECT rowid, data FROM ${tableName}`
      if (where) baseQuery += ' ' + getWhereQuery(where)

      let yielded = 0
      let currentOffset = offset
      while (true) {
        let query = baseQuery

        if (sorBy) {
          query += ` ORDER BY json_extract(data, '$.${String(sorBy)}') COLLATE NOCASE ${order.toUpperCase()}`
        } else {
          query += hasUserKey ? ` ORDER BY key COLLATE NOCASE ${order.toUpperCase()}` : ` ORDER BY rowid ${order.toUpperCase()}`
        }

        // Never fetch more than the caller's remaining `limit`.
        const batchLimit = limit ? Math.min(stepSize, limit - yielded) : stepSize
        query += ` LIMIT ${batchLimit} OFFSET ${currentOffset}`

        const results = await backend.select<Array<{ rowid: number; data: string }>>(query)
        if (results.length === 0) break

        for (const { rowid, data } of results) {
          if (limit && yielded >= limit) return
          const document = JSON.parse(data) as Document
          yield select(document, { rowId: rowid }) as Selected
          yielded++
        }

        // A short batch means the result set is exhausted.
        if (results.length < batchLimit || (limit && yielded >= limit)) break
        currentOffset += results.length
      }
    },

    async count(options: { where?: Where<Document> } = {}) {
      const { where } = options
      let query = `SELECT COUNT(*) as count FROM ${tableName}`
      if (where) query += ' ' + getWhereQuery(where)
      const result = await backend.select<Array<{ count: number }>>(query)
      return result[0]?.count ?? 0
    },

    // Collects matching keys first, then deletes them in chunks inside one
    // transaction so the bind-parameter count stays under SQLite's limit.
    async deleteBy(where: Where<Document>) {
      const whereQuery = getWhereQuery(where)
      const keyCol = hasUserKey ? 'key' : 'rowid'

      const results: MutationResult[] = []
      await backend.transaction(async (tx) => {
        const rows = await tx.select<Array<{ k: Key }>>(`SELECT ${keyCol} AS k, rowid FROM ${tableName} ${whereQuery}`)
        if (rows.length === 0) return

        const allKeys = rows.map((r) => r.k)

        for (let index = 0; index < allKeys.length; index += DELETE_IN_CHUNK) {
          const chunk = allKeys.slice(index, index + DELETE_IN_CHUNK)
          const placeholders = chunk.map(() => '?').join(',')
          await tx.execute(`DELETE FROM ${tableName} WHERE ${keyCol} IN (${placeholders})`, chunk as unknown as unknown[])
        }

        for (const k of allKeys) results.push({ key: k, op: 'delete' })
      })

      return results
    },

    // Runs set() for every document inside a single transaction.
    async batchSet(documents: Document[]) {
      const mutations: MutationResult[] = []
      await backend.transaction(async (tx) => {
        for (const document of documents) {
          const m = await table.set(document, tx)
          mutations.push(m)
        }
      })
      return mutations
    },
  }

  return table
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
// table.types.ts
|
|
2
|
+
import type { Backend } from './backend'
|
|
3
|
+
import type { Where } from './where'
|
|
4
|
+
|
|
5
|
+
// Arbitrary JSON-serializable document shape stored in a table.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type DocType = { [key: string]: any }
// Names of the key types a table may use.
export type KeyTypeAvailable = 'string' | 'number'

/** Configuration for creating a table (see createTable). */
export interface DbOptions<Document extends DocType> {
  // NOTE(review): "sorBy" looks like a typo for "sortBy" but is part of the
  // published API surface — renaming would be a breaking change.
  readonly sorBy?: keyof Document
  readonly order?: 'asc' | 'desc'
  readonly tableName: string
  /** Document fields to index via SQLite json_extract expression indexes. */
  readonly indexes?: Array<keyof Document>
  readonly backend: Backend
  /**
   * Optional key. If omitted, the table uses implicit SQLite ROWID as the key.
   */
  readonly key?: keyof Document
}

/** Non-generic slice of a table: just the backing database handle. */
interface DbNotGeneric {
  readonly backend: Backend
}

/** Options accepted by Table.search. */
export interface SearchOptions<Document extends DocType, Selected = Document> {
  // NOTE(review): "sorBy" — same probable typo as in DbOptions; kept for
  // backward compatibility.
  readonly sorBy?: keyof Document
  readonly order?: 'asc' | 'desc'
  readonly limit?: number
  readonly offset?: number
  readonly where?: Where<Document>
  /** Number of rows fetched per batch while iterating results. */
  readonly stepSize?: number
  /**
   * Naive projection. Prefer specialized queries for heavy fan-out graphs.
   */
  readonly select?: (document: Document, meta: { rowId: number }) => Selected
}

/** Key of a stored document: the user-configured key column, or SQLite ROWID. */
export type Key = string | number

export type MutationOp = 'insert' | 'update' | 'delete'
/** Describes one write performed by a mutation. */
export interface MutationResult {
  key: Key
  op: MutationOp
}

/** Full table API returned by createTable. */
export interface Table<Document extends DocType> extends DbNotGeneric {
  readonly set: (document: Document, backendOverride?: Backend) => Promise<MutationResult>
  readonly batchSet: (documents: Document[]) => Promise<MutationResult[]>
  readonly get: <Selected = Document>(key: Key, selector?: (document: Document) => Selected) => Promise<Selected | undefined>

  readonly delete: (key: Key) => Promise<MutationResult | undefined>
  readonly search: <Selected = Document>(options?: SearchOptions<Document, Selected>) => AsyncIterableIterator<Selected>
  readonly count: (options?: { where?: Where<Document> }) => Promise<number>
  readonly deleteBy: (where: Where<Document>) => Promise<MutationResult[]>
}
|