document-dataply 0.0.2-alpha.3 → 0.0.3-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +115 -47
- package/dist/cjs/index.js +356 -106
- package/dist/types/core/bptree/documentStrategy.d.ts +3 -3
- package/dist/types/core/document.d.ts +81 -14
- package/dist/types/types/index.d.ts +33 -11
- package/package.json +16 -3
package/README.md
CHANGED
|
@@ -4,23 +4,21 @@
|
|
|
4
4
|
> **This project is currently in the Alpha stage.**
|
|
5
5
|
> APIs and internal structures may change significantly between versions. Use with caution in production environments.
|
|
6
6
|
|
|
7
|
-
`document-dataply` is a high-performance
|
|
7
|
+
`document-dataply` is a high-performance document-oriented database library built on top of the [`dataply`](https://github.com/izure1/dataply) record storage engine. It provides a structured way to store, index, and query JSON-style documents, supporting transactions and complex field indexing.
|
|
8
8
|
|
|
9
|
-
## Features
|
|
9
|
+
## Key Features
|
|
10
10
|
|
|
11
|
-
- **Document-Oriented**: Store and retrieve JSON-
|
|
12
|
-
- **B+Tree Indexing**:
|
|
11
|
+
- **Document-Oriented**: Store and retrieve JSON-style documents.
|
|
12
|
+
- **B+Tree Indexing**: Supports high-performance lookups using a B+Tree indexing engine.
|
|
13
13
|
- **Deep Indexing**: Index nested object fields and specific array elements (e.g., `user.profile.name` or `tags.0`).
|
|
14
|
-
- **Flexible Indexing
|
|
14
|
+
- **Flexible Indexing Policies**: Supports full re-indexing for existing data or incremental indexing for future data.
|
|
15
15
|
- **Transactions**: ACID-compliant transactions for atomic operations.
|
|
16
|
-
- **Rich Querying**:
|
|
16
|
+
- **Rich Querying**: Supports comparison operators (`lt`, `gt`, `equal`, etc.) and pattern matching (`like`).
|
|
17
17
|
|
|
18
18
|
## Installation
|
|
19
19
|
|
|
20
20
|
```bash
|
|
21
21
|
npm install document-dataply
|
|
22
|
-
# or
|
|
23
|
-
yarn add document-dataply
|
|
24
22
|
```
|
|
25
23
|
|
|
26
24
|
## Quick Start
|
|
@@ -28,39 +26,48 @@ yarn add document-dataply
|
|
|
28
26
|
```typescript
|
|
29
27
|
import { DocumentDataply } from 'document-dataply';
|
|
30
28
|
|
|
29
|
+
type MyDocument = {
|
|
30
|
+
name: string;
|
|
31
|
+
age: number;
|
|
32
|
+
tags: string[];
|
|
33
|
+
}
|
|
34
|
+
|
|
31
35
|
async function main() {
|
|
32
|
-
const db =
|
|
33
|
-
name: string;
|
|
34
|
-
age: number;
|
|
35
|
-
tags: string[];
|
|
36
|
-
}>('my-database.db', {
|
|
36
|
+
const db = DocumentDataply.Define<MyDocument>().Options({
|
|
37
37
|
wal: 'my-database.wal',
|
|
38
38
|
indices: {
|
|
39
|
-
name: true, // Index existing and new data
|
|
39
|
+
name: true, // Index both existing and new data
|
|
40
40
|
age: false, // Index only new data
|
|
41
41
|
'tags.0': true // Index the first element of the 'tags' array
|
|
42
42
|
}
|
|
43
|
-
});
|
|
43
|
+
}).Open('my-database.db');
|
|
44
44
|
|
|
45
|
-
// Initialize
|
|
45
|
+
// Initialize database
|
|
46
46
|
await db.init();
|
|
47
47
|
|
|
48
|
-
// Insert
|
|
48
|
+
// Insert document
|
|
49
49
|
const id = await db.insert({
|
|
50
50
|
name: 'John Doe',
|
|
51
51
|
age: 30,
|
|
52
52
|
tags: ['admin', 'developer']
|
|
53
53
|
});
|
|
54
54
|
|
|
55
|
-
// Query
|
|
56
|
-
const
|
|
55
|
+
// Query document
|
|
56
|
+
const query = db.select({
|
|
57
57
|
name: 'John Doe',
|
|
58
58
|
age: { gte: 25 }
|
|
59
|
-
})
|
|
59
|
+
})
|
|
60
60
|
|
|
61
|
-
|
|
61
|
+
// Get all results
|
|
62
|
+
const allResults = await query.drain();
|
|
63
|
+
// Or iterate through results
|
|
64
|
+
for await (const doc of query.stream) {
|
|
65
|
+
console.log(doc);
|
|
66
|
+
}
|
|
62
67
|
|
|
63
|
-
|
|
68
|
+
console.log(allResults);
|
|
69
|
+
|
|
70
|
+
// Close database
|
|
64
71
|
await db.close();
|
|
65
72
|
}
|
|
66
73
|
|
|
@@ -71,17 +78,17 @@ main();
|
|
|
71
78
|
|
|
72
79
|
### Indexing Policies
|
|
73
80
|
|
|
74
|
-
When defining indices in the
|
|
81
|
+
When defining indices in the `options`, you can specify a boolean value.
|
|
75
82
|
|
|
76
|
-
- `true`: The library
|
|
77
|
-
- `false`: The library
|
|
83
|
+
- `true`: The library indexes all existing documents for that field during `init()`, and also indexes all subsequent insertions.
|
|
84
|
+
- `false`: The library only indexes documents inserted after this configuration.
|
|
78
85
|
|
|
79
86
|
> [!NOTE]
|
|
80
|
-
> `db.init()` automatically performs
|
|
87
|
+
> `db.init()` automatically performs a backfilling process for fields marked as `true`.
|
|
81
88
|
|
|
82
89
|
### Batch Insertion
|
|
83
90
|
|
|
84
|
-
|
|
91
|
+
To efficiently insert multiple documents, use the following:
|
|
85
92
|
|
|
86
93
|
```typescript
|
|
87
94
|
const ids = await db.insertBatch([
|
|
@@ -92,7 +99,7 @@ const ids = await db.insertBatch([
|
|
|
92
99
|
|
|
93
100
|
### Querying
|
|
94
101
|
|
|
95
|
-
`document-dataply` supports various comparison operators
|
|
102
|
+
`document-dataply` supports various comparison operators.
|
|
96
103
|
|
|
97
104
|
| Operator | Description |
|
|
98
105
|
| :--- | :--- |
|
|
@@ -102,8 +109,8 @@ const ids = await db.insertBatch([
|
|
|
102
109
|
| `gte` | Greater than or equal to |
|
|
103
110
|
| `equal` | Equal to |
|
|
104
111
|
| `notEqual` | Not equal to |
|
|
105
|
-
| `like` | SQL-
|
|
106
|
-
| `or` |
|
|
112
|
+
| `like` | SQL-style pattern matching (e.g., `Jo%`) |
|
|
113
|
+
| `or` | If any value in the array is satisfied |
|
|
107
114
|
|
|
108
115
|
Example of a complex query:
|
|
109
116
|
```typescript
|
|
@@ -111,23 +118,23 @@ const users = await db.select({
|
|
|
111
118
|
age: { gt: 18, lt: 65 },
|
|
112
119
|
'address.city': 'Seoul',
|
|
113
120
|
tags: { or: ['vip', 'premium'] }
|
|
114
|
-
});
|
|
121
|
+
}).drain();
|
|
115
122
|
```
|
|
116
123
|
|
|
117
124
|
> [!IMPORTANT]
|
|
118
|
-
> **Query Constraints**:
|
|
125
|
+
> **Query Constraints**: Query conditions (`lt`, `gt`, `equal`, etc.) can only be used on fields explicitly indexed during initialization.
|
|
119
126
|
>
|
|
120
|
-
> **If a field in the query is not indexed,
|
|
127
|
+
> **If a field in the query is not indexed, that condition will be ignored.**
|
|
121
128
|
>
|
|
122
|
-
> If you need to filter by
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
129
|
+
> If you need to filter by unindexed fields, you should first retrieve the documents and then use JavaScript's native `.filter()` method.
|
|
130
|
+
```typescript
|
|
131
|
+
const results = await db.select({ /* indexed fields only */ }).drain();
|
|
132
|
+
const filtered = results.filter(doc => doc.unindexedField === 'some-value');
|
|
133
|
+
```
|
|
127
134
|
|
|
128
135
|
### Transactions
|
|
129
136
|
|
|
130
|
-
|
|
137
|
+
To ensure the atomicity of multiple operations, use transactions.
|
|
131
138
|
|
|
132
139
|
```typescript
|
|
133
140
|
const tx = db.createTransaction();
|
|
@@ -141,31 +148,92 @@ try {
|
|
|
141
148
|
}
|
|
142
149
|
```
|
|
143
150
|
|
|
151
|
+
### Updating and Deleting
|
|
152
|
+
|
|
153
|
+
`document-dataply` provides flexible ways to update or delete documents matching a query. All these operations are performed in a memory-efficient streaming manner.
|
|
154
|
+
|
|
155
|
+
#### Partial Update
|
|
156
|
+
Updates only specified fields of the matching documents.
|
|
157
|
+
|
|
158
|
+
```typescript
|
|
159
|
+
// Using an object to merge
|
|
160
|
+
const count = await db.partialUpdate(
|
|
161
|
+
{ name: 'John Doe' },
|
|
162
|
+
{ status: 'active', updatedAt: Date.now() }
|
|
163
|
+
);
|
|
164
|
+
|
|
165
|
+
// Using a function for dynamic updates
|
|
166
|
+
const updatedCount = await db.partialUpdate(
|
|
167
|
+
{ age: { lt: 20 } },
|
|
168
|
+
(doc) => ({ age: doc.age + 1 })
|
|
169
|
+
);
|
|
170
|
+
```
|
|
171
|
+
|
|
172
|
+
#### Full Update
|
|
173
|
+
Completely replaces the documents matching the query, while preserving their original `_id`.
|
|
174
|
+
|
|
175
|
+
```typescript
|
|
176
|
+
const count = await db.fullUpdate(
|
|
177
|
+
{ name: 'John Doe' },
|
|
178
|
+
{ name: 'John Smith', age: 31, location: 'New York' }
|
|
179
|
+
);
|
|
180
|
+
```
|
|
181
|
+
|
|
182
|
+
#### Delete
|
|
183
|
+
Removes documents matching the query from both the index and storage.
|
|
184
|
+
|
|
185
|
+
```typescript
|
|
186
|
+
const deletedCount = await db.delete({ status: 'inactive' });
|
|
187
|
+
```
|
|
188
|
+
|
|
189
|
+
## Tips and Advanced Features
|
|
190
|
+
|
|
191
|
+
For more information on performance optimization and advanced features, see [TIPS.md](./docs/TIPS.md).
|
|
192
|
+
|
|
193
|
+
- **Query Optimization**: Automatic index selection for maximum performance.
|
|
194
|
+
- **Sorting and Pagination**: Detailed usage of `limit`, `orderBy`, and `sortOrder`.
|
|
195
|
+
- **Memory Management**: When to use `stream` vs `drain()`.
|
|
196
|
+
- **Performance**: Optimizing bulk data insertion using `insertBatch`.
|
|
197
|
+
- **Indexing Policies**: Deep dive into index backfilling and configuration.
|
|
198
|
+
|
|
144
199
|
## API Reference
|
|
145
200
|
|
|
146
|
-
### `
|
|
147
|
-
Creates a
|
|
201
|
+
### `DocumentDataply.Define<T>().Options(options).Open(file)`
|
|
202
|
+
Creates or opens a database instance. `T` defines the document structure.
|
|
203
|
+
`options.indices` is an object where keys are field names and values are booleans indicating the [Indexing Policy](#indexing-policies).
|
|
148
204
|
|
|
149
205
|
### `db.init()`
|
|
150
206
|
Initializes the database, sets up internal metadata, and prepares indices.
|
|
151
207
|
|
|
152
208
|
### `db.insert(document, tx?)`
|
|
153
|
-
Inserts a single document. Returns the
|
|
209
|
+
Inserts a single document. Returns the `_id` (`number`) of the document.
|
|
154
210
|
|
|
155
211
|
### `db.insertBatch(documents, tx?)`
|
|
156
|
-
Inserts multiple documents efficiently. Returns an array of `
|
|
212
|
+
Inserts multiple documents efficiently. Returns an array of `_id` values (`number[]`).
|
|
213
|
+
|
|
214
|
+
### `db.select(query, options?, tx?)`
|
|
215
|
+
Searches for documents matching the query.
|
|
216
|
+
Returns an object `{ stream, drain }`.
|
|
217
|
+
- `stream`: An async iterator to traverse results one by one.
|
|
218
|
+
- `drain()`: A promise that resolves to an array of all matching documents.
|
|
219
|
+
|
|
220
|
+
### `db.partialUpdate(query, newFields, tx?)`
|
|
221
|
+
Partially updates documents matching the query. `newFields` can be a partial object or a function that returns a partial object. Returns the number of updated documents.
|
|
222
|
+
|
|
223
|
+
### `db.fullUpdate(query, newDocument, tx?)`
|
|
224
|
+
Fully replaces documents matching the query while preserving their `_id`. Returns the number of updated documents.
|
|
157
225
|
|
|
158
|
-
### `db.
|
|
159
|
-
|
|
226
|
+
### `db.delete(query, tx?)`
|
|
227
|
+
Deletes documents matching the query. Returns the number of deleted documents.
|
|
160
228
|
|
|
161
229
|
### `db.getMetadata(tx?)`
|
|
162
|
-
Returns physical storage information (
|
|
230
|
+
Returns physical storage information (number of pages, number of rows, etc.).
|
|
163
231
|
|
|
164
232
|
### `db.createTransaction()`
|
|
165
233
|
Returns a new `Transaction` object.
|
|
166
234
|
|
|
167
235
|
### `db.close()`
|
|
168
|
-
Flushes changes and closes the database
|
|
236
|
+
Flushes changes and closes the database files.
|
|
169
237
|
|
|
170
238
|
## License
|
|
171
239
|
|
package/dist/cjs/index.js
CHANGED
|
@@ -1795,6 +1795,31 @@ var require_cjs = __commonJS({
|
|
|
1795
1795
|
}
|
|
1796
1796
|
return true;
|
|
1797
1797
|
}
|
|
1798
|
+
/**
|
|
1799
|
+
* Selects the best driver key from a condition object.
|
|
1800
|
+
* The driver key determines the starting point and traversal direction for queries.
|
|
1801
|
+
*
|
|
1802
|
+
* @param condition The condition to analyze.
|
|
1803
|
+
* @returns The best driver key or null if no valid key found.
|
|
1804
|
+
*/
|
|
1805
|
+
getDriverKey(condition) {
|
|
1806
|
+
if ("primaryEqual" in condition) return "primaryEqual";
|
|
1807
|
+
if ("equal" in condition) return "equal";
|
|
1808
|
+
if ("gt" in condition) return "gt";
|
|
1809
|
+
if ("gte" in condition) return "gte";
|
|
1810
|
+
if ("lt" in condition) return "lt";
|
|
1811
|
+
if ("lte" in condition) return "lte";
|
|
1812
|
+
if ("primaryGt" in condition) return "primaryGt";
|
|
1813
|
+
if ("primaryGte" in condition) return "primaryGte";
|
|
1814
|
+
if ("primaryLt" in condition) return "primaryLt";
|
|
1815
|
+
if ("primaryLte" in condition) return "primaryLte";
|
|
1816
|
+
if ("like" in condition) return "like";
|
|
1817
|
+
if ("notEqual" in condition) return "notEqual";
|
|
1818
|
+
if ("primaryNotEqual" in condition) return "primaryNotEqual";
|
|
1819
|
+
if ("or" in condition) return "or";
|
|
1820
|
+
if ("primaryOr" in condition) return "primaryOr";
|
|
1821
|
+
return null;
|
|
1822
|
+
}
|
|
1798
1823
|
constructor(rootTx, mvccRoot, mvcc, strategy, comparator, option) {
|
|
1799
1824
|
this.rootTx = rootTx === null ? this : rootTx;
|
|
1800
1825
|
this.mvccRoot = mvccRoot;
|
|
@@ -2235,8 +2260,8 @@ var require_cjs = __commonJS({
|
|
|
2235
2260
|
}
|
|
2236
2261
|
return void 0;
|
|
2237
2262
|
}
|
|
2238
|
-
*keysStream(condition, filterValues, limit) {
|
|
2239
|
-
const stream = this.whereStream(condition, limit);
|
|
2263
|
+
*keysStream(condition, filterValues, limit, order = "asc") {
|
|
2264
|
+
const stream = this.whereStream(condition, limit, order);
|
|
2240
2265
|
const intersection = filterValues && filterValues.size > 0 ? filterValues : null;
|
|
2241
2266
|
for (const [key] of stream) {
|
|
2242
2267
|
if (intersection && !intersection.has(key)) {
|
|
@@ -2245,30 +2270,20 @@ var require_cjs = __commonJS({
|
|
|
2245
2270
|
yield key;
|
|
2246
2271
|
}
|
|
2247
2272
|
}
|
|
2248
|
-
*whereStream(condition, limit) {
|
|
2249
|
-
|
|
2250
|
-
if ("primaryEqual" in condition) driverKey = "primaryEqual";
|
|
2251
|
-
else if ("equal" in condition) driverKey = "equal";
|
|
2252
|
-
else if ("gt" in condition) driverKey = "gt";
|
|
2253
|
-
else if ("gte" in condition) driverKey = "gte";
|
|
2254
|
-
else if ("lt" in condition) driverKey = "lt";
|
|
2255
|
-
else if ("lte" in condition) driverKey = "lte";
|
|
2256
|
-
else if ("primaryGt" in condition) driverKey = "primaryGt";
|
|
2257
|
-
else if ("primaryGte" in condition) driverKey = "primaryGte";
|
|
2258
|
-
else if ("primaryLt" in condition) driverKey = "primaryLt";
|
|
2259
|
-
else if ("primaryLte" in condition) driverKey = "primaryLte";
|
|
2260
|
-
else if ("like" in condition) driverKey = "like";
|
|
2261
|
-
else if ("notEqual" in condition) driverKey = "notEqual";
|
|
2262
|
-
else if ("primaryNotEqual" in condition) driverKey = "primaryNotEqual";
|
|
2263
|
-
else if ("or" in condition) driverKey = "or";
|
|
2264
|
-
else if ("primaryOr" in condition) driverKey = "primaryOr";
|
|
2273
|
+
*whereStream(condition, limit, order = "asc") {
|
|
2274
|
+
const driverKey = this.getDriverKey(condition);
|
|
2265
2275
|
if (!driverKey) return;
|
|
2266
2276
|
const value = condition[driverKey];
|
|
2267
|
-
|
|
2268
|
-
|
|
2269
|
-
|
|
2277
|
+
let startNode = this.verifierStartNode[driverKey](value);
|
|
2278
|
+
let endNode = this.verifierEndNode[driverKey](value);
|
|
2279
|
+
let direction = this.verifierDirection[driverKey];
|
|
2270
2280
|
const comparator = this.verifierMap[driverKey];
|
|
2271
2281
|
const earlyTerminate = this.verifierEarlyTerminate[driverKey];
|
|
2282
|
+
if (order === "desc") {
|
|
2283
|
+
startNode = endNode ?? this.rightestNode();
|
|
2284
|
+
endNode = null;
|
|
2285
|
+
direction *= -1;
|
|
2286
|
+
}
|
|
2272
2287
|
const generator = this.getPairsGenerator(
|
|
2273
2288
|
value,
|
|
2274
2289
|
startNode,
|
|
@@ -2299,16 +2314,16 @@ var require_cjs = __commonJS({
|
|
|
2299
2314
|
}
|
|
2300
2315
|
}
|
|
2301
2316
|
}
|
|
2302
|
-
keys(condition, filterValues) {
|
|
2317
|
+
keys(condition, filterValues, order = "asc") {
|
|
2303
2318
|
const set = /* @__PURE__ */ new Set();
|
|
2304
|
-
for (const key of this.keysStream(condition, filterValues)) {
|
|
2319
|
+
for (const key of this.keysStream(condition, filterValues, void 0, order)) {
|
|
2305
2320
|
set.add(key);
|
|
2306
2321
|
}
|
|
2307
2322
|
return set;
|
|
2308
2323
|
}
|
|
2309
|
-
where(condition) {
|
|
2324
|
+
where(condition, order = "asc") {
|
|
2310
2325
|
const map = /* @__PURE__ */ new Map();
|
|
2311
|
-
for (const [key, value] of this.whereStream(condition)) {
|
|
2326
|
+
for (const [key, value] of this.whereStream(condition, void 0, order)) {
|
|
2312
2327
|
map.set(key, value);
|
|
2313
2328
|
}
|
|
2314
2329
|
return map;
|
|
@@ -3064,8 +3079,8 @@ var require_cjs = __commonJS({
|
|
|
3064
3079
|
}
|
|
3065
3080
|
return void 0;
|
|
3066
3081
|
}
|
|
3067
|
-
async *keysStream(condition, filterValues, limit) {
|
|
3068
|
-
const stream = this.whereStream(condition, limit);
|
|
3082
|
+
async *keysStream(condition, filterValues, limit, order = "asc") {
|
|
3083
|
+
const stream = this.whereStream(condition, limit, order);
|
|
3069
3084
|
const intersection = filterValues && filterValues.size > 0 ? filterValues : null;
|
|
3070
3085
|
for await (const [key] of stream) {
|
|
3071
3086
|
if (intersection && !intersection.has(key)) {
|
|
@@ -3074,30 +3089,20 @@ var require_cjs = __commonJS({
|
|
|
3074
3089
|
yield key;
|
|
3075
3090
|
}
|
|
3076
3091
|
}
|
|
3077
|
-
async *whereStream(condition, limit) {
|
|
3078
|
-
|
|
3079
|
-
if ("primaryEqual" in condition) driverKey = "primaryEqual";
|
|
3080
|
-
else if ("equal" in condition) driverKey = "equal";
|
|
3081
|
-
else if ("gt" in condition) driverKey = "gt";
|
|
3082
|
-
else if ("gte" in condition) driverKey = "gte";
|
|
3083
|
-
else if ("lt" in condition) driverKey = "lt";
|
|
3084
|
-
else if ("lte" in condition) driverKey = "lte";
|
|
3085
|
-
else if ("primaryGt" in condition) driverKey = "primaryGt";
|
|
3086
|
-
else if ("primaryGte" in condition) driverKey = "primaryGte";
|
|
3087
|
-
else if ("primaryLt" in condition) driverKey = "primaryLt";
|
|
3088
|
-
else if ("primaryLte" in condition) driverKey = "primaryLte";
|
|
3089
|
-
else if ("like" in condition) driverKey = "like";
|
|
3090
|
-
else if ("notEqual" in condition) driverKey = "notEqual";
|
|
3091
|
-
else if ("primaryNotEqual" in condition) driverKey = "primaryNotEqual";
|
|
3092
|
-
else if ("or" in condition) driverKey = "or";
|
|
3093
|
-
else if ("primaryOr" in condition) driverKey = "primaryOr";
|
|
3092
|
+
async *whereStream(condition, limit, order = "asc") {
|
|
3093
|
+
const driverKey = this.getDriverKey(condition);
|
|
3094
3094
|
if (!driverKey) return;
|
|
3095
3095
|
const value = condition[driverKey];
|
|
3096
|
-
|
|
3097
|
-
|
|
3098
|
-
|
|
3096
|
+
let startNode = await this.verifierStartNode[driverKey](value);
|
|
3097
|
+
let endNode = await this.verifierEndNode[driverKey](value);
|
|
3098
|
+
let direction = this.verifierDirection[driverKey];
|
|
3099
3099
|
const comparator = this.verifierMap[driverKey];
|
|
3100
3100
|
const earlyTerminate = this.verifierEarlyTerminate[driverKey];
|
|
3101
|
+
if (order === "desc") {
|
|
3102
|
+
startNode = endNode ?? await this.rightestNode();
|
|
3103
|
+
endNode = null;
|
|
3104
|
+
direction *= -1;
|
|
3105
|
+
}
|
|
3101
3106
|
const generator = this.getPairsGenerator(
|
|
3102
3107
|
value,
|
|
3103
3108
|
startNode,
|
|
@@ -3128,16 +3133,16 @@ var require_cjs = __commonJS({
|
|
|
3128
3133
|
}
|
|
3129
3134
|
}
|
|
3130
3135
|
}
|
|
3131
|
-
async keys(condition, filterValues) {
|
|
3136
|
+
async keys(condition, filterValues, order = "asc") {
|
|
3132
3137
|
const set = /* @__PURE__ */ new Set();
|
|
3133
|
-
for await (const key of this.keysStream(condition, filterValues)) {
|
|
3138
|
+
for await (const key of this.keysStream(condition, filterValues, void 0, order)) {
|
|
3134
3139
|
set.add(key);
|
|
3135
3140
|
}
|
|
3136
3141
|
return set;
|
|
3137
3142
|
}
|
|
3138
|
-
async where(condition) {
|
|
3143
|
+
async where(condition, order = "asc") {
|
|
3139
3144
|
const map = /* @__PURE__ */ new Map();
|
|
3140
|
-
for await (const [key, value] of this.whereStream(condition)) {
|
|
3145
|
+
for await (const [key, value] of this.whereStream(condition, void 0, order)) {
|
|
3141
3146
|
map.set(key, value);
|
|
3142
3147
|
}
|
|
3143
3148
|
return map;
|
|
@@ -8855,6 +8860,9 @@ var require_cjs = __commonJS({
|
|
|
8855
8860
|
get() {
|
|
8856
8861
|
return this.storage.getStore();
|
|
8857
8862
|
}
|
|
8863
|
+
stream(tx, callback) {
|
|
8864
|
+
return this.storage.run(tx, callback);
|
|
8865
|
+
}
|
|
8858
8866
|
};
|
|
8859
8867
|
var DataplyAPI2 = class {
|
|
8860
8868
|
constructor(file, options) {
|
|
@@ -9072,6 +9080,38 @@ var require_cjs = __commonJS({
|
|
|
9072
9080
|
}
|
|
9073
9081
|
return result;
|
|
9074
9082
|
}
|
|
9083
|
+
/**
|
|
9084
|
+
* Runs a generator callback function within a transaction context.
|
|
9085
|
+
* Similar to runWithDefault but allows yielding values from an AsyncGenerator.
|
|
9086
|
+
* If no transaction is provided, a new transaction is created.
|
|
9087
|
+
* The transaction is committed if the generator completes successfully,
|
|
9088
|
+
* or rolled back if an error occurs.
|
|
9089
|
+
* @param callback The generator callback function to run within the transaction context.
|
|
9090
|
+
* @param tx The transaction to use. If not provided, a new transaction is created.
|
|
9091
|
+
* @returns An AsyncGenerator that yields values from the callback.
|
|
9092
|
+
*/
|
|
9093
|
+
async *streamWithDefault(callback, tx) {
|
|
9094
|
+
const isInternalTx = !tx;
|
|
9095
|
+
if (!tx) {
|
|
9096
|
+
tx = this.createTransaction();
|
|
9097
|
+
}
|
|
9098
|
+
let hasError = false;
|
|
9099
|
+
try {
|
|
9100
|
+
const generator = this.txContext.stream(tx, () => callback(tx));
|
|
9101
|
+
for await (const value of generator) {
|
|
9102
|
+
yield value;
|
|
9103
|
+
}
|
|
9104
|
+
} catch (error) {
|
|
9105
|
+
hasError = true;
|
|
9106
|
+
if (isInternalTx) {
|
|
9107
|
+
await tx.rollback();
|
|
9108
|
+
}
|
|
9109
|
+
throw error;
|
|
9110
|
+
}
|
|
9111
|
+
if (!hasError && isInternalTx) {
|
|
9112
|
+
await tx.commit();
|
|
9113
|
+
}
|
|
9114
|
+
}
|
|
9075
9115
|
/**
|
|
9076
9116
|
* Retrieves metadata from the dataply.
|
|
9077
9117
|
* @returns Metadata of the dataply.
|
|
@@ -9369,12 +9409,12 @@ var DocumentSerializeStrategyAsync = class extends import_dataply.SerializeStrat
|
|
|
9369
9409
|
async readHead() {
|
|
9370
9410
|
const tx = this.txContext.get();
|
|
9371
9411
|
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
9372
|
-
const indexInfo = metadata.
|
|
9412
|
+
const indexInfo = metadata.indices[this.treeKey];
|
|
9373
9413
|
if (!indexInfo) return null;
|
|
9374
9414
|
const headPk = indexInfo[0];
|
|
9375
9415
|
if (headPk === -1) {
|
|
9376
9416
|
const pk = await this.api.insertAsOverflow("__BPTREE_HEAD_PLACEHOLDER__", false, tx);
|
|
9377
|
-
metadata.
|
|
9417
|
+
metadata.indices[this.treeKey][0] = pk;
|
|
9378
9418
|
await this.api.updateDocumentInnerMetadata(metadata, tx);
|
|
9379
9419
|
return null;
|
|
9380
9420
|
}
|
|
@@ -9385,7 +9425,7 @@ var DocumentSerializeStrategyAsync = class extends import_dataply.SerializeStrat
|
|
|
9385
9425
|
async writeHead(head) {
|
|
9386
9426
|
const tx = this.txContext.get();
|
|
9387
9427
|
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
9388
|
-
const indexInfo = metadata.
|
|
9428
|
+
const indexInfo = metadata.indices[this.treeKey];
|
|
9389
9429
|
if (!indexInfo) {
|
|
9390
9430
|
throw new Error(`Index info not found for tree: ${this.treeKey}. Initialization should be handled outside.`);
|
|
9391
9431
|
}
|
|
@@ -9424,7 +9464,7 @@ async function catchPromise(promise) {
|
|
|
9424
9464
|
|
|
9425
9465
|
// src/core/document.ts
|
|
9426
9466
|
var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
9427
|
-
|
|
9467
|
+
indices = {};
|
|
9428
9468
|
trees = /* @__PURE__ */ new Map();
|
|
9429
9469
|
comparator = new DocumentValueComparator();
|
|
9430
9470
|
pendingBackfillFields = [];
|
|
@@ -9441,18 +9481,18 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
9441
9481
|
throw new Error("Document metadata verification failed");
|
|
9442
9482
|
}
|
|
9443
9483
|
const metadata = await this.getDocumentInnerMetadata(tx);
|
|
9444
|
-
const
|
|
9445
|
-
const
|
|
9446
|
-
...
|
|
9484
|
+
const optionsIndices = options.indices ?? {};
|
|
9485
|
+
const targetIndices = {
|
|
9486
|
+
...optionsIndices,
|
|
9447
9487
|
_id: true
|
|
9448
9488
|
};
|
|
9449
9489
|
const backfillTargets = [];
|
|
9450
9490
|
let isMetadataChanged = false;
|
|
9451
|
-
for (const field in
|
|
9452
|
-
const isBackfillEnabled =
|
|
9453
|
-
const existingIndex = metadata.
|
|
9491
|
+
for (const field in targetIndices) {
|
|
9492
|
+
const isBackfillEnabled = targetIndices[field];
|
|
9493
|
+
const existingIndex = metadata.indices[field];
|
|
9454
9494
|
if (!existingIndex) {
|
|
9455
|
-
metadata.
|
|
9495
|
+
metadata.indices[field] = [-1, isBackfillEnabled];
|
|
9456
9496
|
isMetadataChanged = true;
|
|
9457
9497
|
if (isBackfillEnabled && !isNewlyCreated) {
|
|
9458
9498
|
backfillTargets.push(field);
|
|
@@ -9460,11 +9500,11 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
9460
9500
|
} else {
|
|
9461
9501
|
const [_pk, isMetaBackfillEnabled] = existingIndex;
|
|
9462
9502
|
if (isBackfillEnabled && !isMetaBackfillEnabled) {
|
|
9463
|
-
metadata.
|
|
9503
|
+
metadata.indices[field][1] = true;
|
|
9464
9504
|
isMetadataChanged = true;
|
|
9465
9505
|
backfillTargets.push(field);
|
|
9466
9506
|
} else if (!isBackfillEnabled && isMetaBackfillEnabled) {
|
|
9467
|
-
metadata.
|
|
9507
|
+
metadata.indices[field][1] = false;
|
|
9468
9508
|
isMetadataChanged = true;
|
|
9469
9509
|
}
|
|
9470
9510
|
}
|
|
@@ -9472,9 +9512,9 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
9472
9512
|
if (isMetadataChanged) {
|
|
9473
9513
|
await this.updateDocumentInnerMetadata(metadata, tx);
|
|
9474
9514
|
}
|
|
9475
|
-
this.
|
|
9476
|
-
for (const field in this.
|
|
9477
|
-
if (field in
|
|
9515
|
+
this.indices = metadata.indices;
|
|
9516
|
+
for (const field in this.indices) {
|
|
9517
|
+
if (field in targetIndices) {
|
|
9478
9518
|
const tree = new import_dataply3.BPTreeAsync(
|
|
9479
9519
|
new DocumentSerializeStrategyAsync(
|
|
9480
9520
|
this.rowTableEngine.order,
|
|
@@ -9576,14 +9616,14 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
9576
9616
|
return backfilledCount;
|
|
9577
9617
|
}, tx);
|
|
9578
9618
|
}
|
|
9579
|
-
createDocumentInnerMetadata(
|
|
9619
|
+
createDocumentInnerMetadata(indices) {
|
|
9580
9620
|
return {
|
|
9581
9621
|
magicString: "document-dataply",
|
|
9582
9622
|
version: 1,
|
|
9583
9623
|
createdAt: Date.now(),
|
|
9584
9624
|
updatedAt: Date.now(),
|
|
9585
9625
|
lastId: 0,
|
|
9586
|
-
|
|
9626
|
+
indices
|
|
9587
9627
|
};
|
|
9588
9628
|
}
|
|
9589
9629
|
async initializeDocumentFile(tx) {
|
|
@@ -9643,8 +9683,42 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
9643
9683
|
await this.update(1, JSON.stringify(metadata), tx);
|
|
9644
9684
|
}
|
|
9645
9685
|
};
|
|
9646
|
-
var DocumentDataply = class {
|
|
9686
|
+
var DocumentDataply = class _DocumentDataply {
|
|
9687
|
+
/**
|
|
9688
|
+
* Starts the database definition by setting the document type.
|
|
9689
|
+
* This is used to ensure TypeScript type inference works correctly for the document structure.
|
|
9690
|
+
* @template T The structure of the document to be stored.
|
|
9691
|
+
*/
|
|
9692
|
+
static Define() {
|
|
9693
|
+
return {
|
|
9694
|
+
/**
|
|
9695
|
+
* Sets the options for the database, such as index configurations and WAL settings.
|
|
9696
|
+
* @template IC The configuration of indices.
|
|
9697
|
+
* @param options The database initialization options.
|
|
9698
|
+
*/
|
|
9699
|
+
Options: (options) => _DocumentDataply.Options(options)
|
|
9700
|
+
};
|
|
9701
|
+
}
|
|
9702
|
+
/**
|
|
9703
|
+
* Internal method used by the Define-chain to pass options.
|
|
9704
|
+
*/
|
|
9705
|
+
static Options(options) {
|
|
9706
|
+
return {
|
|
9707
|
+
/**
|
|
9708
|
+
* Creates or opens the database instance with the specified file path.
|
|
9709
|
+
* @param file The path to the database file.
|
|
9710
|
+
*/
|
|
9711
|
+
Open: (file) => _DocumentDataply.Open(file, options)
|
|
9712
|
+
};
|
|
9713
|
+
}
|
|
9714
|
+
/**
|
|
9715
|
+
* Internal method used to finalize construction and create the instance.
|
|
9716
|
+
*/
|
|
9717
|
+
static Open(file, options) {
|
|
9718
|
+
return new _DocumentDataply(file, options);
|
|
9719
|
+
}
|
|
9647
9720
|
api;
|
|
9721
|
+
indexedFields;
|
|
9648
9722
|
operatorConverters = {
|
|
9649
9723
|
equal: "primaryEqual",
|
|
9650
9724
|
notEqual: "primaryNotEqual",
|
|
@@ -9657,6 +9731,12 @@ var DocumentDataply = class {
|
|
|
9657
9731
|
};
|
|
9658
9732
|
constructor(file, options) {
|
|
9659
9733
|
this.api = new DocumentDataplyAPI(file, options ?? {});
|
|
9734
|
+
this.indexedFields = /* @__PURE__ */ new Set(["_id"]);
|
|
9735
|
+
if (options?.indices) {
|
|
9736
|
+
for (const field of Object.keys(options.indices)) {
|
|
9737
|
+
this.indexedFields.add(field);
|
|
9738
|
+
}
|
|
9739
|
+
}
|
|
9660
9740
|
}
|
|
9661
9741
|
/**
|
|
9662
9742
|
* Initialize the document database
|
|
@@ -9706,16 +9786,29 @@ var DocumentDataply = class {
|
|
|
9706
9786
|
}
|
|
9707
9787
|
/**
|
|
9708
9788
|
* Get the selectivity candidate for the given query
|
|
9709
|
-
* @param query
|
|
9710
|
-
* @
|
|
9789
|
+
* @param query The query conditions
|
|
9790
|
+
* @param orderByField Optional field name for orderBy optimization
|
|
9791
|
+
* @returns Driver and other candidates for query execution
|
|
9711
9792
|
*/
|
|
9712
|
-
async getSelectivityCandidate(query) {
|
|
9793
|
+
async getSelectivityCandidate(query, orderByField) {
|
|
9713
9794
|
const candidates = [];
|
|
9714
9795
|
for (const field in query) {
|
|
9715
9796
|
const tree = this.api.trees.get(field);
|
|
9716
9797
|
if (!tree) continue;
|
|
9717
9798
|
const condition = query[field];
|
|
9718
|
-
candidates.push({ tree, condition });
|
|
9799
|
+
candidates.push({ tree, condition, field });
|
|
9800
|
+
}
|
|
9801
|
+
if (candidates.length === 0) {
|
|
9802
|
+
return null;
|
|
9803
|
+
}
|
|
9804
|
+
if (orderByField) {
|
|
9805
|
+
const orderByCandidate = candidates.find((c) => c.field === orderByField);
|
|
9806
|
+
if (orderByCandidate) {
|
|
9807
|
+
return {
|
|
9808
|
+
driver: orderByCandidate,
|
|
9809
|
+
others: candidates.filter((c) => c.field !== orderByField)
|
|
9810
|
+
};
|
|
9811
|
+
}
|
|
9719
9812
|
}
|
|
9720
9813
|
let res = import_dataply3.BPTreeAsync.ChooseDriver(candidates);
|
|
9721
9814
|
if (!res && candidates.length > 0) {
|
|
@@ -9723,10 +9816,7 @@ var DocumentDataply = class {
|
|
|
9723
9816
|
}
|
|
9724
9817
|
if (!res) return null;
|
|
9725
9818
|
return {
|
|
9726
|
-
driver:
|
|
9727
|
-
tree: res.tree,
|
|
9728
|
-
condition: res.condition
|
|
9729
|
-
},
|
|
9819
|
+
driver: res,
|
|
9730
9820
|
others: candidates.filter((c) => c.tree !== res.tree)
|
|
9731
9821
|
};
|
|
9732
9822
|
}
|
|
@@ -9813,44 +9903,204 @@ var DocumentDataply = class {
|
|
|
9813
9903
|
}, tx));
|
|
9814
9904
|
}
|
|
9815
9905
|
/**
|
|
9816
|
-
*
|
|
9906
|
+
* Internal update method used by both fullUpdate and partialUpdate
|
|
9817
9907
|
* @param query The query to use
|
|
9818
|
-
* @param
|
|
9908
|
+
* @param computeUpdatedDoc Function that computes the updated document from the original
|
|
9909
|
+
* @param tx The transaction to use
|
|
9910
|
+
* @returns The number of updated documents
|
|
9911
|
+
*/
|
|
9912
|
+
async updateInternal(query, computeUpdatedDoc, tx) {
|
|
9913
|
+
const idTree = this.api.trees.get("_id");
|
|
9914
|
+
if (!idTree) {
|
|
9915
|
+
throw new Error("ID tree not found");
|
|
9916
|
+
}
|
|
9917
|
+
const { stream } = this.select(query, {}, tx);
|
|
9918
|
+
let updatedCount = 0;
|
|
9919
|
+
for await (const doc of stream) {
|
|
9920
|
+
const id = doc._id;
|
|
9921
|
+
let pk = null;
|
|
9922
|
+
for await (const [entryPk] of idTree.whereStream({ primaryEqual: { v: id } })) {
|
|
9923
|
+
pk = entryPk;
|
|
9924
|
+
break;
|
|
9925
|
+
}
|
|
9926
|
+
if (pk === null) continue;
|
|
9927
|
+
const updatedDoc = computeUpdatedDoc(doc);
|
|
9928
|
+
const oldFlatDoc = this.api.flattenDocument(doc);
|
|
9929
|
+
const newFlatDoc = this.api.flattenDocument(updatedDoc);
|
|
9930
|
+
for (const [field, tree] of this.api.trees) {
|
|
9931
|
+
const oldV = oldFlatDoc[field];
|
|
9932
|
+
const newV = newFlatDoc[field];
|
|
9933
|
+
if (oldV === newV) continue;
|
|
9934
|
+
if (oldV !== void 0) {
|
|
9935
|
+
await tree.delete(pk, { k: pk, v: oldV });
|
|
9936
|
+
}
|
|
9937
|
+
if (newV !== void 0) {
|
|
9938
|
+
await tree.insert(pk, { k: pk, v: newV });
|
|
9939
|
+
}
|
|
9940
|
+
}
|
|
9941
|
+
await this.api.update(pk, JSON.stringify(updatedDoc), tx);
|
|
9942
|
+
updatedCount++;
|
|
9943
|
+
}
|
|
9944
|
+
return updatedCount;
|
|
9945
|
+
}
|
|
9946
|
+
/**
|
|
9947
|
+
* Fully update documents from the database that match the query
|
|
9948
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
9949
|
+
* @param newRecord Complete document to replace with, or function that receives current document and returns new document
|
|
9950
|
+
* @param tx The transaction to use
|
|
9951
|
+
* @returns The number of updated documents
|
|
9952
|
+
*/
|
|
9953
|
+
async fullUpdate(query, newRecord, tx) {
|
|
9954
|
+
return this.api.writeLock(() => this.api.runWithDefault(async (tx2) => {
|
|
9955
|
+
return this.updateInternal(query, (doc) => {
|
|
9956
|
+
const newDoc = typeof newRecord === "function" ? newRecord(doc) : newRecord;
|
|
9957
|
+
return { _id: doc._id, ...newDoc };
|
|
9958
|
+
}, tx2);
|
|
9959
|
+
}, tx));
|
|
9960
|
+
}
|
|
9961
|
+
/**
|
|
9962
|
+
* Partially update documents from the database that match the query
|
|
9963
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
9964
|
+
* @param newRecord Partial document to merge, or function that receives current document and returns partial update
|
|
9965
|
+
* @param tx The transaction to use
|
|
9966
|
+
* @returns The number of updated documents
|
|
9967
|
+
*/
|
|
9968
|
+
async partialUpdate(query, newRecord, tx) {
|
|
9969
|
+
return this.api.writeLock(() => this.api.runWithDefault(async (tx2) => {
|
|
9970
|
+
return this.updateInternal(query, (doc) => {
|
|
9971
|
+
const partialUpdate = typeof newRecord === "function" ? newRecord(doc) : newRecord;
|
|
9972
|
+
delete partialUpdate._id;
|
|
9973
|
+
return { ...doc, ...partialUpdate };
|
|
9974
|
+
}, tx2);
|
|
9975
|
+
}, tx));
|
|
9976
|
+
}
|
|
9977
|
+
/**
|
|
9978
|
+
* Delete documents from the database that match the query
|
|
9979
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
9980
|
+
* @param tx The transaction to use
|
|
9981
|
+
* @returns The number of deleted documents
|
|
9982
|
+
*/
|
|
9983
|
+
async delete(query, tx) {
|
|
9984
|
+
return this.api.writeLock(() => this.api.runWithDefault(async (tx2) => {
|
|
9985
|
+
const idTree = this.api.trees.get("_id");
|
|
9986
|
+
if (!idTree) {
|
|
9987
|
+
throw new Error("ID tree not found");
|
|
9988
|
+
}
|
|
9989
|
+
const { stream } = this.select(query, {}, tx2);
|
|
9990
|
+
let deletedCount = 0;
|
|
9991
|
+
for await (const doc of stream) {
|
|
9992
|
+
const id = doc._id;
|
|
9993
|
+
let pk = null;
|
|
9994
|
+
for await (const [entryPk] of idTree.whereStream({ primaryEqual: { v: id } })) {
|
|
9995
|
+
pk = entryPk;
|
|
9996
|
+
break;
|
|
9997
|
+
}
|
|
9998
|
+
if (pk === null) continue;
|
|
9999
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
10000
|
+
for (const [field, tree] of this.api.trees) {
|
|
10001
|
+
const v = flatDoc[field];
|
|
10002
|
+
if (v === void 0) continue;
|
|
10003
|
+
await tree.delete(pk, { k: pk, v });
|
|
10004
|
+
}
|
|
10005
|
+
await this.api.delete(pk, true, tx2);
|
|
10006
|
+
deletedCount++;
|
|
10007
|
+
}
|
|
10008
|
+
return deletedCount;
|
|
10009
|
+
}, tx));
|
|
10010
|
+
}
|
|
10011
|
+
/**
|
|
10012
|
+
* Select documents from the database
|
|
10013
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
10014
|
+
* @param options The options to use
|
|
9819
10015
|
* @param tx The transaction to use
|
|
9820
10016
|
* @returns The documents that match the query
|
|
10017
|
+
* @throws Error if query or orderBy contains non-indexed fields
|
|
9821
10018
|
*/
|
|
9822
|
-
|
|
9823
|
-
|
|
9824
|
-
|
|
9825
|
-
|
|
9826
|
-
|
|
9827
|
-
|
|
10019
|
+
select(query, options = {}, tx) {
|
|
10020
|
+
for (const field of Object.keys(query)) {
|
|
10021
|
+
if (!this.indexedFields.has(field)) {
|
|
10022
|
+
throw new Error(`Query field "${field}" is not indexed. Available indexed fields: ${Array.from(this.indexedFields).join(", ")}`);
|
|
10023
|
+
}
|
|
10024
|
+
}
|
|
10025
|
+
const orderBy = options.orderBy ?? "_id";
|
|
10026
|
+
if (!this.indexedFields.has(orderBy)) {
|
|
10027
|
+
throw new Error(`orderBy field "${orderBy}" is not indexed. Available indexed fields: ${Array.from(this.indexedFields).join(", ")}`);
|
|
10028
|
+
}
|
|
10029
|
+
const {
|
|
10030
|
+
limit = Infinity,
|
|
10031
|
+
sortOrder = "asc"
|
|
10032
|
+
} = options;
|
|
10033
|
+
const self = this;
|
|
10034
|
+
const stream = this.api.streamWithDefault(async function* (tx2) {
|
|
10035
|
+
const isQueryEmpty = Object.keys(query).length === 0;
|
|
10036
|
+
const normalizedQuery = isQueryEmpty ? { _id: { gte: 0 } } : query;
|
|
10037
|
+
const verbose = self.verboseQuery(normalizedQuery);
|
|
10038
|
+
const orderByTree = self.api.trees.get(orderBy);
|
|
10039
|
+
const selectivity = await self.getSelectivityCandidate(
|
|
10040
|
+
verbose,
|
|
10041
|
+
orderByTree ? orderBy : void 0
|
|
10042
|
+
);
|
|
10043
|
+
if (!selectivity) return;
|
|
9828
10044
|
const { driver, others } = selectivity;
|
|
9829
|
-
const
|
|
9830
|
-
|
|
9831
|
-
|
|
9832
|
-
|
|
9833
|
-
|
|
9834
|
-
if (
|
|
9835
|
-
|
|
9836
|
-
|
|
10045
|
+
const isDriverOrderByField = orderByTree && driver.field === orderBy;
|
|
10046
|
+
if (isDriverOrderByField) {
|
|
10047
|
+
const driverStream = driver.tree.whereStream(driver.condition, limit, sortOrder);
|
|
10048
|
+
let i = 0;
|
|
10049
|
+
for await (const [pk, val] of driverStream) {
|
|
10050
|
+
if (i >= limit) break;
|
|
10051
|
+
let isMatch = true;
|
|
10052
|
+
for (const { tree, condition } of others) {
|
|
10053
|
+
const targetValue = await tree.get(pk);
|
|
10054
|
+
if (targetValue === void 0 || !tree.verify(targetValue, condition)) {
|
|
10055
|
+
isMatch = false;
|
|
10056
|
+
break;
|
|
10057
|
+
}
|
|
10058
|
+
}
|
|
10059
|
+
if (isMatch) {
|
|
10060
|
+
const stringified = await self.api.select(pk, false, tx2);
|
|
10061
|
+
if (!stringified) continue;
|
|
10062
|
+
yield JSON.parse(stringified);
|
|
10063
|
+
i++;
|
|
9837
10064
|
}
|
|
9838
10065
|
}
|
|
9839
|
-
|
|
9840
|
-
|
|
9841
|
-
|
|
10066
|
+
} else {
|
|
10067
|
+
const results = [];
|
|
10068
|
+
const driverStream = driver.tree.whereStream(driver.condition);
|
|
10069
|
+
for await (const [pk, val] of driverStream) {
|
|
10070
|
+
let isMatch = true;
|
|
10071
|
+
for (const { tree, condition } of others) {
|
|
10072
|
+
const targetValue = await tree.get(pk);
|
|
10073
|
+
if (targetValue === void 0 || !tree.verify(targetValue, condition)) {
|
|
10074
|
+
isMatch = false;
|
|
10075
|
+
break;
|
|
10076
|
+
}
|
|
10077
|
+
}
|
|
10078
|
+
if (isMatch) {
|
|
10079
|
+
const stringified = await self.api.select(pk, false, tx2);
|
|
10080
|
+
if (!stringified) continue;
|
|
10081
|
+
results.push(JSON.parse(stringified));
|
|
10082
|
+
}
|
|
9842
10083
|
}
|
|
9843
|
-
|
|
9844
|
-
|
|
9845
|
-
|
|
9846
|
-
|
|
9847
|
-
|
|
9848
|
-
|
|
10084
|
+
results.sort((a, b) => {
|
|
10085
|
+
const aVal = a[orderBy] ?? a._id;
|
|
10086
|
+
const bVal = b[orderBy] ?? b._id;
|
|
10087
|
+
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
10088
|
+
return sortOrder === "asc" ? cmp : -cmp;
|
|
10089
|
+
});
|
|
10090
|
+
const limitedResults = results.slice(0, limit === Infinity ? void 0 : limit);
|
|
10091
|
+
for (const doc of limitedResults) {
|
|
10092
|
+
yield doc;
|
|
9849
10093
|
}
|
|
9850
|
-
documents.push(JSON.parse(stringify));
|
|
9851
10094
|
}
|
|
9852
|
-
return documents;
|
|
9853
10095
|
}, tx);
|
|
10096
|
+
const drain = async () => {
|
|
10097
|
+
const result = [];
|
|
10098
|
+
for await (const document of stream) {
|
|
10099
|
+
result.push(document);
|
|
10100
|
+
}
|
|
10101
|
+
return result;
|
|
10102
|
+
};
|
|
10103
|
+
return { stream, drain };
|
|
9854
10104
|
}
|
|
9855
10105
|
/**
|
|
9856
10106
|
* Close the document database
|
|
@@ -2,10 +2,10 @@ import type { DataplyTreeValue, Primitive } from '../../types';
|
|
|
2
2
|
import { BPTreeNode, SerializeStrategyAsync, type SerializeStrategyHead } from 'dataply';
|
|
3
3
|
import { DocumentDataplyAPI } from '../document';
|
|
4
4
|
export declare class DocumentSerializeStrategyAsync<T extends Primitive> extends SerializeStrategyAsync<number, DataplyTreeValue<T>> {
|
|
5
|
-
protected readonly api: DocumentDataplyAPI<any>;
|
|
6
|
-
protected readonly txContext: DocumentDataplyAPI<any>['txContext'];
|
|
5
|
+
protected readonly api: DocumentDataplyAPI<any, any>;
|
|
6
|
+
protected readonly txContext: DocumentDataplyAPI<any, any>['txContext'];
|
|
7
7
|
readonly treeKey: string;
|
|
8
|
-
constructor(order: number, api: DocumentDataplyAPI<any>, txContext: DocumentDataplyAPI<any>['txContext'], treeKey: string);
|
|
8
|
+
constructor(order: number, api: DocumentDataplyAPI<any, any>, txContext: DocumentDataplyAPI<any, any>['txContext'], treeKey: string);
|
|
9
9
|
id(isLeaf: boolean): Promise<string>;
|
|
10
10
|
read(id: string): Promise<BPTreeNode<number, DataplyTreeValue<T>>>;
|
|
11
11
|
write(id: string, node: BPTreeNode<number, DataplyTreeValue<T>>): Promise<void>;
|
|
@@ -1,14 +1,15 @@
|
|
|
1
|
-
import type { DataplyTreeValue, DocumentDataplyInnerMetadata, DocumentDataplyOptions, DocumentJSON, FlattenedDocumentJSON, Primitive, DocumentDataplyQuery,
|
|
1
|
+
import type { DataplyTreeValue, DocumentDataplyInnerMetadata, DocumentDataplyOptions, DocumentJSON, FlattenedDocumentJSON, Primitive, DocumentDataplyQuery, DocumentDataplyIndexedQuery, DocumentDataplyCondition, DataplyDocument, DocumentDataplyMetadata, DocumentDataplyQueryOptions, IndexConfig } from '../types';
|
|
2
2
|
import { DataplyAPI, Transaction, BPTreeAsync } from 'dataply';
|
|
3
3
|
import { DocumentValueComparator } from './bptree/documentComparator';
|
|
4
|
-
export declare class DocumentDataplyAPI<T extends DocumentJSON
|
|
4
|
+
export declare class DocumentDataplyAPI<T extends DocumentJSON, IC extends IndexConfig<T>> extends DataplyAPI {
|
|
5
5
|
runWithDefault: <T_1>(callback: (tx: Transaction) => Promise<T_1>, tx?: Transaction) => Promise<T_1>;
|
|
6
|
-
|
|
6
|
+
streamWithDefault: <T_1>(callback: (tx: Transaction) => AsyncGenerator<T_1>, tx?: Transaction) => AsyncGenerator<T_1>;
|
|
7
|
+
indices: DocumentDataplyInnerMetadata['indices'];
|
|
7
8
|
readonly trees: Map<string, BPTreeAsync<number, DataplyTreeValue<Primitive>>>;
|
|
8
9
|
readonly comparator: DocumentValueComparator<DataplyTreeValue<Primitive>, Primitive>;
|
|
9
10
|
private pendingBackfillFields;
|
|
10
11
|
private readonly lock;
|
|
11
|
-
constructor(file: string, options: DocumentDataplyOptions);
|
|
12
|
+
constructor(file: string, options: DocumentDataplyOptions<T, IC>);
|
|
12
13
|
readLock<T>(fn: () => T): Promise<T>;
|
|
13
14
|
writeLock<T>(fn: () => T): Promise<T>;
|
|
14
15
|
getDocument(pk: number, tx?: Transaction): Promise<DataplyDocument<T>>;
|
|
@@ -20,7 +21,7 @@ export declare class DocumentDataplyAPI<T extends DocumentJSON> extends DataplyA
|
|
|
20
21
|
* @returns Number of documents that were backfilled
|
|
21
22
|
*/
|
|
22
23
|
backfillIndices(tx?: Transaction): Promise<number>;
|
|
23
|
-
createDocumentInnerMetadata(
|
|
24
|
+
createDocumentInnerMetadata(indices: DocumentDataplyInnerMetadata['indices']): DocumentDataplyInnerMetadata;
|
|
24
25
|
initializeDocumentFile(tx: Transaction): Promise<void>;
|
|
25
26
|
verifyDocumentFile(tx: Transaction): Promise<boolean>;
|
|
26
27
|
/**
|
|
@@ -33,10 +34,38 @@ export declare class DocumentDataplyAPI<T extends DocumentJSON> extends DataplyA
|
|
|
33
34
|
getDocumentInnerMetadata(tx: Transaction): Promise<DocumentDataplyInnerMetadata>;
|
|
34
35
|
updateDocumentInnerMetadata(metadata: DocumentDataplyInnerMetadata, tx: Transaction): Promise<void>;
|
|
35
36
|
}
|
|
36
|
-
export declare class DocumentDataply<T extends DocumentJSON
|
|
37
|
-
|
|
37
|
+
export declare class DocumentDataply<T extends DocumentJSON, IC extends IndexConfig<T>> {
|
|
38
|
+
/**
|
|
39
|
+
* Starts the database definition by setting the document type.
|
|
40
|
+
* This is used to ensure TypeScript type inference works correctly for the document structure.
|
|
41
|
+
* @template T The structure of the document to be stored.
|
|
42
|
+
*/
|
|
43
|
+
static Define<T extends DocumentJSON>(): {
|
|
44
|
+
/**
|
|
45
|
+
* Sets the options for the database, such as index configurations and WAL settings.
|
|
46
|
+
* @template IC The configuration of indices.
|
|
47
|
+
* @param options The database initialization options.
|
|
48
|
+
*/
|
|
49
|
+
Options: <IC extends IndexConfig<T>>(options: DocumentDataplyOptions<T, IC>) => {
|
|
50
|
+
/**
|
|
51
|
+
* Creates or opens the database instance with the specified file path.
|
|
52
|
+
* @param file The path to the database file.
|
|
53
|
+
*/
|
|
54
|
+
Open: (file: string) => DocumentDataply<T, IC>;
|
|
55
|
+
};
|
|
56
|
+
};
|
|
57
|
+
/**
|
|
58
|
+
* Internal method used by the Define-chain to pass options.
|
|
59
|
+
*/
|
|
60
|
+
private static Options;
|
|
61
|
+
/**
|
|
62
|
+
* Internal method used to finalize construction and create the instance.
|
|
63
|
+
*/
|
|
64
|
+
private static Open;
|
|
65
|
+
protected readonly api: DocumentDataplyAPI<T, IC>;
|
|
66
|
+
private readonly indexedFields;
|
|
38
67
|
private readonly operatorConverters;
|
|
39
|
-
constructor(file: string, options?: DocumentDataplyOptions);
|
|
68
|
+
protected constructor(file: string, options?: DocumentDataplyOptions<T, IC>);
|
|
40
69
|
/**
|
|
41
70
|
* Initialize the document database
|
|
42
71
|
*/
|
|
@@ -52,17 +81,20 @@ export declare class DocumentDataply<T extends DocumentJSON> {
|
|
|
52
81
|
private verboseQuery;
|
|
53
82
|
/**
|
|
54
83
|
* Get the selectivity candidate for the given query
|
|
55
|
-
* @param query
|
|
56
|
-
* @
|
|
84
|
+
* @param query The query conditions
|
|
85
|
+
* @param orderByField Optional field name for orderBy optimization
|
|
86
|
+
* @returns Driver and other candidates for query execution
|
|
57
87
|
*/
|
|
58
|
-
getSelectivityCandidate<U extends
|
|
88
|
+
getSelectivityCandidate<U extends Partial<DocumentDataplyIndexedQuery<T, IC>>, V extends DataplyTreeValue<U>>(query: Partial<DocumentDataplyQuery<V>>, orderByField?: string): Promise<{
|
|
59
89
|
driver: {
|
|
60
90
|
tree: BPTreeAsync<number, V>;
|
|
61
91
|
condition: Partial<DocumentDataplyCondition<U>>;
|
|
92
|
+
field: string;
|
|
62
93
|
};
|
|
63
94
|
others: {
|
|
64
95
|
tree: BPTreeAsync<number, V>;
|
|
65
96
|
condition: Partial<DocumentDataplyCondition<U>>;
|
|
97
|
+
field: string;
|
|
66
98
|
}[];
|
|
67
99
|
} | null>;
|
|
68
100
|
private insertDocument;
|
|
@@ -81,13 +113,48 @@ export declare class DocumentDataply<T extends DocumentJSON> {
|
|
|
81
113
|
*/
|
|
82
114
|
insertBatch(documents: T[], tx?: Transaction): Promise<number[]>;
|
|
83
115
|
/**
|
|
84
|
-
*
|
|
116
|
+
* Internal update method used by both fullUpdate and partialUpdate
|
|
85
117
|
* @param query The query to use
|
|
86
|
-
* @param
|
|
118
|
+
* @param computeUpdatedDoc Function that computes the updated document from the original
|
|
119
|
+
* @param tx The transaction to use
|
|
120
|
+
* @returns The number of updated documents
|
|
121
|
+
*/
|
|
122
|
+
private updateInternal;
|
|
123
|
+
/**
|
|
124
|
+
* Fully update documents from the database that match the query
|
|
125
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
126
|
+
* @param newRecord Complete document to replace with, or function that receives current document and returns new document
|
|
127
|
+
* @param tx The transaction to use
|
|
128
|
+
* @returns The number of updated documents
|
|
129
|
+
*/
|
|
130
|
+
fullUpdate(query: Partial<DocumentDataplyIndexedQuery<T, IC>>, newRecord: T | ((document: DataplyDocument<T>) => T), tx?: Transaction): Promise<number>;
|
|
131
|
+
/**
|
|
132
|
+
* Partially update documents from the database that match the query
|
|
133
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
134
|
+
* @param newRecord Partial document to merge, or function that receives current document and returns partial update
|
|
135
|
+
* @param tx The transaction to use
|
|
136
|
+
* @returns The number of updated documents
|
|
137
|
+
*/
|
|
138
|
+
partialUpdate(query: Partial<DocumentDataplyIndexedQuery<T, IC>>, newRecord: Partial<DataplyDocument<T>> | ((document: DataplyDocument<T>) => Partial<DataplyDocument<T>>), tx?: Transaction): Promise<number>;
|
|
139
|
+
/**
|
|
140
|
+
* Delete documents from the database that match the query
|
|
141
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
142
|
+
* @param tx The transaction to use
|
|
143
|
+
* @returns The number of deleted documents
|
|
144
|
+
*/
|
|
145
|
+
delete(query: Partial<DocumentDataplyIndexedQuery<T, IC>>, tx?: Transaction): Promise<number>;
|
|
146
|
+
/**
|
|
147
|
+
* Select documents from the database
|
|
148
|
+
* @param query The query to use (only indexed fields + _id allowed)
|
|
149
|
+
* @param options The options to use
|
|
87
150
|
* @param tx The transaction to use
|
|
88
151
|
* @returns The documents that match the query
|
|
152
|
+
* @throws Error if query or orderBy contains non-indexed fields
|
|
89
153
|
*/
|
|
90
|
-
select(query: Partial<
|
|
154
|
+
select(query: Partial<DocumentDataplyIndexedQuery<T, IC>>, options?: DocumentDataplyQueryOptions<T, IC>, tx?: Transaction): {
|
|
155
|
+
stream: AsyncIterableIterator<DataplyDocument<T>>;
|
|
156
|
+
drain: () => Promise<DataplyDocument<T>[]>;
|
|
157
|
+
};
|
|
91
158
|
/**
|
|
92
159
|
* Close the document database
|
|
93
160
|
*/
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import type { DataplyOptions } from 'dataply';
|
|
1
|
+
import type { BPTreeOrder, DataplyOptions } from 'dataply';
|
|
2
2
|
export type Primitive = string | number | boolean | null;
|
|
3
3
|
export type JSONValue = Primitive | JSONValue[] | {
|
|
4
4
|
[key: string]: JSONValue;
|
|
@@ -15,7 +15,7 @@ export interface DocumentDataplyInnerMetadata {
|
|
|
15
15
|
createdAt: number;
|
|
16
16
|
updatedAt: number;
|
|
17
17
|
lastId: number;
|
|
18
|
-
|
|
18
|
+
indices: {
|
|
19
19
|
[key: string]: [number, boolean];
|
|
20
20
|
};
|
|
21
21
|
}
|
|
@@ -52,10 +52,21 @@ export type DocumentDataplyQuery<T> = {
|
|
|
52
52
|
} & {
|
|
53
53
|
[key: string]: any;
|
|
54
54
|
};
|
|
55
|
+
/**
|
|
56
|
+
* Query type restricted to indexed fields only
|
|
57
|
+
*/
|
|
58
|
+
export type DocumentDataplyIndexedQuery<T extends DocumentJSON, IC extends IndexConfig<T>> = {
|
|
59
|
+
[key in keyof IC]: key extends keyof FinalFlatten<DataplyDocument<T>> ? FinalFlatten<DataplyDocument<T>>[key] | DocumentDataplyCondition<FinalFlatten<DataplyDocument<T>>[key]> : never;
|
|
60
|
+
};
|
|
55
61
|
export interface DataplyTreeValue<T> {
|
|
56
62
|
k: number;
|
|
57
63
|
v: T;
|
|
58
64
|
}
|
|
65
|
+
export type DocumentDataplyQueryOptions<T extends DocumentJSON, IC extends IndexConfig<T>> = {
|
|
66
|
+
limit?: number;
|
|
67
|
+
orderBy?: ExtractIndexKeys<T, IC> | '_id';
|
|
68
|
+
sortOrder?: BPTreeOrder;
|
|
69
|
+
};
|
|
59
70
|
/**
|
|
60
71
|
* T가 객체인지 확인하고, 객체라면 하위 키를 재귀적으로 탐색합니다.
|
|
61
72
|
*/
|
|
@@ -64,10 +75,10 @@ type Prev = [never, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17
|
|
|
64
75
|
* T가 객체인지 확인하고, 객체라면 하위 키를 재귀적으로 탐색합니다.
|
|
65
76
|
* Depth 제한을 두어 "Type instantiation is excessively deep and possibly infinite" 에러를 방지합니다.
|
|
66
77
|
*/
|
|
67
|
-
export type DeepFlattenKeys<T, Prefix extends string = "", D extends number =
|
|
78
|
+
export type DeepFlattenKeys<T, Prefix extends string = "", D extends number = 5> = [
|
|
68
79
|
D
|
|
69
|
-
] extends [0] ? never : T extends Primitive ? (Prefix extends `${infer P}.` ? P : never) : T extends readonly any[] ? (
|
|
70
|
-
[K in keyof T & string]: NonNullable<T[K]> extends Primitive ? `${Prefix}${K}` :
|
|
80
|
+
] extends [0] ? never : T extends Primitive ? (Prefix extends `${infer P}.` ? P : never) : T extends readonly any[] ? (DeepFlattenKeys<T[number], `${Prefix}${number}.`, Prev[D]>) : T extends object ? {
|
|
81
|
+
[K in keyof T & string]: NonNullable<T[K]> extends Primitive ? `${Prefix}${K}` : DeepFlattenKeys<NonNullable<T[K]>, `${Prefix}${K}.`, Prev[D]>;
|
|
71
82
|
}[keyof T & string] : never;
|
|
72
83
|
/**
|
|
73
84
|
* 경로 문자열(Path)을 기반으로 원본 객체(T)에서 타입을 찾아옵니다.
|
|
@@ -77,15 +88,26 @@ type GetTypeByPath<T, Path extends string> = T extends readonly (infer U)[] ? Pa
|
|
|
77
88
|
export type FinalFlatten<T> = {
|
|
78
89
|
[P in DeepFlattenKeys<T>]: GetTypeByPath<T, P & string>;
|
|
79
90
|
};
|
|
80
|
-
export
|
|
91
|
+
export type DocumentDataplyIndices<T extends DocumentJSON, IC extends IndexConfig<T>> = {
|
|
92
|
+
[key in keyof IC & keyof FinalFlatten<T>]: GetTypeByPath<T, key>;
|
|
93
|
+
};
|
|
94
|
+
/**
|
|
95
|
+
* Index configuration type - keys are field names, values are boolean
|
|
96
|
+
*/
|
|
97
|
+
export type IndexConfig<T> = Partial<{
|
|
98
|
+
[key in keyof FinalFlatten<T>]: boolean;
|
|
99
|
+
}>;
|
|
100
|
+
/**
|
|
101
|
+
* Extract index keys from IndexConfig
|
|
102
|
+
*/
|
|
103
|
+
export type ExtractIndexKeys<T extends DocumentJSON, IC extends IndexConfig<T>> = keyof IC & keyof FinalFlatten<DataplyDocument<T>> & string;
|
|
104
|
+
export interface DocumentDataplyOptions<T, IC extends IndexConfig<T> = IndexConfig<T>> extends DataplyOptions {
|
|
81
105
|
/**
|
|
82
|
-
*
|
|
83
|
-
* If not specified, no
|
|
106
|
+
* Indices to create when initializing the database.
|
|
107
|
+
* If not specified, no indices will be created.
|
|
84
108
|
* If the value of the index is `true`, the index will be created for the already inserted data.
|
|
85
109
|
* If the value of the index is `false`, the index will not be created for the already inserted data.
|
|
86
110
|
*/
|
|
87
|
-
|
|
88
|
-
[key: string]: boolean;
|
|
89
|
-
};
|
|
111
|
+
indices?: IC;
|
|
90
112
|
}
|
|
91
113
|
export {};
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "document-dataply",
|
|
3
|
-
"version": "0.0.
|
|
4
|
-
"description": "",
|
|
3
|
+
"version": "0.0.3-alpha.0",
|
|
4
|
+
"description": "Simple and powerful JSON document database supporting complex queries and flexible indexing policies.",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"author": "izure <admin@izure.org>",
|
|
7
7
|
"type": "commonjs",
|
|
@@ -25,8 +25,21 @@
|
|
|
25
25
|
"test": "jest -i",
|
|
26
26
|
"build": "node build/index.js && tsc"
|
|
27
27
|
},
|
|
28
|
+
"keywords": [
|
|
29
|
+
"database",
|
|
30
|
+
"document-database",
|
|
31
|
+
"nosql",
|
|
32
|
+
"json",
|
|
33
|
+
"indexing",
|
|
34
|
+
"bptree",
|
|
35
|
+
"transactions",
|
|
36
|
+
"acid",
|
|
37
|
+
"embedded-database",
|
|
38
|
+
"deep-indexing",
|
|
39
|
+
"dataply"
|
|
40
|
+
],
|
|
28
41
|
"dependencies": {
|
|
29
|
-
"dataply": "^0.0.18
|
|
42
|
+
"dataply": "^0.0.18"
|
|
30
43
|
},
|
|
31
44
|
"devDependencies": {
|
|
32
45
|
"@types/jest": "^30.0.0",
|