document-dataply 0.0.2-alpha.3 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +65 -44
- package/dist/cjs/index.js +216 -105
- package/dist/types/core/document.d.ts +21 -12
- package/dist/types/types/index.d.ts +19 -8
- package/package.json +16 -3
package/README.md
CHANGED
@@ -4,23 +4,21 @@
 > **This project is currently in the Alpha stage.**
 > APIs and internal structures may change significantly between versions. Use with caution in production environments.
 
-`document-dataply` is a high-performance
+`document-dataply` is a high-performance document-oriented database library built on top of the [`dataply`](https://github.com/izure1/dataply) record storage engine. It provides a structured way to store, index, and query JSON-style documents, supporting transactions and complex field indexing.
 
-## Features
+## Key Features
 
-- **Document-Oriented**: Store and retrieve JSON-
-- **B+Tree Indexing**:
+- **Document-Oriented**: Store and retrieve JSON-style documents.
+- **B+Tree Indexing**: Supports high-performance lookups using a B+Tree indexing engine.
 - **Deep Indexing**: Index nested object fields and specific array elements (e.g., `user.profile.name` or `tags.0`).
-- **Flexible Indexing
+- **Flexible Indexing Policies**: Supports full re-indexing for existing data or incremental indexing for future data.
 - **Transactions**: ACID-compliant transactions for atomic operations.
-- **Rich Querying**:
+- **Rich Querying**: Supports comparison operators (`lt`, `gt`, `equal`, etc.) and pattern matching (`like`).
 
 ## Installation
 
 ```bash
 npm install document-dataply
-# or
-yarn add document-dataply
 ```
 
 ## Quick Start
@@ -28,39 +26,48 @@ yarn add document-dataply
 ```typescript
 import { DocumentDataply } from 'document-dataply';
 
+type MyDocument = {
+  name: string;
+  age: number;
+  tags: string[];
+}
+
 async function main() {
-  const db = new DocumentDataply<{
-    name: string;
-    age: number;
-    tags: string[];
-  }>('my-database.db', {
+  const db = new DocumentDataply<MyDocument>('my-database.db', {
     wal: 'my-database.wal',
     indices: {
-      name: true, // Index existing and new data
+      name: true, // Index both existing and new data
       age: false, // Index only new data
       'tags.0': true // Index the first element of the 'tags' array
     }
   });
 
-  // Initialize
+  // Initialize database
   await db.init();
 
-  // Insert
+  // Insert document
   const id = await db.insert({
     name: 'John Doe',
     age: 30,
     tags: ['admin', 'developer']
   });
 
-  // Query
-  const
+  // Query document
+  const query = db.select({
     name: 'John Doe',
     age: { gte: 25 }
-  })
+  })
 
-
+  // Get all results
+  const allResults = await query.drain();
+  // Or iterate through results
+  for await (const doc of query.stream) {
+    console.log(doc);
+  }
 
-
+  console.log(allResults);
+
+  // Close database
   await db.close();
 }
 
@@ -71,17 +78,17 @@ main();
 
 ### Indexing Policies
 
-When defining indices in the constructor, you can specify a boolean value
+When defining indices in the constructor, you can specify a boolean value.
 
-- `true`: The library
-- `false`: The library
+- `true`: The library indexes all existing documents for that field during `init()`, and also indexes all subsequent insertions.
+- `false`: The library only indexes documents inserted after this configuration.
 
 > [!NOTE]
-> `db.init()` automatically performs
+> `db.init()` automatically performs a backfilling process for fields marked as `true`.
 
 ### Batch Insertion
 
-
+To efficiently insert multiple documents, use the following:
 
 ```typescript
 const ids = await db.insertBatch([
@@ -92,7 +99,7 @@ const ids = await db.insertBatch([
 
 ### Querying
 
-`document-dataply` supports various comparison operators
+`document-dataply` supports various comparison operators.
 
 | Operator | Description |
 | :--- | :--- |
@@ -102,8 +109,8 @@ const ids = await db.insertBatch([
 | `gte` | Greater than or equal to |
 | `equal` | Equal to |
 | `notEqual` | Not equal to |
-| `like` | SQL-
-| `or` |
+| `like` | SQL-style pattern matching (e.g., `Jo%`) |
+| `or` | If any value in the array is satisfied |
 
 Example of a complex query:
 ```typescript
@@ -111,23 +118,23 @@ const users = await db.select({
   age: { gt: 18, lt: 65 },
   'address.city': 'Seoul',
   tags: { or: ['vip', 'premium'] }
-});
+}).drain();
 ```
 
 > [!IMPORTANT]
-> **Query Constraints**:
+> **Query Constraints**: Query conditions (`lt`, `gt`, `equal`, etc.) can only be used on fields explicitly indexed in the constructor.
 >
-> **If a field in the query is not indexed,
+> **If a field in the query is not indexed, that condition will be ignored.**
 >
-> If you need to filter by
-
-
-
-
+> If you need to filter by unindexed fields, you should first retrieve the documents and then use JavaScript's native `.filter()` method.
+```typescript
+const results = await db.select({ /* indexed fields only */ }).drain();
+const filtered = results.filter(doc => doc.unindexedField === 'some-value');
+```
 
 ### Transactions
 
-
+To ensure the atomicity of multiple operations, use transactions.
 
 ```typescript
 const tx = db.createTransaction();
@@ -141,31 +148,45 @@ try {
 }
 ```
 
+## Tips and Advanced Features
+
+For more information on performance optimization and advanced features, see [TIPS.md](./docs/TIPS.md).
+
+- **Query Optimization**: Automatic index selection for maximum performance.
+- **Sorting and Pagination**: Detailed usage of `limit`, `orderBy`, and `sortOrder`.
+- **Memory Management**: When to use `stream` vs `drain()`.
+- **Performance**: Optimizing bulk data insertion using `insertBatch`.
+- **Indexing Policies**: Deep dive into index backfilling and configuration.
+
 ## API Reference
 
 ### `new DocumentDataply<T>(file, options)`
 Creates a new database instance. `T` defines the document structure.
+`options.indices` is an object where keys are field names and values are booleans indicating whether to index.
 
 ### `db.init()`
 Initializes the database, sets up internal metadata, and prepares indices.
 
 ### `db.insert(document, tx?)`
-Inserts a single document. Returns the
+Inserts a single document. Returns the `_id` (`number`) of the document.
 
 ### `db.insertBatch(documents, tx?)`
-Inserts multiple documents efficiently. Returns an array of `
+Inserts multiple documents efficiently. Returns an array of `_ids` (`number[]`).
 
-### `db.select(query,
-
+### `db.select(query, options?, tx?)`
+Searches for documents matching the query.
+Returns an object `{ stream, drain }`.
+- `stream`: An async iterator to traverse results one by one.
+- `drain()`: A promise that resolves to an array of all matching documents.
 
 ### `db.getMetadata(tx?)`
-Returns physical storage information (
+Returns physical storage information (number of pages, number of rows, etc.).
 
 ### `db.createTransaction()`
 Returns a new `Transaction` object.
 
 ### `db.close()`
-Flushes changes and closes the database
+Flushes changes and closes the database files.
 
 ## License
 
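The rewritten README above switches `select()` from returning an array to returning a `{ stream, drain }` handle and mentions `limit`, `orderBy`, and `sortOrder` options. The following is a minimal usage sketch assembled only from that README text and the type declarations further down in this diff; the file names and option values are illustrative assumptions, not excerpts from the package.

```typescript
import { DocumentDataply } from 'document-dataply';

type User = { name: string; age: number; tags: string[] };

async function example() {
  const db = new DocumentDataply<User>('users.db', {
    wal: 'users.wal',
    indices: { name: true, age: true } // only indexed fields may appear in queries
  });
  await db.init();

  // drain(): buffer every match into an array (simple, but holds all rows in memory).
  const adults = await db
    .select({ age: { gte: 18 } }, { orderBy: 'age', sortOrder: 'desc', limit: 10 })
    .drain();

  // stream: pull matches one at a time (constant memory, can stop early).
  for await (const user of db.select({ name: { like: 'Jo%' } }).stream) {
    console.log(user.name);
  }

  await db.close();
  return adults;
}
```

The rule of thumb implied by the README's memory-management tip: prefer `drain()` for small result sets and `stream` when results may be large or you may stop early.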
package/dist/cjs/index.js
CHANGED
@@ -1795,6 +1795,31 @@ var require_cjs = __commonJS({
       }
       return true;
     }
+    /**
+     * Selects the best driver key from a condition object.
+     * The driver key determines the starting point and traversal direction for queries.
+     *
+     * @param condition The condition to analyze.
+     * @returns The best driver key or null if no valid key found.
+     */
+    getDriverKey(condition) {
+      if ("primaryEqual" in condition) return "primaryEqual";
+      if ("equal" in condition) return "equal";
+      if ("gt" in condition) return "gt";
+      if ("gte" in condition) return "gte";
+      if ("lt" in condition) return "lt";
+      if ("lte" in condition) return "lte";
+      if ("primaryGt" in condition) return "primaryGt";
+      if ("primaryGte" in condition) return "primaryGte";
+      if ("primaryLt" in condition) return "primaryLt";
+      if ("primaryLte" in condition) return "primaryLte";
+      if ("like" in condition) return "like";
+      if ("notEqual" in condition) return "notEqual";
+      if ("primaryNotEqual" in condition) return "primaryNotEqual";
+      if ("or" in condition) return "or";
+      if ("primaryOr" in condition) return "primaryOr";
+      return null;
+    }
     constructor(rootTx, mvccRoot, mvcc, strategy, comparator, option) {
       this.rootTx = rootTx === null ? this : rootTx;
       this.mvccRoot = mvccRoot;
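The `getDriverKey` method added above replaces an inline `if`/`else if` chain (removed in the next hunks) with a single priority lookup: the first operator present in a condition becomes the "driver" that decides where the index scan begins. A standalone sketch of the same pattern, with hypothetical names, assuming the priority order shown in the diff:

```typescript
// Priority-ordered driver selection: the first operator present in the
// condition wins. Equality comes first because it narrows the scan the most.
const DRIVER_PRIORITY = [
  'primaryEqual', 'equal',
  'gt', 'gte', 'lt', 'lte',
  'primaryGt', 'primaryGte', 'primaryLt', 'primaryLte',
  'like', 'notEqual', 'primaryNotEqual', 'or', 'primaryOr',
] as const;

type DriverKey = typeof DRIVER_PRIORITY[number];

function pickDriverKey(condition: Record<string, unknown>): DriverKey | null {
  for (const key of DRIVER_PRIORITY) {
    if (key in condition) return key; // first match in priority order
  }
  return null;
}

console.log(pickDriverKey({ gte: 10, lt: 20 })); // "gte" — checked before "lt"
console.log(pickDriverKey({}));                  // null — no usable operator
```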
@@ -2235,8 +2260,8 @@ var require_cjs = __commonJS({
       }
       return void 0;
     }
-    *keysStream(condition, filterValues, limit) {
-      const stream = this.whereStream(condition, limit);
+    *keysStream(condition, filterValues, limit, order = "asc") {
+      const stream = this.whereStream(condition, limit, order);
       const intersection = filterValues && filterValues.size > 0 ? filterValues : null;
       for (const [key] of stream) {
         if (intersection && !intersection.has(key)) {
@@ -2245,30 +2270,20 @@ var require_cjs = __commonJS({
         yield key;
       }
     }
-    *whereStream(condition, limit) {
-
-      if ("primaryEqual" in condition) driverKey = "primaryEqual";
-      else if ("equal" in condition) driverKey = "equal";
-      else if ("gt" in condition) driverKey = "gt";
-      else if ("gte" in condition) driverKey = "gte";
-      else if ("lt" in condition) driverKey = "lt";
-      else if ("lte" in condition) driverKey = "lte";
-      else if ("primaryGt" in condition) driverKey = "primaryGt";
-      else if ("primaryGte" in condition) driverKey = "primaryGte";
-      else if ("primaryLt" in condition) driverKey = "primaryLt";
-      else if ("primaryLte" in condition) driverKey = "primaryLte";
-      else if ("like" in condition) driverKey = "like";
-      else if ("notEqual" in condition) driverKey = "notEqual";
-      else if ("primaryNotEqual" in condition) driverKey = "primaryNotEqual";
-      else if ("or" in condition) driverKey = "or";
-      else if ("primaryOr" in condition) driverKey = "primaryOr";
+    *whereStream(condition, limit, order = "asc") {
+      const driverKey = this.getDriverKey(condition);
       if (!driverKey) return;
       const value = condition[driverKey];
-
-
-
+      let startNode = this.verifierStartNode[driverKey](value);
+      let endNode = this.verifierEndNode[driverKey](value);
+      let direction = this.verifierDirection[driverKey];
       const comparator = this.verifierMap[driverKey];
       const earlyTerminate = this.verifierEarlyTerminate[driverKey];
+      if (order === "desc") {
+        startNode = endNode ?? this.rightestNode();
+        endNode = null;
+        direction *= -1;
+      }
       const generator = this.getPairsGenerator(
         value,
         startNode,
@@ -2299,16 +2314,16 @@ var require_cjs = __commonJS({
         }
       }
     }
-    keys(condition, filterValues) {
+    keys(condition, filterValues, order = "asc") {
       const set = /* @__PURE__ */ new Set();
-      for (const key of this.keysStream(condition, filterValues)) {
+      for (const key of this.keysStream(condition, filterValues, void 0, order)) {
         set.add(key);
       }
       return set;
     }
-    where(condition) {
+    where(condition, order = "asc") {
       const map = /* @__PURE__ */ new Map();
-      for (const [key, value] of this.whereStream(condition)) {
+      for (const [key, value] of this.whereStream(condition, void 0, order)) {
         map.set(key, value);
       }
       return map;
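These hunks thread a new `order` parameter through the synchronous `keysStream`/`whereStream`/`keys`/`where` methods; for `"desc"` the scan starts from the end node (or the right-most leaf) and the traversal direction is negated. The sketch below shows the same idea over a plain sorted array rather than B+Tree nodes; it is a conceptual illustration, not the library's implementation:

```typescript
// Walk a sorted array between two bounds, forwards or backwards.
// Mirrors whereStream's idea: for "desc", start from the upper end and flip
// the step direction instead of collecting everything and reversing it.
function* rangeScan(
  sorted: number[],
  lower: number,
  upper: number,
  order: 'asc' | 'desc' = 'asc'
): Generator<number> {
  let i = order === 'asc' ? 0 : sorted.length - 1;
  const step = order === 'asc' ? 1 : -1;
  for (; i >= 0 && i < sorted.length; i += step) {
    const v = sorted[i];
    if (v < lower || v > upper) continue; // outside the requested range
    yield v;
  }
}

console.log([...rangeScan([1, 3, 5, 7, 9], 3, 7)]);         // [3, 5, 7]
console.log([...rangeScan([1, 3, 5, 7, 9], 3, 7, 'desc')]); // [7, 5, 3]
```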
@@ -3064,8 +3079,8 @@ var require_cjs = __commonJS({
       }
       return void 0;
     }
-    async *keysStream(condition, filterValues, limit) {
-      const stream = this.whereStream(condition, limit);
+    async *keysStream(condition, filterValues, limit, order = "asc") {
+      const stream = this.whereStream(condition, limit, order);
       const intersection = filterValues && filterValues.size > 0 ? filterValues : null;
       for await (const [key] of stream) {
         if (intersection && !intersection.has(key)) {
@@ -3074,30 +3089,20 @@ var require_cjs = __commonJS({
         yield key;
       }
     }
-    async *whereStream(condition, limit) {
-
-      if ("primaryEqual" in condition) driverKey = "primaryEqual";
-      else if ("equal" in condition) driverKey = "equal";
-      else if ("gt" in condition) driverKey = "gt";
-      else if ("gte" in condition) driverKey = "gte";
-      else if ("lt" in condition) driverKey = "lt";
-      else if ("lte" in condition) driverKey = "lte";
-      else if ("primaryGt" in condition) driverKey = "primaryGt";
-      else if ("primaryGte" in condition) driverKey = "primaryGte";
-      else if ("primaryLt" in condition) driverKey = "primaryLt";
-      else if ("primaryLte" in condition) driverKey = "primaryLte";
-      else if ("like" in condition) driverKey = "like";
-      else if ("notEqual" in condition) driverKey = "notEqual";
-      else if ("primaryNotEqual" in condition) driverKey = "primaryNotEqual";
-      else if ("or" in condition) driverKey = "or";
-      else if ("primaryOr" in condition) driverKey = "primaryOr";
+    async *whereStream(condition, limit, order = "asc") {
+      const driverKey = this.getDriverKey(condition);
       if (!driverKey) return;
       const value = condition[driverKey];
-
-
-
+      let startNode = await this.verifierStartNode[driverKey](value);
+      let endNode = await this.verifierEndNode[driverKey](value);
+      let direction = this.verifierDirection[driverKey];
       const comparator = this.verifierMap[driverKey];
       const earlyTerminate = this.verifierEarlyTerminate[driverKey];
+      if (order === "desc") {
+        startNode = endNode ?? await this.rightestNode();
+        endNode = null;
+        direction *= -1;
+      }
       const generator = this.getPairsGenerator(
         value,
         startNode,
@@ -3128,16 +3133,16 @@ var require_cjs = __commonJS({
         }
       }
     }
-    async keys(condition, filterValues) {
+    async keys(condition, filterValues, order = "asc") {
       const set = /* @__PURE__ */ new Set();
-      for await (const key of this.keysStream(condition, filterValues)) {
+      for await (const key of this.keysStream(condition, filterValues, void 0, order)) {
        set.add(key);
      }
      return set;
    }
-    async where(condition) {
+    async where(condition, order = "asc") {
      const map = /* @__PURE__ */ new Map();
-      for await (const [key, value] of this.whereStream(condition)) {
+      for await (const [key, value] of this.whereStream(condition, void 0, order)) {
        map.set(key, value);
      }
      return map;
@@ -8855,6 +8860,9 @@ var require_cjs = __commonJS({
     get() {
       return this.storage.getStore();
     }
+    stream(tx, callback) {
+      return this.storage.run(tx, callback);
+    }
   };
   var DataplyAPI2 = class {
     constructor(file, options) {
@@ -9072,6 +9080,38 @@ var require_cjs = __commonJS({
       }
       return result;
     }
+    /**
+     * Runs a generator callback function within a transaction context.
+     * Similar to runWithDefault but allows yielding values from an AsyncGenerator.
+     * If no transaction is provided, a new transaction is created.
+     * The transaction is committed if the generator completes successfully,
+     * or rolled back if an error occurs.
+     * @param callback The generator callback function to run within the transaction context.
+     * @param tx The transaction to use. If not provided, a new transaction is created.
+     * @returns An AsyncGenerator that yields values from the callback.
+     */
+    async *streamWithDefault(callback, tx) {
+      const isInternalTx = !tx;
+      if (!tx) {
+        tx = this.createTransaction();
+      }
+      let hasError = false;
+      try {
+        const generator = this.txContext.stream(tx, () => callback(tx));
+        for await (const value of generator) {
+          yield value;
+        }
+      } catch (error) {
+        hasError = true;
+        if (isInternalTx) {
+          await tx.rollback();
+        }
+        throw error;
+      }
+      if (!hasError && isInternalTx) {
+        await tx.commit();
+      }
+    }
     /**
      * Retrieves metadata from the dataply.
      * @returns Metadata of the dataply.
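`streamWithDefault`, added above, is the async-generator counterpart of `runWithDefault`: it creates a transaction when none is supplied, forwards every yielded value, rolls back the internally created transaction on error, and commits it once the generator finishes. A reduced sketch of that control flow with a toy transaction object (the `begin` helper here is hypothetical, not part of dataply):

```typescript
interface Tx { commit(): Promise<void>; rollback(): Promise<void>; }

// Toy transaction used only for this sketch.
function begin(): Tx {
  return {
    commit: async () => console.log('commit'),
    rollback: async () => console.log('rollback'),
  };
}

async function* withTransaction<T>(
  body: (tx: Tx) => AsyncGenerator<T>,
  tx?: Tx
): AsyncGenerator<T> {
  const owned = !tx;                // did we create the transaction ourselves?
  const t = tx ?? begin();
  try {
    yield* body(t);                 // forward every value the body yields
  } catch (err) {
    if (owned) await t.rollback();  // only roll back a transaction we own
    throw err;
  }
  if (owned) await t.commit();      // commit once the body has finished yielding
}
```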
@@ -9369,12 +9409,12 @@ var DocumentSerializeStrategyAsync = class extends import_dataply.SerializeStrat
   async readHead() {
     const tx = this.txContext.get();
     const metadata = await this.api.getDocumentInnerMetadata(tx);
-    const indexInfo = metadata.
+    const indexInfo = metadata.indices[this.treeKey];
     if (!indexInfo) return null;
     const headPk = indexInfo[0];
     if (headPk === -1) {
       const pk = await this.api.insertAsOverflow("__BPTREE_HEAD_PLACEHOLDER__", false, tx);
-      metadata.
+      metadata.indices[this.treeKey][0] = pk;
       await this.api.updateDocumentInnerMetadata(metadata, tx);
       return null;
     }
@@ -9385,7 +9425,7 @@ var DocumentSerializeStrategyAsync = class extends import_dataply.SerializeStrat
   async writeHead(head) {
     const tx = this.txContext.get();
     const metadata = await this.api.getDocumentInnerMetadata(tx);
-    const indexInfo = metadata.
+    const indexInfo = metadata.indices[this.treeKey];
     if (!indexInfo) {
       throw new Error(`Index info not found for tree: ${this.treeKey}. Initialization should be handled outside.`);
     }
@@ -9424,7 +9464,7 @@ async function catchPromise(promise) {
 
 // src/core/document.ts
 var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
-
+  indices = {};
   trees = /* @__PURE__ */ new Map();
   comparator = new DocumentValueComparator();
   pendingBackfillFields = [];
@@ -9441,18 +9481,18 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
       throw new Error("Document metadata verification failed");
     }
     const metadata = await this.getDocumentInnerMetadata(tx);
-    const
-    const
-      ...
+    const optionsIndices = options.indices ?? {};
+    const targetIndices = {
+      ...optionsIndices,
       _id: true
     };
     const backfillTargets = [];
     let isMetadataChanged = false;
-    for (const field in
-      const isBackfillEnabled =
-      const existingIndex = metadata.
+    for (const field in targetIndices) {
+      const isBackfillEnabled = targetIndices[field];
+      const existingIndex = metadata.indices[field];
       if (!existingIndex) {
-        metadata.
+        metadata.indices[field] = [-1, isBackfillEnabled];
         isMetadataChanged = true;
         if (isBackfillEnabled && !isNewlyCreated) {
           backfillTargets.push(field);
@@ -9460,11 +9500,11 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
       } else {
         const [_pk, isMetaBackfillEnabled] = existingIndex;
         if (isBackfillEnabled && !isMetaBackfillEnabled) {
-          metadata.
+          metadata.indices[field][1] = true;
           isMetadataChanged = true;
           backfillTargets.push(field);
         } else if (!isBackfillEnabled && isMetaBackfillEnabled) {
-          metadata.
+          metadata.indices[field][1] = false;
           isMetadataChanged = true;
         }
       }
@@ -9472,9 +9512,9 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
     if (isMetadataChanged) {
       await this.updateDocumentInnerMetadata(metadata, tx);
     }
-    this.
-    for (const field in this.
-      if (field in
+    this.indices = metadata.indices;
+    for (const field in this.indices) {
+      if (field in targetIndices) {
        const tree = new import_dataply3.BPTreeAsync(
          new DocumentSerializeStrategyAsync(
            this.rowTableEngine.order,
@@ -9576,14 +9616,14 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
      return backfilledCount;
    }, tx);
  }
-  createDocumentInnerMetadata(
+  createDocumentInnerMetadata(indices) {
    return {
      magicString: "document-dataply",
      version: 1,
      createdAt: Date.now(),
      updatedAt: Date.now(),
      lastId: 0,
-
+      indices
    };
  }
  async initializeDocumentFile(tx) {
@@ -9645,6 +9685,7 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
 };
 var DocumentDataply = class {
   api;
+  indexedFields;
   operatorConverters = {
     equal: "primaryEqual",
     notEqual: "primaryNotEqual",
@@ -9657,6 +9698,12 @@ var DocumentDataply = class {
   };
   constructor(file, options) {
     this.api = new DocumentDataplyAPI(file, options ?? {});
+    this.indexedFields = /* @__PURE__ */ new Set(["_id"]);
+    if (options?.indices) {
+      for (const field of Object.keys(options.indices)) {
+        this.indexedFields.add(field);
+      }
+    }
   }
   /**
    * Initialize the document database
@@ -9706,16 +9753,29 @@ var DocumentDataply = class {
   }
   /**
    * Get the selectivity candidate for the given query
-   * @param query
-   * @
+   * @param query The query conditions
+   * @param orderByField Optional field name for orderBy optimization
+   * @returns Driver and other candidates for query execution
    */
-  async getSelectivityCandidate(query) {
+  async getSelectivityCandidate(query, orderByField) {
     const candidates = [];
     for (const field in query) {
       const tree = this.api.trees.get(field);
       if (!tree) continue;
       const condition = query[field];
-      candidates.push({ tree, condition });
+      candidates.push({ tree, condition, field });
+    }
+    if (candidates.length === 0) {
+      return null;
+    }
+    if (orderByField) {
+      const orderByCandidate = candidates.find((c) => c.field === orderByField);
+      if (orderByCandidate) {
+        return {
+          driver: orderByCandidate,
+          others: candidates.filter((c) => c.field !== orderByField)
+        };
+      }
     }
     let res = import_dataply3.BPTreeAsync.ChooseDriver(candidates);
     if (!res && candidates.length > 0) {
@@ -9723,10 +9783,7 @@ var DocumentDataply = class {
     }
     if (!res) return null;
     return {
-      driver:
-        tree: res.tree,
-        condition: res.condition
-      },
+      driver: res,
       others: candidates.filter((c) => c.tree !== res.tree)
     };
   }
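The reworked `getSelectivityCandidate` now records the `field` of each candidate and, when an indexed `orderByField` is supplied, promotes that candidate to driver so the ordered `select()` path (next hunk) can stream results straight off that index. A small standalone sketch of that preference rule, using a hypothetical candidate shape with a numeric selectivity score in place of the real tree objects:

```typescript
type Candidate = { field: string; selectivity: number };

// Prefer the candidate whose field matches orderBy; otherwise fall back to
// the most selective one (lowest score). Mirrors the driver/others split above.
function chooseDriver(candidates: Candidate[], orderBy?: string) {
  if (candidates.length === 0) return null;
  const preferred = orderBy && candidates.find(c => c.field === orderBy);
  const driver = preferred ||
    candidates.reduce((a, b) => (a.selectivity <= b.selectivity ? a : b));
  return { driver, others: candidates.filter(c => c !== driver) };
}

console.log(chooseDriver(
  [{ field: 'name', selectivity: 0.1 }, { field: 'age', selectivity: 0.5 }],
  'age'
)); // driver is the 'age' candidate even though 'name' is more selective
```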
@@ -9814,43 +9871,97 @@ var DocumentDataply = class {
   }
   /**
    * Select documents from the database
-   * @param query The query to use
-   * @param
+   * @param query The query to use (only indexed fields + _id allowed)
+   * @param options The options to use
    * @param tx The transaction to use
    * @returns The documents that match the query
+   * @throws Error if query or orderBy contains non-indexed fields
    */
-
-
-
-
-
-
+  select(query, options = {}, tx) {
+    for (const field of Object.keys(query)) {
+      if (!this.indexedFields.has(field)) {
+        throw new Error(`Query field "${field}" is not indexed. Available indexed fields: ${Array.from(this.indexedFields).join(", ")}`);
+      }
+    }
+    const orderBy = options.orderBy ?? "_id";
+    if (!this.indexedFields.has(orderBy)) {
+      throw new Error(`orderBy field "${orderBy}" is not indexed. Available indexed fields: ${Array.from(this.indexedFields).join(", ")}`);
+    }
+    const {
+      limit = Infinity,
+      sortOrder = "asc"
+    } = options;
+    const self = this;
+    const stream = this.api.streamWithDefault(async function* (tx2) {
+      const isQueryEmpty = Object.keys(query).length === 0;
+      const normalizedQuery = isQueryEmpty ? { _id: { gte: 0 } } : query;
+      const verbose = self.verboseQuery(normalizedQuery);
+      const orderByTree = self.api.trees.get(orderBy);
+      const selectivity = await self.getSelectivityCandidate(
+        verbose,
+        orderByTree ? orderBy : void 0
+      );
+      if (!selectivity) return;
       const { driver, others } = selectivity;
-      const
-
-
-
-
-      if (
-
-
+      const isDriverOrderByField = orderByTree && driver.field === orderBy;
+      if (isDriverOrderByField) {
+        const driverStream = driver.tree.whereStream(driver.condition, limit, sortOrder);
+        let i = 0;
+        for await (const [pk, val] of driverStream) {
+          if (i >= limit) break;
+          let isMatch = true;
+          for (const { tree, condition } of others) {
+            const targetValue = await tree.get(pk);
+            if (targetValue === void 0 || !tree.verify(targetValue, condition)) {
+              isMatch = false;
+              break;
+            }
+          }
+          if (isMatch) {
+            const stringified = await self.api.select(pk, false, tx2);
+            if (!stringified) continue;
+            yield JSON.parse(stringified);
+            i++;
          }
        }
-
-
-
+      } else {
+        const results = [];
+        const driverStream = driver.tree.whereStream(driver.condition);
+        for await (const [pk, val] of driverStream) {
+          let isMatch = true;
+          for (const { tree, condition } of others) {
+            const targetValue = await tree.get(pk);
+            if (targetValue === void 0 || !tree.verify(targetValue, condition)) {
+              isMatch = false;
+              break;
+            }
+          }
+          if (isMatch) {
+            const stringified = await self.api.select(pk, false, tx2);
+            if (!stringified) continue;
+            results.push(JSON.parse(stringified));
+          }
        }
-
-
-
-
-
-
+        results.sort((a, b) => {
+          const aVal = a[orderBy] ?? a._id;
+          const bVal = b[orderBy] ?? b._id;
+          const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
+          return sortOrder === "asc" ? cmp : -cmp;
+        });
+        const limitedResults = results.slice(0, limit === Infinity ? void 0 : limit);
+        for (const doc of limitedResults) {
+          yield doc;
        }
-        documents.push(JSON.parse(stringify));
      }
-      return documents;
    }, tx);
+    const drain = async () => {
+      const result = [];
+      for await (const document of stream) {
+        result.push(document);
+      }
+      return result;
+    };
+    return { stream, drain };
   }
   /**
    * Close the document database
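When the driver index does not match `orderBy`, the new `select()` above falls back to buffering every match, sorting in JavaScript, and slicing to `limit`. The comparator in that fallback branch is essentially the following, shown here as a standalone hedged sketch (the field names and document shape are illustrative):

```typescript
type Doc = { _id: number; [key: string]: number };

// Order by the requested field, fall back to _id when the field is missing,
// and flip the sign for descending order — as in the buffered branch above.
function byField(field: string, order: 'asc' | 'desc') {
  return (a: Doc, b: Doc): number => {
    const av = a[field] ?? a._id;
    const bv = b[field] ?? b._id;
    const cmp = av < bv ? -1 : av > bv ? 1 : 0;
    return order === 'asc' ? cmp : -cmp;
  };
}

const docs: Doc[] = [{ _id: 1, age: 30 }, { _id: 2, age: 25 }, { _id: 3, age: 40 }];
console.log([...docs].sort(byField('age', 'desc')).slice(0, 2)); // ages 40 and 30
```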
package/dist/types/core/document.d.ts
CHANGED

@@ -1,14 +1,15 @@
-import type { DataplyTreeValue, DocumentDataplyInnerMetadata, DocumentDataplyOptions, DocumentJSON, FlattenedDocumentJSON, Primitive, DocumentDataplyQuery, FinalFlatten, DocumentDataplyCondition, DataplyDocument, DocumentDataplyMetadata } from '../types';
+import type { DataplyTreeValue, DocumentDataplyInnerMetadata, DocumentDataplyOptions, DocumentJSON, FlattenedDocumentJSON, Primitive, DocumentDataplyQuery, FinalFlatten, DocumentDataplyCondition, DataplyDocument, DocumentDataplyMetadata, DocumentDataplyQueryOptions, IndexedDocumentDataplyQuery } from '../types';
 import { DataplyAPI, Transaction, BPTreeAsync } from 'dataply';
 import { DocumentValueComparator } from './bptree/documentComparator';
 export declare class DocumentDataplyAPI<T extends DocumentJSON> extends DataplyAPI {
   runWithDefault: <T_1>(callback: (tx: Transaction) => Promise<T_1>, tx?: Transaction) => Promise<T_1>;
-
+  streamWithDefault: <T_1>(callback: (tx: Transaction) => AsyncGenerator<T_1>, tx?: Transaction) => AsyncGenerator<T_1>;
+  indices: DocumentDataplyInnerMetadata['indices'];
   readonly trees: Map<string, BPTreeAsync<number, DataplyTreeValue<Primitive>>>;
   readonly comparator: DocumentValueComparator<DataplyTreeValue<Primitive>, Primitive>;
   private pendingBackfillFields;
   private readonly lock;
-  constructor(file: string, options: DocumentDataplyOptions);
+  constructor(file: string, options: DocumentDataplyOptions<T>);
   readLock<T>(fn: () => T): Promise<T>;
   writeLock<T>(fn: () => T): Promise<T>;
   getDocument(pk: number, tx?: Transaction): Promise<DataplyDocument<T>>;
@@ -20,7 +21,7 @@ export declare class DocumentDataplyAPI<T extends DocumentJSON> extends DataplyA
    * @returns Number of documents that were backfilled
    */
   backfillIndices(tx?: Transaction): Promise<number>;
-  createDocumentInnerMetadata(
+  createDocumentInnerMetadata(indices: DocumentDataplyInnerMetadata['indices']): DocumentDataplyInnerMetadata;
   initializeDocumentFile(tx: Transaction): Promise<void>;
   verifyDocumentFile(tx: Transaction): Promise<boolean>;
   /**
@@ -33,10 +34,11 @@ export declare class DocumentDataplyAPI<T extends DocumentJSON> extends DataplyA
   getDocumentInnerMetadata(tx: Transaction): Promise<DocumentDataplyInnerMetadata>;
   updateDocumentInnerMetadata(metadata: DocumentDataplyInnerMetadata, tx: Transaction): Promise<void>;
 }
-export declare class DocumentDataply<T extends DocumentJSON> {
+export declare class DocumentDataply<T extends DocumentJSON, I extends string = keyof FinalFlatten<T> & string> {
   protected readonly api: DocumentDataplyAPI<T>;
+  private readonly indexedFields;
   private readonly operatorConverters;
-  constructor(file: string, options?: DocumentDataplyOptions);
+  constructor(file: string, options?: DocumentDataplyOptions<T>);
   /**
    * Initialize the document database
    */
@@ -52,17 +54,20 @@ export declare class DocumentDataply<T extends DocumentJSON> {
   private verboseQuery;
   /**
    * Get the selectivity candidate for the given query
-   * @param query
-   * @
+   * @param query The query conditions
+   * @param orderByField Optional field name for orderBy optimization
+   * @returns Driver and other candidates for query execution
    */
-  getSelectivityCandidate<U extends FinalFlatten<DataplyDocument<T>>, V extends DataplyTreeValue<U>>(query: Partial<DocumentDataplyQuery<V
+  getSelectivityCandidate<U extends Partial<IndexedDocumentDataplyQuery<FinalFlatten<DataplyDocument<T>>, I>>, V extends DataplyTreeValue<U>>(query: Partial<DocumentDataplyQuery<V>>, orderByField?: string): Promise<{
     driver: {
       tree: BPTreeAsync<number, V>;
       condition: Partial<DocumentDataplyCondition<U>>;
+      field: string;
     };
     others: {
       tree: BPTreeAsync<number, V>;
       condition: Partial<DocumentDataplyCondition<U>>;
+      field: string;
     }[];
   } | null>;
   private insertDocument;
@@ -82,12 +87,16 @@ export declare class DocumentDataply<T extends DocumentJSON> {
   insertBatch(documents: T[], tx?: Transaction): Promise<number[]>;
   /**
    * Select documents from the database
-   * @param query The query to use
-   * @param
+   * @param query The query to use (only indexed fields + _id allowed)
+   * @param options The options to use
    * @param tx The transaction to use
    * @returns The documents that match the query
+   * @throws Error if query or orderBy contains non-indexed fields
    */
-  select(query: Partial<
+  select(query: Partial<IndexedDocumentDataplyQuery<FinalFlatten<DataplyDocument<T>>, I>>, options?: DocumentDataplyQueryOptions<FinalFlatten<DataplyDocument<T>>, I>, tx?: Transaction): {
+    stream: AsyncIterableIterator<DataplyDocument<T>>;
+    drain: () => Promise<DataplyDocument<T>[]>;
+  };
   /**
    * Close the document database
   */
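The declaration changes above add a second type parameter `I` (defaulting to every flattened key of `T`) and type both `select()` and its options against `IndexedDocumentDataplyQuery`/`DocumentDataplyQueryOptions`, so non-indexed fields can be rejected at compile time. A rough illustration of the intent, using simplified stand-ins for the declared types (not the package's actual definitions):

```typescript
// Simplified stand-ins for the declared types; illustration only.
type Condition<V> = { gt?: V; gte?: V; lt?: V; lte?: V; equal?: V; like?: string };

type IndexedQuery<T, I extends string> = {
  // Only the declared indexed keys (plus _id) may appear in a query object.
  [K in I | '_id']?: K extends keyof T ? T[K] | Condition<T[K]> : number | Condition<number>;
};

type User = { name: string; age: number; 'profile.city': string };

// Pretend only 'name' and 'age' were declared in options.indices:
type UserQuery = IndexedQuery<User, 'name' | 'age'>;

const ok: UserQuery = { name: 'John Doe', age: { gte: 25 } };
// const bad: UserQuery = { 'profile.city': 'Seoul' }; // rejected: not an indexed field
console.log(ok);
```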
package/dist/types/types/index.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import type { DataplyOptions } from 'dataply';
+import type { BPTreeOrder, DataplyOptions } from 'dataply';
 export type Primitive = string | number | boolean | null;
 export type JSONValue = Primitive | JSONValue[] | {
   [key: string]: JSONValue;
@@ -15,7 +15,7 @@ export interface DocumentDataplyInnerMetadata {
   createdAt: number;
   updatedAt: number;
   lastId: number;
-
+  indices: {
     [key: string]: [number, boolean];
   };
 }
@@ -47,11 +47,22 @@ export type DocumentDataplyCondition<V> = {
   or?: Partial<V>[];
   like?: string;
 };
+export type DocumentDataplyQueryOptions<V, I extends string = string> = {
+  limit?: number;
+  orderBy?: I | '_id';
+  sortOrder?: BPTreeOrder;
+};
 export type DocumentDataplyQuery<T> = {
   [key in keyof T]?: T[key] | DocumentDataplyCondition<T[key]>;
 } & {
   [key: string]: any;
 };
+/**
+ * Query type restricted to indexed fields only (+ _id)
+ */
+export type IndexedDocumentDataplyQuery<T, I extends string> = {
+  [key in (I | '_id')]?: key extends keyof T ? T[key] | DocumentDataplyCondition<T[key]> : never;
+};
 export interface DataplyTreeValue<T> {
   k: number;
   v: T;
@@ -66,8 +77,8 @@ type Prev = [never, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17
  */
 export type DeepFlattenKeys<T, Prefix extends string = "", D extends number = 12> = [
   D
-] extends [0] ? never : T extends Primitive ? (Prefix extends `${infer P}.` ? P : never) : T extends readonly any[] ? (
-  [K in keyof T & string]: NonNullable<T[K]> extends Primitive ? `${Prefix}${K}` :
+] extends [0] ? never : T extends Primitive ? (Prefix extends `${infer P}.` ? P : never) : T extends readonly any[] ? (DeepFlattenKeys<T[number], `${Prefix}${number}.`, Prev[D]>) : T extends object ? {
+  [K in keyof T & string]: NonNullable<T[K]> extends Primitive ? `${Prefix}${K}` : DeepFlattenKeys<NonNullable<T[K]>, `${Prefix}${K}.`, Prev[D]>;
 }[keyof T & string] : never;
 /**
  * 경로 문자열(Path)을 기반으로 원본 객체(T)에서 타입을 찾아옵니다.
@@ -77,15 +88,15 @@ type GetTypeByPath<T, Path extends string> = T extends readonly (infer U)[] ? Pa
 export type FinalFlatten<T> = {
   [P in DeepFlattenKeys<T>]: GetTypeByPath<T, P & string>;
 };
-export interface DocumentDataplyOptions extends DataplyOptions {
+export interface DocumentDataplyOptions<T> extends DataplyOptions {
   /**
    * Indecies to create when initializing the database.
    * If not specified, no indecies will be created.
    * If the value of the index is `true`, the index will be created for the already inserted data.
    * If the value of the index is `false`, the index will not be created for the already inserted data.
    */
-
-  [key
-}
+  indices?: Partial<{
+    [key in keyof FinalFlatten<T>]: boolean;
+  }>;
 }
 export {};
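The updated `DeepFlattenKeys` recurses into arrays via `${number}` path segments and into nested objects, which is what allows `options.indices` and queries to use deep paths such as `tags.0` or `user.profile.name`. Below is a compact, simplified re-derivation of that recursion for readers who want to see the mechanism in isolation; it omits the depth counter (`Prev`/`D`) that the real declaration uses to cap recursion:

```typescript
type Primitive = string | number | boolean | null;

// Simplified flattener: produce dotted key paths for primitives, recursing
// through arrays (as `${number}` segments) and plain objects.
type FlattenKeys<T, Prefix extends string = ''> =
  T extends Primitive ? (Prefix extends `${infer P}.` ? P : never)
  : T extends readonly any[] ? FlattenKeys<T[number], `${Prefix}${number}.`>
  : T extends object ? {
      [K in keyof T & string]: NonNullable<T[K]> extends Primitive
        ? `${Prefix}${K}`
        : FlattenKeys<NonNullable<T[K]>, `${Prefix}${K}.`>;
    }[keyof T & string]
  : never;

type User = { name: string; profile: { city: string }; tags: string[] };
type Paths = FlattenKeys<User>;
// Expected: "name" | "profile.city" | `tags.${number}`
```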
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "document-dataply",
-  "version": "0.0.2
-  "description": "",
+  "version": "0.0.2",
+  "description": "Simple and powerful JSON document database supporting complex queries and flexible indexing policies.",
   "license": "MIT",
   "author": "izure <admin@izure.org>",
   "type": "commonjs",
@@ -25,8 +25,21 @@
     "test": "jest -i",
     "build": "node build/index.js && tsc"
   },
+  "keywords": [
+    "database",
+    "document-database",
+    "nosql",
+    "json",
+    "indexing",
+    "bptree",
+    "transactions",
+    "acid",
+    "embedded-database",
+    "deep-indexing",
+    "dataply"
+  ],
   "dependencies": {
-    "dataply": "^0.0.18
+    "dataply": "^0.0.18"
   },
   "devDependencies": {
     "@types/jest": "^30.0.0",