document-dataply 0.0.10-alpha.3 → 0.0.10-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.js +1871 -1310
- package/dist/types/core/AnalysisManager.d.ts +105 -0
- package/dist/types/core/AnalysisProvider.d.ts +30 -0
- package/dist/types/core/DocumentFormatter.d.ts +5 -0
- package/dist/types/core/IndexManager.d.ts +68 -0
- package/dist/types/core/IntervalAnalysisProvider.d.ts +31 -0
- package/dist/types/core/MetadataManager.d.ts +11 -0
- package/dist/types/core/MutationManager.d.ts +14 -0
- package/dist/types/core/Optimizer.d.ts +79 -0
- package/dist/types/core/QueryManager.d.ts +75 -0
- package/dist/types/core/RealtimeAnalysisProvider.d.ts +27 -0
- package/dist/types/core/analysis/FTSTermCount.d.ts +28 -0
- package/dist/types/core/analysis/index.d.ts +2 -0
- package/dist/types/core/document.d.ts +6 -0
- package/dist/types/core/documentAPI.d.ts +62 -193
- package/dist/types/types/index.d.ts +8 -0
- package/package.json +2 -2
package/dist/cjs/index.js
CHANGED
|
@@ -95,7 +95,7 @@ var require_cjs = __commonJS({
|
|
|
95
95
|
StringComparator: () => StringComparator,
|
|
96
96
|
SyncMVCCStrategy: () => SyncMVCCStrategy2,
|
|
97
97
|
SyncMVCCTransaction: () => SyncMVCCTransaction2,
|
|
98
|
-
Transaction: () =>
|
|
98
|
+
Transaction: () => Transaction4,
|
|
99
99
|
UnknownPageManager: () => UnknownPageManager,
|
|
100
100
|
ValueComparator: () => ValueComparator2
|
|
101
101
|
});
|
|
@@ -9598,7 +9598,7 @@ var require_cjs = __commonJS({
|
|
|
9598
9598
|
}
|
|
9599
9599
|
}
|
|
9600
9600
|
};
|
|
9601
|
-
var
|
|
9601
|
+
var Transaction4 = class {
|
|
9602
9602
|
/**
|
|
9603
9603
|
* @param id Transaction ID
|
|
9604
9604
|
* @param context Transaction context
|
|
@@ -9997,7 +9997,7 @@ var require_cjs = __commonJS({
|
|
|
9997
9997
|
* @returns Transaction object
|
|
9998
9998
|
*/
|
|
9999
9999
|
createTransaction() {
|
|
10000
|
-
return new
|
|
10000
|
+
return new Transaction4(
|
|
10001
10001
|
++this.txIdCounter,
|
|
10002
10002
|
this.txContext,
|
|
10003
10003
|
this.pfs.getPageStrategy(),
|
|
@@ -10383,88 +10383,17 @@ var require_cjs = __commonJS({
|
|
|
10383
10383
|
var src_exports = {};
|
|
10384
10384
|
__export(src_exports, {
|
|
10385
10385
|
DocumentDataply: () => DocumentDataply,
|
|
10386
|
-
GlobalTransaction: () =>
|
|
10387
|
-
Transaction: () =>
|
|
10386
|
+
GlobalTransaction: () => import_dataply5.GlobalTransaction,
|
|
10387
|
+
Transaction: () => import_dataply5.Transaction
|
|
10388
10388
|
});
|
|
10389
10389
|
module.exports = __toCommonJS(src_exports);
|
|
10390
10390
|
|
|
10391
10391
|
// src/core/documentAPI.ts
|
|
10392
|
-
var
|
|
10393
|
-
var import_dataply3 = __toESM(require_cjs());
|
|
10394
|
-
|
|
10395
|
-
// src/core/bptree/documentStrategy.ts
|
|
10396
|
-
var import_dataply = __toESM(require_cjs());
|
|
10397
|
-
var DocumentSerializeStrategyAsync = class extends import_dataply.SerializeStrategyAsync {
|
|
10398
|
-
constructor(order, api, txContext, treeKey) {
|
|
10399
|
-
super(order);
|
|
10400
|
-
this.api = api;
|
|
10401
|
-
this.txContext = txContext;
|
|
10402
|
-
this.treeKey = treeKey;
|
|
10403
|
-
}
|
|
10404
|
-
/**
|
|
10405
|
-
* readHead에서 할당된 headPk를 캐싱하여
|
|
10406
|
-
* writeHead에서 AsyncLocalStorage 컨텍스트 유실 시에도 사용할 수 있도록 함
|
|
10407
|
-
*/
|
|
10408
|
-
cachedHeadPk = null;
|
|
10409
|
-
async id(isLeaf) {
|
|
10410
|
-
const tx = this.txContext.get();
|
|
10411
|
-
const pk = await this.api.insertAsOverflow("__BPTREE_NODE_PLACEHOLDER__", false, tx);
|
|
10412
|
-
return pk + "";
|
|
10413
|
-
}
|
|
10414
|
-
async read(id) {
|
|
10415
|
-
const tx = this.txContext.get();
|
|
10416
|
-
const row = await this.api.select(Number(id), false, tx);
|
|
10417
|
-
if (row === null || row === "" || row.startsWith("__BPTREE_")) {
|
|
10418
|
-
throw new Error(`Node not found or empty with ID: ${id}`);
|
|
10419
|
-
}
|
|
10420
|
-
return JSON.parse(row);
|
|
10421
|
-
}
|
|
10422
|
-
async write(id, node) {
|
|
10423
|
-
const tx = this.txContext.get();
|
|
10424
|
-
const json = JSON.stringify(node);
|
|
10425
|
-
await this.api.update(+id, json, tx);
|
|
10426
|
-
}
|
|
10427
|
-
async delete(id) {
|
|
10428
|
-
const tx = this.txContext.get();
|
|
10429
|
-
await this.api.delete(+id, false, tx);
|
|
10430
|
-
}
|
|
10431
|
-
async readHead() {
|
|
10432
|
-
const tx = this.txContext.get();
|
|
10433
|
-
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
10434
|
-
const indexInfo = metadata.indices[this.treeKey];
|
|
10435
|
-
if (!indexInfo) return null;
|
|
10436
|
-
const headPk = indexInfo[0];
|
|
10437
|
-
if (headPk === -1) {
|
|
10438
|
-
const pk = await this.api.insertAsOverflow("__BPTREE_HEAD_PLACEHOLDER__", false, tx);
|
|
10439
|
-
metadata.indices[this.treeKey][0] = pk;
|
|
10440
|
-
await this.api.updateDocumentInnerMetadata(metadata, tx);
|
|
10441
|
-
this.cachedHeadPk = pk;
|
|
10442
|
-
return null;
|
|
10443
|
-
}
|
|
10444
|
-
this.cachedHeadPk = headPk;
|
|
10445
|
-
const row = await this.api.select(headPk, false, tx);
|
|
10446
|
-
if (row === null || row === "" || row.startsWith("__BPTREE_")) return null;
|
|
10447
|
-
return JSON.parse(row);
|
|
10448
|
-
}
|
|
10449
|
-
async writeHead(head) {
|
|
10450
|
-
const tx = this.txContext.get();
|
|
10451
|
-
let headPk = this.cachedHeadPk;
|
|
10452
|
-
if (headPk === null) {
|
|
10453
|
-
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
10454
|
-
const indexInfo = metadata.indices[this.treeKey];
|
|
10455
|
-
if (!indexInfo) {
|
|
10456
|
-
throw new Error(`Index info not found for tree: ${this.treeKey}. Initialization should be handled outside.`);
|
|
10457
|
-
}
|
|
10458
|
-
headPk = indexInfo[0];
|
|
10459
|
-
}
|
|
10460
|
-
const json = JSON.stringify(head);
|
|
10461
|
-
await this.api.update(headPk, json, tx);
|
|
10462
|
-
}
|
|
10463
|
-
};
|
|
10392
|
+
var import_dataply4 = __toESM(require_cjs());
|
|
10464
10393
|
|
|
10465
10394
|
// src/core/bptree/documentComparator.ts
|
|
10466
|
-
var
|
|
10467
|
-
var DocumentValueComparator = class extends
|
|
10395
|
+
var import_dataply = __toESM(require_cjs());
|
|
10396
|
+
var DocumentValueComparator = class extends import_dataply.ValueComparator {
|
|
10468
10397
|
primaryAsc(a, b) {
|
|
10469
10398
|
return this._compareValue(a.v, b.v);
|
|
10470
10399
|
}
|
|
@@ -10505,76 +10434,6 @@ var DocumentValueComparator = class extends import_dataply2.ValueComparator {
|
|
|
10505
10434
|
}
|
|
10506
10435
|
};
|
|
10507
10436
|
|
|
10508
|
-
// src/utils/catchPromise.ts
|
|
10509
|
-
async function catchPromise(promise) {
|
|
10510
|
-
return promise.then((res) => [void 0, res]).catch((reason) => [reason]);
|
|
10511
|
-
}
|
|
10512
|
-
|
|
10513
|
-
// src/utils/heap.ts
|
|
10514
|
-
var BinaryHeap = class {
|
|
10515
|
-
constructor(comparator) {
|
|
10516
|
-
this.comparator = comparator;
|
|
10517
|
-
}
|
|
10518
|
-
heap = [];
|
|
10519
|
-
get size() {
|
|
10520
|
-
return this.heap.length;
|
|
10521
|
-
}
|
|
10522
|
-
peek() {
|
|
10523
|
-
return this.heap[0];
|
|
10524
|
-
}
|
|
10525
|
-
push(value) {
|
|
10526
|
-
this.heap.push(value);
|
|
10527
|
-
this.bubbleUp(this.heap.length - 1);
|
|
10528
|
-
}
|
|
10529
|
-
pop() {
|
|
10530
|
-
if (this.size === 0) return void 0;
|
|
10531
|
-
const top = this.heap[0];
|
|
10532
|
-
const bottom = this.heap.pop();
|
|
10533
|
-
if (this.size > 0) {
|
|
10534
|
-
this.heap[0] = bottom;
|
|
10535
|
-
this.sinkDown(0);
|
|
10536
|
-
}
|
|
10537
|
-
return top;
|
|
10538
|
-
}
|
|
10539
|
-
/**
|
|
10540
|
-
* Replace the root element with a new value and re-heapify.
|
|
10541
|
-
* Faster than pop() followed by push().
|
|
10542
|
-
*/
|
|
10543
|
-
replace(value) {
|
|
10544
|
-
const top = this.heap[0];
|
|
10545
|
-
this.heap[0] = value;
|
|
10546
|
-
this.sinkDown(0);
|
|
10547
|
-
return top;
|
|
10548
|
-
}
|
|
10549
|
-
toArray() {
|
|
10550
|
-
return [...this.heap];
|
|
10551
|
-
}
|
|
10552
|
-
bubbleUp(index) {
|
|
10553
|
-
while (index > 0) {
|
|
10554
|
-
const parentIndex = Math.floor((index - 1) / 2);
|
|
10555
|
-
if (this.comparator(this.heap[index], this.heap[parentIndex]) >= 0) break;
|
|
10556
|
-
[this.heap[index], this.heap[parentIndex]] = [this.heap[parentIndex], this.heap[index]];
|
|
10557
|
-
index = parentIndex;
|
|
10558
|
-
}
|
|
10559
|
-
}
|
|
10560
|
-
sinkDown(index) {
|
|
10561
|
-
while (true) {
|
|
10562
|
-
let smallest = index;
|
|
10563
|
-
const left = 2 * index + 1;
|
|
10564
|
-
const right = 2 * index + 2;
|
|
10565
|
-
if (left < this.size && this.comparator(this.heap[left], this.heap[smallest]) < 0) {
|
|
10566
|
-
smallest = left;
|
|
10567
|
-
}
|
|
10568
|
-
if (right < this.size && this.comparator(this.heap[right], this.heap[smallest]) < 0) {
|
|
10569
|
-
smallest = right;
|
|
10570
|
-
}
|
|
10571
|
-
if (smallest === index) break;
|
|
10572
|
-
[this.heap[index], this.heap[smallest]] = [this.heap[smallest], this.heap[index]];
|
|
10573
|
-
index = smallest;
|
|
10574
|
-
}
|
|
10575
|
-
}
|
|
10576
|
-
};
|
|
10577
|
-
|
|
10578
10437
|
// src/utils/tokenizer.ts
|
|
10579
10438
|
function whitespaceTokenize(text) {
|
|
10580
10439
|
if (typeof text !== "string") return [];
|
|
@@ -10606,168 +10465,984 @@ function tokenize(text, options) {
|
|
|
10606
10465
|
return [];
|
|
10607
10466
|
}
|
|
10608
10467
|
|
|
10609
|
-
// src/core/
|
|
10610
|
-
var
|
|
10611
|
-
|
|
10612
|
-
|
|
10613
|
-
comparator = new DocumentValueComparator();
|
|
10614
|
-
pendingBackfillFields = [];
|
|
10615
|
-
_initialized = false;
|
|
10616
|
-
indexedFields;
|
|
10617
|
-
/**
|
|
10618
|
-
* Registered indices via createIndex() (before init)
|
|
10619
|
-
* Key: index name, Value: index configuration
|
|
10620
|
-
*/
|
|
10621
|
-
pendingCreateIndices = /* @__PURE__ */ new Map();
|
|
10622
|
-
/**
|
|
10623
|
-
* Resolved index configurations after init.
|
|
10624
|
-
* Key: index name, Value: index config (from metadata)
|
|
10625
|
-
*/
|
|
10626
|
-
registeredIndices = /* @__PURE__ */ new Map();
|
|
10627
|
-
/**
|
|
10628
|
-
* Maps field name → index names that cover this field.
|
|
10629
|
-
* Used for query resolution.
|
|
10630
|
-
*/
|
|
10631
|
-
fieldToIndices = /* @__PURE__ */ new Map();
|
|
10632
|
-
operatorConverters = {
|
|
10633
|
-
equal: "primaryEqual",
|
|
10634
|
-
notEqual: "primaryNotEqual",
|
|
10635
|
-
lt: "primaryLt",
|
|
10636
|
-
lte: "primaryLte",
|
|
10637
|
-
gt: "primaryGt",
|
|
10638
|
-
gte: "primaryGte",
|
|
10639
|
-
or: "primaryOr",
|
|
10640
|
-
like: "like"
|
|
10641
|
-
};
|
|
10642
|
-
constructor(file, options) {
|
|
10643
|
-
super(file, options);
|
|
10644
|
-
this.trees = /* @__PURE__ */ new Map();
|
|
10645
|
-
this.indexedFields = /* @__PURE__ */ new Set(["_id"]);
|
|
10646
|
-
this.hook.onceAfter("init", async (tx, isNewlyCreated) => {
|
|
10647
|
-
if (isNewlyCreated) {
|
|
10648
|
-
await this.initializeDocumentFile(tx);
|
|
10649
|
-
}
|
|
10650
|
-
if (!await this.verifyDocumentFile(tx)) {
|
|
10651
|
-
throw new Error("Document metadata verification failed");
|
|
10652
|
-
}
|
|
10653
|
-
const metadata = await this.getDocumentInnerMetadata(tx);
|
|
10654
|
-
const targetIndices = /* @__PURE__ */ new Map([
|
|
10655
|
-
["_id", { type: "btree", fields: ["_id"] }]
|
|
10656
|
-
]);
|
|
10657
|
-
for (const [name, info] of Object.entries(metadata.indices)) {
|
|
10658
|
-
targetIndices.set(name, info[1]);
|
|
10659
|
-
}
|
|
10660
|
-
for (const [name, option] of this.pendingCreateIndices) {
|
|
10661
|
-
const config = this.toIndexMetaConfig(option);
|
|
10662
|
-
targetIndices.set(name, config);
|
|
10663
|
-
}
|
|
10664
|
-
const backfillTargets = [];
|
|
10665
|
-
let isMetadataChanged = false;
|
|
10666
|
-
for (const [indexName, config] of targetIndices) {
|
|
10667
|
-
const existingIndex = metadata.indices[indexName];
|
|
10668
|
-
if (!existingIndex) {
|
|
10669
|
-
metadata.indices[indexName] = [-1, config];
|
|
10670
|
-
isMetadataChanged = true;
|
|
10671
|
-
if (!isNewlyCreated) {
|
|
10672
|
-
backfillTargets.push(indexName);
|
|
10673
|
-
}
|
|
10674
|
-
} else {
|
|
10675
|
-
const [_pk, existingConfig] = existingIndex;
|
|
10676
|
-
if (JSON.stringify(existingConfig) !== JSON.stringify(config)) {
|
|
10677
|
-
metadata.indices[indexName] = [_pk, config];
|
|
10678
|
-
isMetadataChanged = true;
|
|
10679
|
-
if (!isNewlyCreated) {
|
|
10680
|
-
backfillTargets.push(indexName);
|
|
10681
|
-
}
|
|
10682
|
-
}
|
|
10683
|
-
}
|
|
10684
|
-
}
|
|
10685
|
-
if (isMetadataChanged) {
|
|
10686
|
-
await this.updateDocumentInnerMetadata(metadata, tx);
|
|
10687
|
-
}
|
|
10688
|
-
this.indices = metadata.indices;
|
|
10689
|
-
this.registeredIndices = /* @__PURE__ */ new Map();
|
|
10690
|
-
this.fieldToIndices = /* @__PURE__ */ new Map();
|
|
10691
|
-
for (const [indexName, config] of targetIndices) {
|
|
10692
|
-
this.registeredIndices.set(indexName, config);
|
|
10693
|
-
const fields = this.getFieldsFromConfig(config);
|
|
10694
|
-
for (const field of fields) {
|
|
10695
|
-
this.indexedFields.add(field);
|
|
10696
|
-
if (!this.fieldToIndices.has(field)) {
|
|
10697
|
-
this.fieldToIndices.set(field, []);
|
|
10698
|
-
}
|
|
10699
|
-
this.fieldToIndices.get(field).push(indexName);
|
|
10700
|
-
}
|
|
10701
|
-
}
|
|
10702
|
-
for (const indexName of targetIndices.keys()) {
|
|
10703
|
-
if (metadata.indices[indexName]) {
|
|
10704
|
-
const tree = new import_dataply3.BPTreeAsync(
|
|
10705
|
-
new DocumentSerializeStrategyAsync(
|
|
10706
|
-
this.rowTableEngine.order,
|
|
10707
|
-
this,
|
|
10708
|
-
this.txContext,
|
|
10709
|
-
indexName
|
|
10710
|
-
),
|
|
10711
|
-
this.comparator
|
|
10712
|
-
);
|
|
10713
|
-
await tree.init();
|
|
10714
|
-
this.trees.set(indexName, tree);
|
|
10715
|
-
}
|
|
10716
|
-
}
|
|
10717
|
-
this.pendingBackfillFields = backfillTargets;
|
|
10718
|
-
this._initialized = true;
|
|
10719
|
-
return tx;
|
|
10720
|
-
});
|
|
10721
|
-
}
|
|
10722
|
-
/**
|
|
10723
|
-
* Whether the document database has been initialized.
|
|
10724
|
-
*/
|
|
10725
|
-
get isDocInitialized() {
|
|
10726
|
-
return this._initialized;
|
|
10727
|
-
}
|
|
10728
|
-
/**
|
|
10729
|
-
* Register an index. If called before init(), queues it for processing during init.
|
|
10730
|
-
* If called after init(), immediately creates the tree, updates metadata, and backfills.
|
|
10731
|
-
*/
|
|
10732
|
-
async registerIndex(name, option, tx) {
|
|
10733
|
-
if (!this._initialized) {
|
|
10734
|
-
this.pendingCreateIndices.set(name, option);
|
|
10735
|
-
return;
|
|
10736
|
-
}
|
|
10737
|
-
await this.registerIndexRuntime(name, option, tx);
|
|
10468
|
+
// src/core/Optimizer.ts
|
|
10469
|
+
var Optimizer = class {
|
|
10470
|
+
constructor(api) {
|
|
10471
|
+
this.api = api;
|
|
10738
10472
|
}
|
|
10739
10473
|
/**
|
|
10740
|
-
*
|
|
10741
|
-
* Creates the tree, updates metadata, and backfills existing data.
|
|
10474
|
+
* B-Tree 타입 인덱스의 선택도를 평가하고 트리에 부여할 조건을 산출합니다.
|
|
10742
10475
|
*/
|
|
10743
|
-
|
|
10744
|
-
const
|
|
10745
|
-
if (
|
|
10746
|
-
|
|
10747
|
-
|
|
10748
|
-
|
|
10749
|
-
|
|
10750
|
-
|
|
10751
|
-
|
|
10752
|
-
|
|
10753
|
-
|
|
10754
|
-
|
|
10755
|
-
|
|
10756
|
-
|
|
10757
|
-
|
|
10758
|
-
|
|
10759
|
-
|
|
10760
|
-
}
|
|
10761
|
-
this.fieldToIndices.get(field).push(name);
|
|
10476
|
+
evaluateBTreeCandidate(indexName, config, query, queryFields, treeTx, orderByField) {
|
|
10477
|
+
const primaryField = config.fields[0];
|
|
10478
|
+
if (!queryFields.has(primaryField)) return null;
|
|
10479
|
+
const builtCondition = {};
|
|
10480
|
+
let score = 0;
|
|
10481
|
+
let isConsecutive = true;
|
|
10482
|
+
const coveredFields = [];
|
|
10483
|
+
const compositeVerifyFields = [];
|
|
10484
|
+
const startValues = [];
|
|
10485
|
+
const endValues = [];
|
|
10486
|
+
let startOperator = null;
|
|
10487
|
+
let endOperator = null;
|
|
10488
|
+
for (let i = 0, len = config.fields.length; i < len; i++) {
|
|
10489
|
+
const field = config.fields[i];
|
|
10490
|
+
if (!queryFields.has(field)) {
|
|
10491
|
+
isConsecutive = false;
|
|
10492
|
+
continue;
|
|
10762
10493
|
}
|
|
10763
|
-
|
|
10494
|
+
coveredFields.push(field);
|
|
10495
|
+
score += 1;
|
|
10496
|
+
if (isConsecutive) {
|
|
10497
|
+
const cond = query[field];
|
|
10498
|
+
if (cond !== void 0) {
|
|
10499
|
+
let isBounded = false;
|
|
10500
|
+
if (typeof cond !== "object" || cond === null) {
|
|
10501
|
+
score += 100;
|
|
10502
|
+
startValues.push(cond);
|
|
10503
|
+
endValues.push(cond);
|
|
10504
|
+
startOperator = "primaryGte";
|
|
10505
|
+
endOperator = "primaryLte";
|
|
10506
|
+
isBounded = true;
|
|
10507
|
+
} else if ("primaryEqual" in cond || "equal" in cond) {
|
|
10508
|
+
const val = cond.primaryEqual?.v ?? cond.equal?.v ?? cond.primaryEqual ?? cond.equal;
|
|
10509
|
+
score += 100;
|
|
10510
|
+
startValues.push(val);
|
|
10511
|
+
endValues.push(val);
|
|
10512
|
+
startOperator = "primaryGte";
|
|
10513
|
+
endOperator = "primaryLte";
|
|
10514
|
+
isBounded = true;
|
|
10515
|
+
} else if ("primaryGte" in cond || "gte" in cond) {
|
|
10516
|
+
const val = cond.primaryGte?.v ?? cond.gte?.v ?? cond.primaryGte ?? cond.gte;
|
|
10517
|
+
score += 50;
|
|
10518
|
+
isConsecutive = false;
|
|
10519
|
+
startValues.push(val);
|
|
10520
|
+
startOperator = "primaryGte";
|
|
10521
|
+
if (endValues.length > 0) endOperator = "primaryLte";
|
|
10522
|
+
isBounded = true;
|
|
10523
|
+
} else if ("primaryGt" in cond || "gt" in cond) {
|
|
10524
|
+
const val = cond.primaryGt?.v ?? cond.gt?.v ?? cond.primaryGt ?? cond.gt;
|
|
10525
|
+
score += 50;
|
|
10526
|
+
isConsecutive = false;
|
|
10527
|
+
startValues.push(val);
|
|
10528
|
+
startOperator = "primaryGt";
|
|
10529
|
+
if (endValues.length > 0) endOperator = "primaryLte";
|
|
10530
|
+
isBounded = true;
|
|
10531
|
+
} else if ("primaryLte" in cond || "lte" in cond) {
|
|
10532
|
+
const val = cond.primaryLte?.v ?? cond.lte?.v ?? cond.primaryLte ?? cond.lte;
|
|
10533
|
+
score += 50;
|
|
10534
|
+
isConsecutive = false;
|
|
10535
|
+
endValues.push(val);
|
|
10536
|
+
endOperator = "primaryLte";
|
|
10537
|
+
if (startValues.length > 0) startOperator = "primaryGte";
|
|
10538
|
+
isBounded = true;
|
|
10539
|
+
} else if ("primaryLt" in cond || "lt" in cond) {
|
|
10540
|
+
const val = cond.primaryLt?.v ?? cond.lt?.v ?? cond.primaryLt ?? cond.lt;
|
|
10541
|
+
score += 50;
|
|
10542
|
+
isConsecutive = false;
|
|
10543
|
+
endValues.push(val);
|
|
10544
|
+
endOperator = "primaryLt";
|
|
10545
|
+
if (startValues.length > 0) startOperator = "primaryGte";
|
|
10546
|
+
isBounded = true;
|
|
10547
|
+
} else if ("primaryOr" in cond || "or" in cond) {
|
|
10548
|
+
score += 20;
|
|
10549
|
+
isConsecutive = false;
|
|
10550
|
+
} else if ("like" in cond) {
|
|
10551
|
+
score += 15;
|
|
10552
|
+
isConsecutive = false;
|
|
10553
|
+
} else {
|
|
10554
|
+
score += 10;
|
|
10555
|
+
isConsecutive = false;
|
|
10556
|
+
}
|
|
10557
|
+
if (!isBounded && field !== primaryField) {
|
|
10558
|
+
compositeVerifyFields.push(field);
|
|
10559
|
+
}
|
|
10560
|
+
}
|
|
10561
|
+
} else {
|
|
10562
|
+
if (field !== primaryField) {
|
|
10563
|
+
compositeVerifyFields.push(field);
|
|
10564
|
+
}
|
|
10565
|
+
}
|
|
10566
|
+
}
|
|
10567
|
+
if (coveredFields.length === 1 && config.fields.length === 1) {
|
|
10568
|
+
Object.assign(builtCondition, query[primaryField]);
|
|
10569
|
+
} else {
|
|
10570
|
+
if (startOperator && startValues.length > 0) {
|
|
10571
|
+
builtCondition[startOperator] = { v: startValues.length === 1 ? startValues[0] : startValues };
|
|
10572
|
+
}
|
|
10573
|
+
if (endOperator && endValues.length > 0) {
|
|
10574
|
+
if (startOperator && startValues.length === endValues.length && startValues.every((val, i) => val === endValues[i])) {
|
|
10575
|
+
delete builtCondition[startOperator];
|
|
10576
|
+
builtCondition["primaryEqual"] = { v: startValues.length === 1 ? startValues[0] : startValues };
|
|
10577
|
+
} else {
|
|
10578
|
+
builtCondition[endOperator] = { v: endValues.length === 1 ? endValues[0] : endValues };
|
|
10579
|
+
}
|
|
10580
|
+
}
|
|
10581
|
+
if (Object.keys(builtCondition).length === 0) {
|
|
10582
|
+
Object.assign(builtCondition, query[primaryField] || {});
|
|
10583
|
+
}
|
|
10584
|
+
}
|
|
10585
|
+
let isIndexOrderSupported = false;
|
|
10586
|
+
if (orderByField) {
|
|
10587
|
+
for (let i = 0, len = config.fields.length; i < len; i++) {
|
|
10588
|
+
const field = config.fields[i];
|
|
10589
|
+
if (field === orderByField) {
|
|
10590
|
+
isIndexOrderSupported = true;
|
|
10591
|
+
break;
|
|
10592
|
+
}
|
|
10593
|
+
const cond = query[field];
|
|
10594
|
+
let isExactMatch = false;
|
|
10595
|
+
if (cond !== void 0) {
|
|
10596
|
+
if (typeof cond !== "object" || cond === null) isExactMatch = true;
|
|
10597
|
+
else if ("primaryEqual" in cond || "equal" in cond) isExactMatch = true;
|
|
10598
|
+
}
|
|
10599
|
+
if (!isExactMatch) break;
|
|
10600
|
+
}
|
|
10601
|
+
if (isIndexOrderSupported) {
|
|
10602
|
+
score += 200;
|
|
10603
|
+
}
|
|
10604
|
+
}
|
|
10605
|
+
return {
|
|
10606
|
+
tree: treeTx,
|
|
10607
|
+
condition: builtCondition,
|
|
10608
|
+
field: primaryField,
|
|
10609
|
+
indexName,
|
|
10610
|
+
isFtsMatch: false,
|
|
10611
|
+
score,
|
|
10612
|
+
compositeVerifyFields,
|
|
10613
|
+
coveredFields,
|
|
10614
|
+
isIndexOrderSupported
|
|
10615
|
+
};
|
|
10616
|
+
}
|
|
10617
|
+
/**
|
|
10618
|
+
* FTS 타입 인덱스의 선택도를 평가합니다.
|
|
10619
|
+
* FTSTermCount 통계가 있으면 토큰 빈도 기반 동적 score를 산출합니다.
|
|
10620
|
+
*/
|
|
10621
|
+
evaluateFTSCandidate(indexName, config, query, queryFields, treeTx) {
|
|
10622
|
+
const field = config.fields;
|
|
10623
|
+
if (!queryFields.has(field)) return null;
|
|
10624
|
+
const condition = query[field];
|
|
10625
|
+
if (!condition || typeof condition !== "object" || !("match" in condition)) return null;
|
|
10626
|
+
const ftsConfig = this.api.indexManager.getFtsConfig(config);
|
|
10627
|
+
const matchTokens = ftsConfig ? tokenize(condition.match, ftsConfig) : [];
|
|
10628
|
+
const MAX_FTS_SCORE = 400;
|
|
10629
|
+
const MIN_FTS_SCORE = 10;
|
|
10630
|
+
const DEFAULT_FTS_SCORE = 90;
|
|
10631
|
+
let score = DEFAULT_FTS_SCORE;
|
|
10632
|
+
const termCountProvider = this.api.analysisManager.getProvider("fts_term_count");
|
|
10633
|
+
if (termCountProvider && termCountProvider.hasSampleData && ftsConfig && matchTokens.length > 0) {
|
|
10634
|
+
const strategy = ftsConfig.tokenizer === "ngram" ? `${ftsConfig.gramSize}gram` : ftsConfig.tokenizer;
|
|
10635
|
+
const minCount = termCountProvider.getMinTokenCount(field, strategy, matchTokens);
|
|
10636
|
+
if (minCount >= 0) {
|
|
10637
|
+
const sampleSize = termCountProvider.getSampleSize();
|
|
10638
|
+
const selectivityRatio = Math.min(minCount / sampleSize, 1);
|
|
10639
|
+
score = Math.round(MAX_FTS_SCORE * (1 - selectivityRatio) + MIN_FTS_SCORE);
|
|
10640
|
+
}
|
|
10641
|
+
}
|
|
10642
|
+
return {
|
|
10643
|
+
tree: treeTx,
|
|
10644
|
+
condition,
|
|
10645
|
+
field,
|
|
10646
|
+
indexName,
|
|
10647
|
+
isFtsMatch: true,
|
|
10648
|
+
matchTokens,
|
|
10649
|
+
score,
|
|
10650
|
+
compositeVerifyFields: [],
|
|
10651
|
+
coveredFields: [field],
|
|
10652
|
+
isIndexOrderSupported: false
|
|
10653
|
+
};
|
|
10654
|
+
}
|
|
10655
|
+
/**
|
|
10656
|
+
* 실행할 최적의 인덱스를 선택합니다. (최적 드라이버 선택)
|
|
10657
|
+
*/
|
|
10658
|
+
async getSelectivityCandidate(query, orderByField) {
|
|
10659
|
+
const queryFields = new Set(Object.keys(query));
|
|
10660
|
+
const candidates = [];
|
|
10661
|
+
for (const [indexName, config] of this.api.indexManager.registeredIndices) {
|
|
10662
|
+
const tree = this.api.trees.get(indexName);
|
|
10663
|
+
if (!tree) continue;
|
|
10664
|
+
if (config.type === "btree") {
|
|
10665
|
+
const treeTx = await tree.createTransaction();
|
|
10666
|
+
const candidate = this.evaluateBTreeCandidate(
|
|
10667
|
+
indexName,
|
|
10668
|
+
config,
|
|
10669
|
+
query,
|
|
10670
|
+
queryFields,
|
|
10671
|
+
treeTx,
|
|
10672
|
+
orderByField
|
|
10673
|
+
);
|
|
10674
|
+
if (candidate) candidates.push(candidate);
|
|
10675
|
+
} else if (config.type === "fts") {
|
|
10676
|
+
const treeTx = await tree.createTransaction();
|
|
10677
|
+
const candidate = this.evaluateFTSCandidate(
|
|
10678
|
+
indexName,
|
|
10679
|
+
config,
|
|
10680
|
+
query,
|
|
10681
|
+
queryFields,
|
|
10682
|
+
treeTx
|
|
10683
|
+
);
|
|
10684
|
+
if (candidate) candidates.push(candidate);
|
|
10685
|
+
}
|
|
10686
|
+
}
|
|
10687
|
+
const rollback = () => {
|
|
10688
|
+
for (const { tree } of candidates) {
|
|
10689
|
+
tree.rollback();
|
|
10690
|
+
}
|
|
10691
|
+
};
|
|
10692
|
+
if (candidates.length === 0) {
|
|
10693
|
+
rollback();
|
|
10694
|
+
return null;
|
|
10695
|
+
}
|
|
10696
|
+
candidates.sort((a, b) => {
|
|
10697
|
+
if (b.score !== a.score) return b.score - a.score;
|
|
10698
|
+
const aConfig = this.api.indexManager.registeredIndices.get(a.indexName);
|
|
10699
|
+
const bConfig = this.api.indexManager.registeredIndices.get(b.indexName);
|
|
10700
|
+
const aFieldCount = aConfig ? Array.isArray(aConfig.fields) ? aConfig.fields.length : 1 : 0;
|
|
10701
|
+
const bFieldCount = bConfig ? Array.isArray(bConfig.fields) ? bConfig.fields.length : 1 : 0;
|
|
10702
|
+
return aFieldCount - bFieldCount;
|
|
10703
|
+
});
|
|
10704
|
+
const driver = candidates[0];
|
|
10705
|
+
const driverCoveredFields = new Set(driver.coveredFields);
|
|
10706
|
+
const nonDriverCandidates = candidates.slice(1).filter((c) => !driverCoveredFields.has(c.field));
|
|
10707
|
+
const others = [];
|
|
10708
|
+
for (let i = 0, len = nonDriverCandidates.length; i < len; i++) {
|
|
10709
|
+
const candidate = nonDriverCandidates[i];
|
|
10710
|
+
let isSubset = false;
|
|
10711
|
+
for (let j = 0, oLen = others.length; j < oLen; j++) {
|
|
10712
|
+
const higher = others[j];
|
|
10713
|
+
if (candidate.coveredFields.every((f) => higher.coveredFields.includes(f))) {
|
|
10714
|
+
isSubset = true;
|
|
10715
|
+
break;
|
|
10716
|
+
}
|
|
10717
|
+
}
|
|
10718
|
+
if (!isSubset) others.push(candidate);
|
|
10719
|
+
}
|
|
10720
|
+
const compositeVerifyConditions = [];
|
|
10721
|
+
for (let i = 0, len = driver.compositeVerifyFields.length; i < len; i++) {
|
|
10722
|
+
const field = driver.compositeVerifyFields[i];
|
|
10723
|
+
if (query[field]) {
|
|
10724
|
+
compositeVerifyConditions.push({ field, condition: query[field] });
|
|
10725
|
+
}
|
|
10726
|
+
}
|
|
10727
|
+
return {
|
|
10728
|
+
driver,
|
|
10729
|
+
others,
|
|
10730
|
+
compositeVerifyConditions,
|
|
10731
|
+
rollback
|
|
10732
|
+
};
|
|
10733
|
+
}
|
|
10734
|
+
};
|
|
10735
|
+
|
|
10736
|
+
// src/core/QueryManager.ts
|
|
10737
|
+
var os = __toESM(require("node:os"));
|
|
10738
|
+
|
|
10739
|
+
// src/utils/heap.ts
|
|
10740
|
+
var BinaryHeap = class {
|
|
10741
|
+
constructor(comparator) {
|
|
10742
|
+
this.comparator = comparator;
|
|
10743
|
+
}
|
|
10744
|
+
heap = [];
|
|
10745
|
+
get size() {
|
|
10746
|
+
return this.heap.length;
|
|
10747
|
+
}
|
|
10748
|
+
peek() {
|
|
10749
|
+
return this.heap[0];
|
|
10750
|
+
}
|
|
10751
|
+
push(value) {
|
|
10752
|
+
this.heap.push(value);
|
|
10753
|
+
this.bubbleUp(this.heap.length - 1);
|
|
10754
|
+
}
|
|
10755
|
+
pop() {
|
|
10756
|
+
if (this.size === 0) return void 0;
|
|
10757
|
+
const top = this.heap[0];
|
|
10758
|
+
const bottom = this.heap.pop();
|
|
10759
|
+
if (this.size > 0) {
|
|
10760
|
+
this.heap[0] = bottom;
|
|
10761
|
+
this.sinkDown(0);
|
|
10762
|
+
}
|
|
10763
|
+
return top;
|
|
10764
|
+
}
|
|
10765
|
+
/**
|
|
10766
|
+
* Replace the root element with a new value and re-heapify.
|
|
10767
|
+
* Faster than pop() followed by push().
|
|
10768
|
+
*/
|
|
10769
|
+
replace(value) {
|
|
10770
|
+
const top = this.heap[0];
|
|
10771
|
+
this.heap[0] = value;
|
|
10772
|
+
this.sinkDown(0);
|
|
10773
|
+
return top;
|
|
10774
|
+
}
|
|
10775
|
+
toArray() {
|
|
10776
|
+
return [...this.heap];
|
|
10777
|
+
}
|
|
10778
|
+
bubbleUp(index) {
|
|
10779
|
+
while (index > 0) {
|
|
10780
|
+
const parentIndex = Math.floor((index - 1) / 2);
|
|
10781
|
+
if (this.comparator(this.heap[index], this.heap[parentIndex]) >= 0) break;
|
|
10782
|
+
[this.heap[index], this.heap[parentIndex]] = [this.heap[parentIndex], this.heap[index]];
|
|
10783
|
+
index = parentIndex;
|
|
10784
|
+
}
|
|
10785
|
+
}
|
|
10786
|
+
sinkDown(index) {
|
|
10787
|
+
while (true) {
|
|
10788
|
+
let smallest = index;
|
|
10789
|
+
const left = 2 * index + 1;
|
|
10790
|
+
const right = 2 * index + 2;
|
|
10791
|
+
if (left < this.size && this.comparator(this.heap[left], this.heap[smallest]) < 0) {
|
|
10792
|
+
smallest = left;
|
|
10793
|
+
}
|
|
10794
|
+
if (right < this.size && this.comparator(this.heap[right], this.heap[smallest]) < 0) {
|
|
10795
|
+
smallest = right;
|
|
10796
|
+
}
|
|
10797
|
+
if (smallest === index) break;
|
|
10798
|
+
[this.heap[index], this.heap[smallest]] = [this.heap[smallest], this.heap[index]];
|
|
10799
|
+
index = smallest;
|
|
10800
|
+
}
|
|
10801
|
+
}
|
|
10802
|
+
};
|
|
10803
|
+
|
|
10804
|
+
// src/core/QueryManager.ts
|
|
10805
|
+
var QueryManager = class {
|
|
10806
|
+
constructor(api, optimizer) {
|
|
10807
|
+
this.api = api;
|
|
10808
|
+
this.optimizer = optimizer;
|
|
10809
|
+
}
|
|
10810
|
+
operatorConverters = {
|
|
10811
|
+
equal: "primaryEqual",
|
|
10812
|
+
notEqual: "primaryNotEqual",
|
|
10813
|
+
lt: "primaryLt",
|
|
10814
|
+
lte: "primaryLte",
|
|
10815
|
+
gt: "primaryGt",
|
|
10816
|
+
gte: "primaryGte",
|
|
10817
|
+
or: "primaryOr",
|
|
10818
|
+
like: "like"
|
|
10819
|
+
};
|
|
10820
|
+
/**
|
|
10821
|
+
* Transforms a query object into a verbose query object
|
|
10822
|
+
*/
|
|
10823
|
+
verboseQuery(query) {
|
|
10824
|
+
const result = {};
|
|
10825
|
+
for (const field in query) {
|
|
10826
|
+
const conditions = query[field];
|
|
10827
|
+
let newConditions;
|
|
10828
|
+
if (typeof conditions !== "object" || conditions === null) {
|
|
10829
|
+
newConditions = { primaryEqual: { v: conditions } };
|
|
10830
|
+
} else {
|
|
10831
|
+
newConditions = {};
|
|
10832
|
+
for (const operator in conditions) {
|
|
10833
|
+
const before = operator;
|
|
10834
|
+
const after = this.operatorConverters[before];
|
|
10835
|
+
const v = conditions[before];
|
|
10836
|
+
if (!after) {
|
|
10837
|
+
if (before === "match") {
|
|
10838
|
+
newConditions[before] = v;
|
|
10839
|
+
}
|
|
10840
|
+
continue;
|
|
10841
|
+
}
|
|
10842
|
+
if (before === "or" && Array.isArray(v)) {
|
|
10843
|
+
newConditions[after] = v.map((val) => ({ v: val }));
|
|
10844
|
+
} else if (before === "like") {
|
|
10845
|
+
newConditions[after] = v;
|
|
10846
|
+
} else {
|
|
10847
|
+
newConditions[after] = { v };
|
|
10848
|
+
}
|
|
10849
|
+
}
|
|
10850
|
+
}
|
|
10851
|
+
result[field] = newConditions;
|
|
10852
|
+
}
|
|
10853
|
+
return result;
|
|
10854
|
+
}
|
|
10855
|
+
getFreeMemoryChunkSize() {
|
|
10856
|
+
const freeMem = os.freemem();
|
|
10857
|
+
const safeLimit = freeMem * 0.2;
|
|
10858
|
+
const verySmallChunkSize = safeLimit * 0.05;
|
|
10859
|
+
const smallChunkSize = safeLimit * 0.3;
|
|
10860
|
+
return { verySmallChunkSize, smallChunkSize };
|
|
10861
|
+
}
|
|
10862
|
+
async *applyCandidateByFTSStream(candidate, matchedTokens, filterValues, order) {
|
|
10863
|
+
const keys = /* @__PURE__ */ new Set();
|
|
10864
|
+
for (let i = 0, len = matchedTokens.length; i < len; i++) {
|
|
10865
|
+
const token = matchedTokens[i];
|
|
10866
|
+
for await (const pair of candidate.tree.whereStream(
|
|
10867
|
+
{ primaryEqual: { v: token } },
|
|
10868
|
+
{ order }
|
|
10869
|
+
)) {
|
|
10870
|
+
const pk = pair[1].k;
|
|
10871
|
+
if (filterValues && !filterValues.has(pk)) continue;
|
|
10872
|
+
if (!keys.has(pk)) {
|
|
10873
|
+
keys.add(pk);
|
|
10874
|
+
yield pk;
|
|
10875
|
+
}
|
|
10876
|
+
}
|
|
10877
|
+
}
|
|
10878
|
+
}
|
|
10879
|
+
applyCandidateStream(candidate, filterValues, order) {
|
|
10880
|
+
return candidate.tree.keysStream(
|
|
10881
|
+
candidate.condition,
|
|
10882
|
+
{ filterValues, order }
|
|
10883
|
+
);
|
|
10884
|
+
}
|
|
10885
|
+
async getKeys(query, orderBy, sortOrder = "asc") {
|
|
10886
|
+
const isQueryEmpty = Object.keys(query).length === 0;
|
|
10887
|
+
const normalizedQuery = isQueryEmpty ? { _id: { gte: 0 } } : query;
|
|
10888
|
+
const selectivity = await this.optimizer.getSelectivityCandidate(
|
|
10889
|
+
this.verboseQuery(normalizedQuery),
|
|
10890
|
+
orderBy
|
|
10891
|
+
);
|
|
10892
|
+
if (!selectivity) return new Float64Array(0);
|
|
10893
|
+
const { driver, others, rollback } = selectivity;
|
|
10894
|
+
const useIndexOrder = orderBy === void 0 || driver.isIndexOrderSupported;
|
|
10895
|
+
const candidates = [driver, ...others];
|
|
10896
|
+
let keys = void 0;
|
|
10897
|
+
for (let i = 0, len = candidates.length; i < len; i++) {
|
|
10898
|
+
const candidate = candidates[i];
|
|
10899
|
+
const currentOrder = useIndexOrder ? sortOrder : void 0;
|
|
10900
|
+
if (candidate.isFtsMatch && candidate.matchTokens && candidate.matchTokens.length > 0) {
|
|
10901
|
+
const stream = this.applyCandidateByFTSStream(
|
|
10902
|
+
candidate,
|
|
10903
|
+
candidate.matchTokens,
|
|
10904
|
+
keys,
|
|
10905
|
+
currentOrder
|
|
10906
|
+
);
|
|
10907
|
+
keys = /* @__PURE__ */ new Set();
|
|
10908
|
+
for await (const pk of stream) keys.add(pk);
|
|
10909
|
+
} else {
|
|
10910
|
+
const stream = this.applyCandidateStream(candidate, keys, currentOrder);
|
|
10911
|
+
keys = /* @__PURE__ */ new Set();
|
|
10912
|
+
for await (const pk of stream) keys.add(pk);
|
|
10913
|
+
}
|
|
10914
|
+
}
|
|
10915
|
+
rollback();
|
|
10916
|
+
return new Float64Array(Array.from(keys || []));
|
|
10917
|
+
}
|
|
10918
|
+
async getDriverKeys(query, orderBy, sortOrder = "asc") {
|
|
10919
|
+
const isQueryEmpty = Object.keys(query).length === 0;
|
|
10920
|
+
const normalizedQuery = isQueryEmpty ? { _id: { gte: 0 } } : query;
|
|
10921
|
+
const selectivity = await this.optimizer.getSelectivityCandidate(
|
|
10922
|
+
this.verboseQuery(normalizedQuery),
|
|
10923
|
+
orderBy
|
|
10924
|
+
);
|
|
10925
|
+
if (!selectivity) return null;
|
|
10926
|
+
const { driver, others, compositeVerifyConditions, rollback } = selectivity;
|
|
10927
|
+
const useIndexOrder = orderBy === void 0 || driver.isIndexOrderSupported;
|
|
10928
|
+
const currentOrder = useIndexOrder ? sortOrder : void 0;
|
|
10929
|
+
let keysStream;
|
|
10930
|
+
if (driver.isFtsMatch && driver.matchTokens && driver.matchTokens.length > 0) {
|
|
10931
|
+
keysStream = this.applyCandidateByFTSStream(
|
|
10932
|
+
driver,
|
|
10933
|
+
driver.matchTokens,
|
|
10934
|
+
void 0,
|
|
10935
|
+
currentOrder
|
|
10936
|
+
);
|
|
10937
|
+
} else {
|
|
10938
|
+
keysStream = this.applyCandidateStream(driver, void 0, currentOrder);
|
|
10939
|
+
}
|
|
10940
|
+
return {
|
|
10941
|
+
keysStream,
|
|
10942
|
+
others,
|
|
10943
|
+
compositeVerifyConditions,
|
|
10944
|
+
isDriverOrderByField: useIndexOrder,
|
|
10945
|
+
rollback
|
|
10946
|
+
};
|
|
10947
|
+
}
|
|
10948
|
+
verifyFts(doc, ftsConditions) {
|
|
10949
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
10950
|
+
for (let i = 0, len = ftsConditions.length; i < len; i++) {
|
|
10951
|
+
const { field, matchTokens } = ftsConditions[i];
|
|
10952
|
+
const docValue = flatDoc[field];
|
|
10953
|
+
if (typeof docValue !== "string") return false;
|
|
10954
|
+
for (let j = 0, jLen = matchTokens.length; j < jLen; j++) {
|
|
10955
|
+
const token = matchTokens[j];
|
|
10956
|
+
if (!docValue.includes(token)) return false;
|
|
10957
|
+
}
|
|
10958
|
+
}
|
|
10959
|
+
return true;
|
|
10960
|
+
}
|
|
10961
|
+
verifyCompositeConditions(doc, conditions) {
|
|
10962
|
+
if (conditions.length === 0) return true;
|
|
10963
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
10964
|
+
for (let i = 0, len = conditions.length; i < len; i++) {
|
|
10965
|
+
const { field, condition } = conditions[i];
|
|
10966
|
+
const docValue = flatDoc[field];
|
|
10967
|
+
if (docValue === void 0) return false;
|
|
10968
|
+
if (!this.verifyValue(docValue, condition)) return false;
|
|
10969
|
+
}
|
|
10970
|
+
return true;
|
|
10971
|
+
}
|
|
10972
|
+
verifyValue(value, condition) {
|
|
10973
|
+
if (typeof condition !== "object" || condition === null) {
|
|
10974
|
+
return value === condition;
|
|
10975
|
+
}
|
|
10976
|
+
if ("primaryEqual" in condition) {
|
|
10977
|
+
return value === condition.primaryEqual?.v;
|
|
10978
|
+
}
|
|
10979
|
+
if ("primaryNotEqual" in condition) {
|
|
10980
|
+
return value !== condition.primaryNotEqual?.v;
|
|
10981
|
+
}
|
|
10982
|
+
if ("primaryLt" in condition) {
|
|
10983
|
+
return value !== null && condition.primaryLt?.v !== void 0 && value < condition.primaryLt.v;
|
|
10984
|
+
}
|
|
10985
|
+
if ("primaryLte" in condition) {
|
|
10986
|
+
return value !== null && condition.primaryLte?.v !== void 0 && value <= condition.primaryLte.v;
|
|
10987
|
+
}
|
|
10988
|
+
if ("primaryGt" in condition) {
|
|
10989
|
+
return value !== null && condition.primaryGt?.v !== void 0 && value > condition.primaryGt.v;
|
|
10990
|
+
}
|
|
10991
|
+
if ("primaryGte" in condition) {
|
|
10992
|
+
return value !== null && condition.primaryGte?.v !== void 0 && value >= condition.primaryGte.v;
|
|
10993
|
+
}
|
|
10994
|
+
if ("primaryOr" in condition && Array.isArray(condition.primaryOr)) {
|
|
10995
|
+
return condition.primaryOr.some((c) => value === c?.v);
|
|
10996
|
+
}
|
|
10997
|
+
return true;
|
|
10998
|
+
}
|
|
10999
|
+
adjustChunkSize(currentChunkSize, chunkTotalSize) {
|
|
11000
|
+
if (chunkTotalSize <= 0) return currentChunkSize;
|
|
11001
|
+
const { verySmallChunkSize, smallChunkSize } = this.getFreeMemoryChunkSize();
|
|
11002
|
+
if (chunkTotalSize < verySmallChunkSize) return currentChunkSize * 2;
|
|
11003
|
+
if (chunkTotalSize > smallChunkSize) return Math.max(Math.floor(currentChunkSize / 2), 20);
|
|
11004
|
+
return currentChunkSize;
|
|
11005
|
+
}
|
|
11006
|
+
async *processChunkedKeysWithVerify(keysStream, startIdx, initialChunkSize, limit, ftsConditions, compositeVerifyConditions, others, tx) {
|
|
11007
|
+
const verifyOthers = others.filter((o) => !o.isFtsMatch);
|
|
11008
|
+
const isFts = ftsConditions.length > 0;
|
|
11009
|
+
const isCompositeVerify = compositeVerifyConditions.length > 0;
|
|
11010
|
+
const isVerifyOthers = verifyOthers.length > 0;
|
|
11011
|
+
const isInfinityLimit = !isFinite(limit);
|
|
11012
|
+
const isReadQuotaLimited = !isInfinityLimit && // limit이 임의의 유한한 값으로 설정되어 있으며
|
|
11013
|
+
!isCompositeVerify && // 문서를 가져온 후 복합 인덱스 기준으로 2차 필터링할 필요가 없고
|
|
11014
|
+
!isVerifyOthers && // 문서를 가져온 후 다른 인덱스 기준으로 2차 필터링할 필요가 없으며
|
|
11015
|
+
!isFts;
|
|
11016
|
+
let currentChunkSize = isReadQuotaLimited ? limit : initialChunkSize;
|
|
11017
|
+
let chunk = [];
|
|
11018
|
+
let chunkSize = 0;
|
|
11019
|
+
let dropped = 0;
|
|
11020
|
+
const processChunk = async (pks) => {
|
|
11021
|
+
const docs = [];
|
|
11022
|
+
const rawResults = await this.api.selectMany(new Float64Array(pks), false, tx);
|
|
11023
|
+
let chunkTotalSize = 0;
|
|
11024
|
+
for (let j = 0, len = rawResults.length; j < len; j++) {
|
|
11025
|
+
const s = rawResults[j];
|
|
11026
|
+
if (!s) continue;
|
|
11027
|
+
const doc = JSON.parse(s);
|
|
11028
|
+
chunkTotalSize += s.length * 2;
|
|
11029
|
+
if (isFts && !this.verifyFts(doc, ftsConditions)) continue;
|
|
11030
|
+
if (isCompositeVerify && this.verifyCompositeConditions(doc, compositeVerifyConditions) === false) continue;
|
|
11031
|
+
if (isVerifyOthers) {
|
|
11032
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
11033
|
+
let passed = true;
|
|
11034
|
+
for (let k = 0, kLen = verifyOthers.length; k < kLen; k++) {
|
|
11035
|
+
const other = verifyOthers[k];
|
|
11036
|
+
const coveredFields = other.coveredFields;
|
|
11037
|
+
let fieldValue;
|
|
11038
|
+
if (coveredFields && coveredFields.length > 1) {
|
|
11039
|
+
const values = [];
|
|
11040
|
+
let hasMissing = false;
|
|
11041
|
+
for (let f = 0, fLen = coveredFields.length; f < fLen; f++) {
|
|
11042
|
+
const v = flatDoc[coveredFields[f]];
|
|
11043
|
+
if (v === void 0) {
|
|
11044
|
+
hasMissing = true;
|
|
11045
|
+
break;
|
|
11046
|
+
}
|
|
11047
|
+
values.push(v);
|
|
11048
|
+
}
|
|
11049
|
+
if (hasMissing) {
|
|
11050
|
+
passed = false;
|
|
11051
|
+
break;
|
|
11052
|
+
}
|
|
11053
|
+
fieldValue = values;
|
|
11054
|
+
} else {
|
|
11055
|
+
fieldValue = flatDoc[other.field];
|
|
11056
|
+
if (fieldValue === void 0) {
|
|
11057
|
+
passed = false;
|
|
11058
|
+
break;
|
|
11059
|
+
}
|
|
11060
|
+
}
|
|
11061
|
+
const treeValue = { k: doc._id, v: fieldValue };
|
|
11062
|
+
if (!other.tree.verify(treeValue, other.condition)) {
|
|
11063
|
+
passed = false;
|
|
11064
|
+
break;
|
|
11065
|
+
}
|
|
11066
|
+
}
|
|
11067
|
+
if (!passed) continue;
|
|
11068
|
+
}
|
|
11069
|
+
docs.push(doc);
|
|
11070
|
+
}
|
|
11071
|
+
if (!isReadQuotaLimited) {
|
|
11072
|
+
currentChunkSize = this.adjustChunkSize(currentChunkSize, chunkTotalSize);
|
|
11073
|
+
}
|
|
11074
|
+
return docs;
|
|
11075
|
+
};
|
|
11076
|
+
for await (const pk of keysStream) {
|
|
11077
|
+
if (dropped < startIdx) {
|
|
11078
|
+
dropped++;
|
|
11079
|
+
continue;
|
|
11080
|
+
}
|
|
11081
|
+
chunk.push(pk);
|
|
11082
|
+
chunkSize++;
|
|
11083
|
+
if (chunkSize >= currentChunkSize) {
|
|
11084
|
+
const docs = await processChunk(chunk);
|
|
11085
|
+
for (let j = 0, dLen = docs.length; j < dLen; j++) yield docs[j];
|
|
11086
|
+
chunk = [];
|
|
11087
|
+
chunkSize = 0;
|
|
11088
|
+
}
|
|
11089
|
+
}
|
|
11090
|
+
if (chunkSize > 0) {
|
|
11091
|
+
const docs = await processChunk(chunk);
|
|
11092
|
+
for (let j = 0, dLen = docs.length; j < dLen; j++) yield docs[j];
|
|
11093
|
+
}
|
|
11094
|
+
}
|
|
11095
|
+
/**
|
|
11096
|
+
* Count documents from the database that match the query
|
|
11097
|
+
* @param query The query to use
|
|
11098
|
+
* @param tx The transaction to use
|
|
11099
|
+
* @returns The number of documents that match the query
|
|
11100
|
+
*/
|
|
11101
|
+
async countDocuments(query, tx) {
|
|
11102
|
+
return this.api.runWithDefault(async (tx2) => {
|
|
11103
|
+
const pks = await this.getKeys(query);
|
|
11104
|
+
return pks.length;
|
|
11105
|
+
}, tx);
|
|
11106
|
+
}
|
|
11107
|
+
selectDocuments(query, options = {}, tx) {
|
|
11108
|
+
for (const field of Object.keys(query)) {
|
|
11109
|
+
if (!this.api.indexedFields.has(field)) {
|
|
11110
|
+
throw new Error(`Query field "${field}" is not indexed. Available indexed fields: ${Array.from(this.api.indexedFields).join(", ")}`);
|
|
11111
|
+
}
|
|
11112
|
+
}
|
|
11113
|
+
const orderBy = options.orderBy;
|
|
11114
|
+
if (orderBy !== void 0 && !this.api.indexedFields.has(orderBy)) {
|
|
11115
|
+
throw new Error(`orderBy field "${orderBy}" is not indexed. Available indexed fields: ${Array.from(this.api.indexedFields).join(", ")}`);
|
|
11116
|
+
}
|
|
11117
|
+
const {
|
|
11118
|
+
limit = Infinity,
|
|
11119
|
+
offset = 0,
|
|
11120
|
+
sortOrder = "asc",
|
|
11121
|
+
orderBy: orderByField
|
|
11122
|
+
} = options;
|
|
11123
|
+
const self = this;
|
|
11124
|
+
const stream = () => this.api.streamWithDefault(async function* (tx2) {
|
|
11125
|
+
const ftsConditions = [];
|
|
11126
|
+
for (const field in query) {
|
|
11127
|
+
const q = query[field];
|
|
11128
|
+
if (q && typeof q === "object" && "match" in q && typeof q.match === "string") {
|
|
11129
|
+
const indexNames = self.api.indexManager.fieldToIndices.get(field) || [];
|
|
11130
|
+
for (const indexName of indexNames) {
|
|
11131
|
+
const config = self.api.indexManager.registeredIndices.get(indexName);
|
|
11132
|
+
if (config && config.type === "fts") {
|
|
11133
|
+
const ftsConfig = self.api.indexManager.getFtsConfig(config);
|
|
11134
|
+
if (ftsConfig) {
|
|
11135
|
+
ftsConditions.push({ field, matchTokens: tokenize(q.match, ftsConfig) });
|
|
11136
|
+
}
|
|
11137
|
+
break;
|
|
11138
|
+
}
|
|
11139
|
+
}
|
|
11140
|
+
}
|
|
11141
|
+
}
|
|
11142
|
+
const driverResult = await self.getDriverKeys(query, orderByField, sortOrder);
|
|
11143
|
+
if (!driverResult) return;
|
|
11144
|
+
const { keysStream, others, compositeVerifyConditions, isDriverOrderByField, rollback } = driverResult;
|
|
11145
|
+
const initialChunkSize = self.api.options.pageSize;
|
|
11146
|
+
try {
|
|
11147
|
+
if (!isDriverOrderByField && orderByField) {
|
|
11148
|
+
const topK = limit === Infinity ? Infinity : offset + limit;
|
|
11149
|
+
let heap = null;
|
|
11150
|
+
if (topK !== Infinity) {
|
|
11151
|
+
heap = new BinaryHeap((a, b) => {
|
|
11152
|
+
const aVal = a[orderByField] ?? a._id;
|
|
11153
|
+
const bVal = b[orderByField] ?? b._id;
|
|
11154
|
+
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
11155
|
+
return sortOrder === "asc" ? -cmp : cmp;
|
|
11156
|
+
});
|
|
11157
|
+
}
|
|
11158
|
+
const results = [];
|
|
11159
|
+
for await (const doc of self.processChunkedKeysWithVerify(
|
|
11160
|
+
keysStream,
|
|
11161
|
+
0,
|
|
11162
|
+
initialChunkSize,
|
|
11163
|
+
Infinity,
|
|
11164
|
+
ftsConditions,
|
|
11165
|
+
compositeVerifyConditions,
|
|
11166
|
+
others,
|
|
11167
|
+
tx2
|
|
11168
|
+
)) {
|
|
11169
|
+
if (heap) {
|
|
11170
|
+
if (heap.size < topK) heap.push(doc);
|
|
11171
|
+
else {
|
|
11172
|
+
const top = heap.peek();
|
|
11173
|
+
if (top) {
|
|
11174
|
+
const aVal = doc[orderByField] ?? doc._id;
|
|
11175
|
+
const bVal = top[orderByField] ?? top._id;
|
|
11176
|
+
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
11177
|
+
if (sortOrder === "asc" ? cmp < 0 : cmp > 0) heap.replace(doc);
|
|
11178
|
+
}
|
|
11179
|
+
}
|
|
11180
|
+
} else {
|
|
11181
|
+
results.push(doc);
|
|
11182
|
+
}
|
|
11183
|
+
}
|
|
11184
|
+
const finalDocs = heap ? heap.toArray() : results;
|
|
11185
|
+
finalDocs.sort((a, b) => {
|
|
11186
|
+
const aVal = a[orderByField] ?? a._id;
|
|
11187
|
+
const bVal = b[orderByField] ?? b._id;
|
|
11188
|
+
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
11189
|
+
return sortOrder === "asc" ? cmp : -cmp;
|
|
11190
|
+
});
|
|
11191
|
+
const end = limit === Infinity ? void 0 : offset + limit;
|
|
11192
|
+
const limitedResults = finalDocs.slice(offset, end);
|
|
11193
|
+
for (let j = 0, len = limitedResults.length; j < len; j++) {
|
|
11194
|
+
yield limitedResults[j];
|
|
11195
|
+
}
|
|
11196
|
+
} else {
|
|
11197
|
+
const hasFilters = ftsConditions.length > 0 || compositeVerifyConditions.length > 0 || others.length > 0;
|
|
11198
|
+
const startIdx = hasFilters ? 0 : offset;
|
|
11199
|
+
let yieldedCount = 0;
|
|
11200
|
+
let skippedCount = hasFilters ? 0 : offset;
|
|
11201
|
+
for await (const doc of self.processChunkedKeysWithVerify(
|
|
11202
|
+
keysStream,
|
|
11203
|
+
startIdx,
|
|
11204
|
+
initialChunkSize,
|
|
11205
|
+
limit,
|
|
11206
|
+
ftsConditions,
|
|
11207
|
+
compositeVerifyConditions,
|
|
11208
|
+
others,
|
|
11209
|
+
tx2
|
|
11210
|
+
)) {
|
|
11211
|
+
if (skippedCount < offset) {
|
|
11212
|
+
skippedCount++;
|
|
11213
|
+
continue;
|
|
11214
|
+
}
|
|
11215
|
+
if (yieldedCount >= limit) break;
|
|
11216
|
+
yield doc;
|
|
11217
|
+
yieldedCount++;
|
|
11218
|
+
}
|
|
11219
|
+
}
|
|
11220
|
+
} finally {
|
|
11221
|
+
rollback();
|
|
11222
|
+
}
|
|
11223
|
+
}, tx);
|
|
11224
|
+
const drain = async () => {
|
|
11225
|
+
const result = [];
|
|
11226
|
+
for await (const document of stream()) {
|
|
11227
|
+
result.push(document);
|
|
11228
|
+
}
|
|
11229
|
+
return result;
|
|
11230
|
+
};
|
|
11231
|
+
return { stream, drain };
|
|
11232
|
+
}
|
|
11233
|
+
};
|
|
11234
|
+
|
|
11235
|
+
// src/core/IndexManager.ts
|
|
11236
|
+
var import_dataply3 = __toESM(require_cjs());
|
|
11237
|
+
|
|
11238
|
+
// src/core/bptree/documentStrategy.ts
|
|
11239
|
+
var import_dataply2 = __toESM(require_cjs());
|
|
11240
|
+
var DocumentSerializeStrategyAsync = class extends import_dataply2.SerializeStrategyAsync {
  constructor(order, api, txContext, treeKey) {
    super(order);
    this.api = api;
    this.txContext = txContext;
    this.treeKey = treeKey;
  }
  /**
   * Caches the head primary key assigned in readHead() so that writeHead()
   * can still resolve it when the AsyncLocalStorage context is lost.
   */
  cachedHeadPk = null;
  /** Allocate a placeholder row for a new node; its pk becomes the node id. */
  async id(isLeaf) {
    const tx = this.txContext.get();
    const pk = await this.api.insertAsOverflow("__BPTREE_NODE_PLACEHOLDER__", false, tx);
    return String(pk);
  }
  /** Load and parse a node row; placeholders and empty rows are missing nodes. */
  async read(id) {
    const tx = this.txContext.get();
    const row = await this.api.select(Number(id), false, tx);
    if (row === null || row === "" || row.startsWith("__BPTREE_")) {
      throw new Error(`Node not found or empty with ID: ${id}`);
    }
    return JSON.parse(row);
  }
  /** Serialize and persist a node into its row. */
  async write(id, node) {
    const tx = this.txContext.get();
    await this.api.update(Number(id), JSON.stringify(node), tx);
  }
  /** Remove a node's row. */
  async delete(id) {
    const tx = this.txContext.get();
    await this.api.delete(Number(id), false, tx);
  }
  /**
   * Load the tree head from the row referenced by this tree's metadata
   * entry, lazily allocating a head placeholder row on first use.
   */
  async readHead() {
    const tx = this.txContext.get();
    const metadata = await this.api.getDocumentInnerMetadata(tx);
    const indexInfo = metadata.indices[this.treeKey];
    if (!indexInfo) return null;
    const headPk = indexInfo[0];
    if (headPk === -1) {
      // First use: allocate the head row and record its pk in metadata.
      const pk = await this.api.insertAsOverflow("__BPTREE_HEAD_PLACEHOLDER__", false, tx);
      metadata.indices[this.treeKey][0] = pk;
      await this.api.updateDocumentInnerMetadata(metadata, tx);
      this.cachedHeadPk = pk;
      return null;
    }
    this.cachedHeadPk = headPk;
    const row = await this.api.select(headPk, false, tx);
    if (row === null || row === "" || row.startsWith("__BPTREE_")) return null;
    return JSON.parse(row);
  }
  /** Persist the tree head, resolving the head pk from cache or metadata. */
  async writeHead(head) {
    const tx = this.txContext.get();
    let headPk = this.cachedHeadPk;
    if (headPk === null) {
      const metadata = await this.api.getDocumentInnerMetadata(tx);
      const indexInfo = metadata.indices[this.treeKey];
      if (!indexInfo) {
        throw new Error(`Index info not found for tree: ${this.treeKey}. Initialization should be handled outside.`);
      }
      headPk = indexInfo[0];
    }
    await this.api.update(headPk, JSON.stringify(head), tx);
  }
};
|
|
11307
|
+
|
|
11308
|
+
// src/core/IndexManager.ts
|
|
11309
|
+
var IndexManager = class {
|
|
11310
|
+
constructor(api) {
|
|
11311
|
+
this.api = api;
|
|
11312
|
+
this.trees = /* @__PURE__ */ new Map();
|
|
11313
|
+
this.indexedFields = /* @__PURE__ */ new Set(["_id"]);
|
|
11314
|
+
}
|
|
11315
|
+
// Index metadata as persisted in the document's inner metadata
// (index name -> [headPk, config]).
indices = {};
// Live B+Tree instance per index name.
trees = new Map();
// Field names covered by at least one index (assigned in the constructor).
indexedFields;
/**
 * Registered indices via createIndex() (before init)
 * Key: index name, Value: index configuration
 */
pendingCreateIndices = new Map();
/**
 * Resolved index configurations after init.
 * Key: index name, Value: index config (from metadata)
 */
registeredIndices = new Map();
/**
 * Maps field name → index names that cover this field.
 * Used for query resolution.
 */
fieldToIndices = new Map();
// Index names that still require a data backfill after initialization.
pendingBackfillFields = [];
|
|
11334
|
+
/**
|
|
11335
|
+
* Validate and apply indices from DB metadata and pending indices.
|
|
11336
|
+
* Called during database initialization.
|
|
11337
|
+
*/
|
|
11338
|
+
async initializeIndices(metadata, isNewlyCreated, tx) {
|
|
11339
|
+
const targetIndices = /* @__PURE__ */ new Map([
|
|
11340
|
+
["_id", { type: "btree", fields: ["_id"] }]
|
|
11341
|
+
]);
|
|
11342
|
+
for (const [name, info] of Object.entries(metadata.indices)) {
|
|
11343
|
+
targetIndices.set(name, info[1]);
|
|
11344
|
+
}
|
|
11345
|
+
for (const [name, option] of this.pendingCreateIndices) {
|
|
11346
|
+
const config = this.toIndexMetaConfig(option);
|
|
11347
|
+
targetIndices.set(name, config);
|
|
11348
|
+
}
|
|
11349
|
+
const backfillTargets = [];
|
|
11350
|
+
let isMetadataChanged = false;
|
|
11351
|
+
for (const [indexName, config] of targetIndices) {
|
|
11352
|
+
const existingIndex = metadata.indices[indexName];
|
|
11353
|
+
if (!existingIndex) {
|
|
11354
|
+
metadata.indices[indexName] = [-1, config];
|
|
11355
|
+
isMetadataChanged = true;
|
|
11356
|
+
if (!isNewlyCreated) {
|
|
11357
|
+
backfillTargets.push(indexName);
|
|
11358
|
+
}
|
|
11359
|
+
} else {
|
|
11360
|
+
const [_pk, existingConfig] = existingIndex;
|
|
11361
|
+
if (JSON.stringify(existingConfig) !== JSON.stringify(config)) {
|
|
11362
|
+
metadata.indices[indexName] = [_pk, config];
|
|
11363
|
+
isMetadataChanged = true;
|
|
11364
|
+
if (!isNewlyCreated) {
|
|
11365
|
+
backfillTargets.push(indexName);
|
|
11366
|
+
}
|
|
11367
|
+
}
|
|
11368
|
+
}
|
|
11369
|
+
}
|
|
11370
|
+
if (isMetadataChanged) {
|
|
11371
|
+
await this.api.updateDocumentInnerMetadata(metadata, tx);
|
|
11372
|
+
}
|
|
11373
|
+
this.indices = metadata.indices;
|
|
11374
|
+
this.registeredIndices = /* @__PURE__ */ new Map();
|
|
11375
|
+
this.fieldToIndices = /* @__PURE__ */ new Map();
|
|
11376
|
+
for (const [indexName, config] of targetIndices) {
|
|
11377
|
+
this.registeredIndices.set(indexName, config);
|
|
11378
|
+
const fields = this.getFieldsFromConfig(config);
|
|
11379
|
+
for (const field of fields) {
|
|
11380
|
+
this.indexedFields.add(field);
|
|
11381
|
+
if (!this.fieldToIndices.has(field)) {
|
|
11382
|
+
this.fieldToIndices.set(field, []);
|
|
11383
|
+
}
|
|
11384
|
+
this.fieldToIndices.get(field).push(indexName);
|
|
11385
|
+
}
|
|
11386
|
+
}
|
|
11387
|
+
for (const indexName of targetIndices.keys()) {
|
|
11388
|
+
if (metadata.indices[indexName]) {
|
|
11389
|
+
const tree = new import_dataply3.BPTreeAsync(
|
|
11390
|
+
new DocumentSerializeStrategyAsync(
|
|
11391
|
+
this.api.rowTableEngine.order,
|
|
11392
|
+
this.api,
|
|
11393
|
+
this.api.txContext,
|
|
11394
|
+
indexName
|
|
11395
|
+
),
|
|
11396
|
+
this.api.comparator
|
|
11397
|
+
);
|
|
11398
|
+
await tree.init();
|
|
11399
|
+
this.trees.set(indexName, tree);
|
|
11400
|
+
}
|
|
11401
|
+
}
|
|
11402
|
+
this.pendingBackfillFields = backfillTargets;
|
|
11403
|
+
return isMetadataChanged;
|
|
11404
|
+
}
|
|
11405
|
+
/**
|
|
11406
|
+
* Register an index. If called before init(), queues it.
|
|
11407
|
+
*/
|
|
11408
|
+
async registerIndex(name, option, tx) {
|
|
11409
|
+
if (!this.api.isDocInitialized) {
|
|
11410
|
+
this.pendingCreateIndices.set(name, option);
|
|
11411
|
+
return;
|
|
11412
|
+
}
|
|
11413
|
+
await this.registerIndexRuntime(name, option, tx);
|
|
11414
|
+
}
|
|
11415
|
+
/**
|
|
11416
|
+
* Register an index at runtime (after init).
|
|
11417
|
+
*/
|
|
11418
|
+
async registerIndexRuntime(name, option, tx) {
|
|
11419
|
+
const config = this.toIndexMetaConfig(option);
|
|
11420
|
+
if (this.registeredIndices.has(name)) {
|
|
11421
|
+
throw new Error(`Index "${name}" already exists.`);
|
|
11422
|
+
}
|
|
11423
|
+
await this.api.runWithDefaultWrite(async (tx2) => {
|
|
11424
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx2);
|
|
11425
|
+
metadata.indices[name] = [-1, config];
|
|
11426
|
+
await this.api.updateDocumentInnerMetadata(metadata, tx2);
|
|
11427
|
+
this.indices = metadata.indices;
|
|
11428
|
+
this.registeredIndices.set(name, config);
|
|
11429
|
+
const fields = this.getFieldsFromConfig(config);
|
|
11430
|
+
for (let i = 0; i < fields.length; i++) {
|
|
11431
|
+
const field = fields[i];
|
|
11432
|
+
this.indexedFields.add(field);
|
|
11433
|
+
if (!this.fieldToIndices.has(field)) {
|
|
11434
|
+
this.fieldToIndices.set(field, []);
|
|
11435
|
+
}
|
|
11436
|
+
this.fieldToIndices.get(field).push(name);
|
|
11437
|
+
}
|
|
11438
|
+
const tree = new import_dataply3.BPTreeAsync(
|
|
10764
11439
|
new DocumentSerializeStrategyAsync(
|
|
10765
|
-
this.rowTableEngine.order,
|
|
10766
|
-
this,
|
|
10767
|
-
this.txContext,
|
|
11440
|
+
this.api.rowTableEngine.order,
|
|
11441
|
+
this.api,
|
|
11442
|
+
this.api.txContext,
|
|
10768
11443
|
name
|
|
10769
11444
|
),
|
|
10770
|
-
this.comparator
|
|
11445
|
+
this.api.comparator
|
|
10771
11446
|
);
|
|
10772
11447
|
await tree.init();
|
|
10773
11448
|
this.trees.set(name, tree);
|
|
@@ -10779,26 +11454,23 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
10779
11454
|
}
|
|
10780
11455
|
/**
|
|
10781
11456
|
* Drop (remove) a named index.
|
|
10782
|
-
* Removes the index from metadata, in-memory maps, and trees.
|
|
10783
|
-
* The '_id' index cannot be dropped.
|
|
10784
|
-
* @param name The name of the index to drop
|
|
10785
11457
|
*/
|
|
10786
11458
|
async dropIndex(name, tx) {
|
|
10787
11459
|
if (name === "_id") {
|
|
10788
11460
|
throw new Error('Cannot drop the "_id" index.');
|
|
10789
11461
|
}
|
|
10790
|
-
if (!this.
|
|
11462
|
+
if (!this.api.isDocInitialized) {
|
|
10791
11463
|
this.pendingCreateIndices.delete(name);
|
|
10792
11464
|
return;
|
|
10793
11465
|
}
|
|
10794
11466
|
if (!this.registeredIndices.has(name)) {
|
|
10795
11467
|
throw new Error(`Index "${name}" does not exist.`);
|
|
10796
11468
|
}
|
|
10797
|
-
await this.runWithDefaultWrite(async (tx2) => {
|
|
11469
|
+
await this.api.runWithDefaultWrite(async (tx2) => {
|
|
10798
11470
|
const config = this.registeredIndices.get(name);
|
|
10799
|
-
const metadata = await this.getDocumentInnerMetadata(tx2);
|
|
11471
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx2);
|
|
10800
11472
|
delete metadata.indices[name];
|
|
10801
|
-
await this.updateDocumentInnerMetadata(metadata, tx2);
|
|
11473
|
+
await this.api.updateDocumentInnerMetadata(metadata, tx2);
|
|
10802
11474
|
this.indices = metadata.indices;
|
|
10803
11475
|
this.registeredIndices.delete(name);
|
|
10804
11476
|
const fields = this.getFieldsFromConfig(config);
|
|
@@ -10817,7 +11489,107 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
10817
11489
|
}
|
|
10818
11490
|
}
|
|
10819
11491
|
}
|
|
10820
|
-
this.trees.delete(name);
|
|
11492
|
+
this.trees.delete(name);
|
|
11493
|
+
}, tx);
|
|
11494
|
+
}
|
|
11495
|
+
/**
|
|
11496
|
+
* Backfill indices for newly created indices after data was inserted.
|
|
11497
|
+
*/
|
|
11498
|
+
async backfillIndices(tx) {
|
|
11499
|
+
return this.api.runWithDefaultWrite(async (tx2) => {
|
|
11500
|
+
if (this.pendingBackfillFields.length === 0) {
|
|
11501
|
+
return 0;
|
|
11502
|
+
}
|
|
11503
|
+
const backfillTargets = this.pendingBackfillFields;
|
|
11504
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx2);
|
|
11505
|
+
if (metadata.lastId === 0) {
|
|
11506
|
+
return 0;
|
|
11507
|
+
}
|
|
11508
|
+
let indexTxMap = {};
|
|
11509
|
+
for (const indexName of backfillTargets) {
|
|
11510
|
+
const tree = this.trees.get(indexName);
|
|
11511
|
+
if (tree && indexName !== "_id") {
|
|
11512
|
+
indexTxMap[indexName] = await tree.createTransaction();
|
|
11513
|
+
}
|
|
11514
|
+
}
|
|
11515
|
+
let backfilledCount = 0;
|
|
11516
|
+
let chunkCount = 0;
|
|
11517
|
+
const CHUNK_SIZE = 1e3;
|
|
11518
|
+
const idTree = this.trees.get("_id");
|
|
11519
|
+
if (!idTree) {
|
|
11520
|
+
throw new Error("ID tree not found");
|
|
11521
|
+
}
|
|
11522
|
+
const stream = idTree.whereStream({
|
|
11523
|
+
primaryGte: { v: 0 }
|
|
11524
|
+
});
|
|
11525
|
+
for await (const [k, complexValue] of stream) {
|
|
11526
|
+
const doc = await this.api.getDocument(k, tx2);
|
|
11527
|
+
if (!doc) continue;
|
|
11528
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
11529
|
+
for (let i = 0, len = backfillTargets.length; i < len; i++) {
|
|
11530
|
+
const indexName = backfillTargets[i];
|
|
11531
|
+
if (!(indexName in indexTxMap)) continue;
|
|
11532
|
+
const config = this.registeredIndices.get(indexName);
|
|
11533
|
+
if (!config) continue;
|
|
11534
|
+
const btx = indexTxMap[indexName];
|
|
11535
|
+
if (config.type === "fts") {
|
|
11536
|
+
const primaryField = this.getPrimaryField(config);
|
|
11537
|
+
const v = flatDoc[primaryField];
|
|
11538
|
+
if (v === void 0 || typeof v !== "string") continue;
|
|
11539
|
+
const ftsConfig = this.getFtsConfig(config);
|
|
11540
|
+
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
11541
|
+
const batchInsertData = [];
|
|
11542
|
+
for (let i2 = 0, len2 = tokens.length; i2 < len2; i2++) {
|
|
11543
|
+
const token = tokens[i2];
|
|
11544
|
+
const keyToInsert = this.getTokenKey(k, token);
|
|
11545
|
+
const entry = { k, v: token };
|
|
11546
|
+
batchInsertData.push([keyToInsert, entry]);
|
|
11547
|
+
}
|
|
11548
|
+
await btx.batchInsert(batchInsertData);
|
|
11549
|
+
} else {
|
|
11550
|
+
const indexVal = this.getIndexValue(config, flatDoc);
|
|
11551
|
+
if (indexVal === void 0) continue;
|
|
11552
|
+
const entry = { k, v: indexVal };
|
|
11553
|
+
const batchInsertData = [[k, entry]];
|
|
11554
|
+
await btx.batchInsert(batchInsertData);
|
|
11555
|
+
}
|
|
11556
|
+
}
|
|
11557
|
+
backfilledCount++;
|
|
11558
|
+
chunkCount++;
|
|
11559
|
+
if (chunkCount >= CHUNK_SIZE) {
|
|
11560
|
+
try {
|
|
11561
|
+
for (const btx of Object.values(indexTxMap)) {
|
|
11562
|
+
await btx.commit();
|
|
11563
|
+
}
|
|
11564
|
+
} catch (err) {
|
|
11565
|
+
for (const btx of Object.values(indexTxMap)) {
|
|
11566
|
+
await btx.rollback();
|
|
11567
|
+
}
|
|
11568
|
+
throw err;
|
|
11569
|
+
}
|
|
11570
|
+
for (const indexName of backfillTargets) {
|
|
11571
|
+
const tree = this.trees.get(indexName);
|
|
11572
|
+
if (tree && indexName !== "_id") {
|
|
11573
|
+
indexTxMap[indexName] = await tree.createTransaction();
|
|
11574
|
+
}
|
|
11575
|
+
}
|
|
11576
|
+
chunkCount = 0;
|
|
11577
|
+
}
|
|
11578
|
+
}
|
|
11579
|
+
if (chunkCount > 0) {
|
|
11580
|
+
try {
|
|
11581
|
+
for (const btx of Object.values(indexTxMap)) {
|
|
11582
|
+
await btx.commit();
|
|
11583
|
+
}
|
|
11584
|
+
} catch (err) {
|
|
11585
|
+
for (const btx of Object.values(indexTxMap)) {
|
|
11586
|
+
await btx.rollback();
|
|
11587
|
+
}
|
|
11588
|
+
throw err;
|
|
11589
|
+
}
|
|
11590
|
+
}
|
|
11591
|
+
this.pendingBackfillFields = [];
|
|
11592
|
+
return backfilledCount;
|
|
10821
11593
|
}, tx);
|
|
10822
11594
|
}
|
|
10823
11595
|
/**
|
|
@@ -10880,9 +11652,7 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
10880
11652
|
return [];
|
|
10881
11653
|
}
|
|
10882
11654
|
/**
|
|
10883
|
-
* Get the primary field of an index
|
|
10884
|
-
* For btree: first field in fields array.
|
|
10885
|
-
* For fts: the single field.
|
|
11655
|
+
* Get the primary field of an index.
|
|
10886
11656
|
*/
|
|
10887
11657
|
getPrimaryField(config) {
|
|
10888
11658
|
if (config.type === "btree") {
|
|
@@ -10891,11 +11661,7 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
10891
11661
|
return config.fields;
|
|
10892
11662
|
}
|
|
10893
11663
|
/**
|
|
10894
|
-
*
|
|
10895
|
-
* - 단일 필드 btree: Primitive (단일 값)
|
|
10896
|
-
* - 복합 필드 btree: Primitive[] (필드 순서대로 배열)
|
|
10897
|
-
* - fts: 별도 처리 (이 메서드 사용 안 함)
|
|
10898
|
-
* @returns undefined면 해당 문서에 필수 필드가 없으므로 인덱싱 스킵
|
|
11664
|
+
* Create B+Tree value string for indexing a document
|
|
10899
11665
|
*/
|
|
10900
11666
|
getIndexValue(config, flatDoc) {
|
|
10901
11667
|
if (config.type !== "btree") return void 0;
|
|
@@ -10912,7 +11678,7 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
10912
11678
|
return values;
|
|
10913
11679
|
}
|
|
10914
11680
|
/**
|
|
10915
|
-
* Get FTSConfig from IndexMetaConfig
|
|
11681
|
+
* Get FTSConfig from IndexMetaConfig
|
|
10916
11682
|
*/
|
|
10917
11683
|
getFtsConfig(config) {
|
|
10918
11684
|
if (config.type !== "fts") return null;
|
|
@@ -10921,171 +11687,265 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
10921
11687
|
}
|
|
10922
11688
|
return { type: "fts", tokenizer: "whitespace" };
|
|
10923
11689
|
}
|
|
10924
|
-
|
|
10925
|
-
return
|
|
10926
|
-
|
|
10927
|
-
|
|
10928
|
-
|
|
11690
|
+
getTokenKey(pk, token) {
|
|
11691
|
+
return pk + ":" + token;
|
|
11692
|
+
}
|
|
11693
|
+
};
|
|
11694
|
+
|
|
11695
|
+
// src/utils/catchPromise.ts
|
|
11696
|
+
async function catchPromise(promise) {
|
|
11697
|
+
return promise.then((res) => [void 0, res]).catch((reason) => [reason]);
|
|
11698
|
+
}
|
|
11699
|
+
|
|
11700
|
+
// src/core/MutationManager.ts
|
|
11701
|
+
var MutationManager = class {
|
|
11702
|
+
constructor(api) {
|
|
11703
|
+
this.api = api;
|
|
11704
|
+
}
|
|
11705
|
+
async insertDocumentInternal(document, tx) {
|
|
11706
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
11707
|
+
const id = ++metadata.lastId;
|
|
11708
|
+
await this.api.updateDocumentInnerMetadata(metadata, tx);
|
|
11709
|
+
const dataplyDocument = Object.assign({
|
|
11710
|
+
_id: id
|
|
11711
|
+
}, document);
|
|
11712
|
+
const pk = await this.api.insert(JSON.stringify(dataplyDocument), true, tx);
|
|
11713
|
+
return {
|
|
11714
|
+
pk,
|
|
11715
|
+
id,
|
|
11716
|
+
document: dataplyDocument
|
|
11717
|
+
};
|
|
11718
|
+
}
|
|
11719
|
+
async insertSingleDocument(document, tx) {
|
|
11720
|
+
return this.api.runWithDefaultWrite(async (tx2) => {
|
|
11721
|
+
const { pk: dpk, document: dataplyDocument } = await this.insertDocumentInternal(document, tx2);
|
|
11722
|
+
const flattenDocument = this.api.flattenDocument(dataplyDocument);
|
|
11723
|
+
for (const [indexName, config] of this.api.indexManager.registeredIndices) {
|
|
11724
|
+
const tree = this.api.trees.get(indexName);
|
|
11725
|
+
if (!tree) continue;
|
|
11726
|
+
if (config.type === "fts") {
|
|
11727
|
+
const primaryField = this.api.indexManager.getPrimaryField(config);
|
|
11728
|
+
const v = flattenDocument[primaryField];
|
|
11729
|
+
if (v === void 0 || typeof v !== "string") continue;
|
|
11730
|
+
const ftsConfig = this.api.indexManager.getFtsConfig(config);
|
|
11731
|
+
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
11732
|
+
for (let i = 0, len = tokens.length; i < len; i++) {
|
|
11733
|
+
const token = tokens[i];
|
|
11734
|
+
const keyToInsert = this.api.indexManager.getTokenKey(dpk, token);
|
|
11735
|
+
const [error] = await catchPromise(tree.insert(keyToInsert, { k: dpk, v: token }));
|
|
11736
|
+
if (error) throw error;
|
|
11737
|
+
}
|
|
11738
|
+
} else {
|
|
11739
|
+
const indexVal = this.api.indexManager.getIndexValue(config, flattenDocument);
|
|
11740
|
+
if (indexVal === void 0) continue;
|
|
11741
|
+
const [error] = await catchPromise(tree.insert(dpk, { k: dpk, v: indexVal }));
|
|
11742
|
+
if (error) throw error;
|
|
11743
|
+
}
|
|
10929
11744
|
}
|
|
10930
|
-
|
|
11745
|
+
await this.api.analysisManager.notifyInsert([flattenDocument], tx2);
|
|
11746
|
+
return dataplyDocument._id;
|
|
10931
11747
|
}, tx);
|
|
10932
11748
|
}
|
|
10933
|
-
|
|
10934
|
-
|
|
10935
|
-
|
|
10936
|
-
|
|
10937
|
-
|
|
10938
|
-
|
|
10939
|
-
|
|
10940
|
-
|
|
10941
|
-
|
|
10942
|
-
|
|
10943
|
-
|
|
10944
|
-
|
|
10945
|
-
|
|
10946
|
-
|
|
10947
|
-
|
|
10948
|
-
|
|
10949
|
-
|
|
10950
|
-
|
|
10951
|
-
|
|
10952
|
-
if (tree && indexName !== "_id") {
|
|
10953
|
-
indexTxMap[indexName] = await tree.createTransaction();
|
|
10954
|
-
}
|
|
11749
|
+
async insertBatchDocuments(documents, tx) {
|
|
11750
|
+
return this.api.runWithDefaultWrite(async (tx2) => {
|
|
11751
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx2);
|
|
11752
|
+
const startId = metadata.lastId + 1;
|
|
11753
|
+
metadata.lastId += documents.length;
|
|
11754
|
+
await this.api.updateDocumentInnerMetadata(metadata, tx2);
|
|
11755
|
+
const ids = [];
|
|
11756
|
+
const dataplyDocuments = [];
|
|
11757
|
+
const flattenedData = [];
|
|
11758
|
+
for (let i = 0, len = documents.length; i < len; i++) {
|
|
11759
|
+
const id = startId + i;
|
|
11760
|
+
const dataplyDocument = Object.assign({
|
|
11761
|
+
_id: id
|
|
11762
|
+
}, documents[i]);
|
|
11763
|
+
const stringified = JSON.stringify(dataplyDocument);
|
|
11764
|
+
dataplyDocuments.push(stringified);
|
|
11765
|
+
const flattenDocument = this.api.flattenDocument(dataplyDocument);
|
|
11766
|
+
flattenedData.push({ pk: -1, data: flattenDocument });
|
|
11767
|
+
ids.push(id);
|
|
10955
11768
|
}
|
|
10956
|
-
|
|
10957
|
-
let
|
|
10958
|
-
|
|
10959
|
-
const idTree = this.trees.get("_id");
|
|
10960
|
-
if (!idTree) {
|
|
10961
|
-
throw new Error("ID tree not found");
|
|
11769
|
+
const pks = await this.api.insertBatch(dataplyDocuments, true, tx2);
|
|
11770
|
+
for (let i = 0, len = pks.length; i < len; i++) {
|
|
11771
|
+
flattenedData[i].pk = pks[i];
|
|
10962
11772
|
}
|
|
10963
|
-
const
|
|
10964
|
-
|
|
10965
|
-
|
|
10966
|
-
|
|
10967
|
-
const
|
|
10968
|
-
if (
|
|
10969
|
-
|
|
10970
|
-
|
|
10971
|
-
|
|
10972
|
-
|
|
10973
|
-
|
|
10974
|
-
if (!config) continue;
|
|
10975
|
-
const btx = indexTxMap[indexName];
|
|
10976
|
-
if (config.type === "fts") {
|
|
10977
|
-
const primaryField = this.getPrimaryField(config);
|
|
10978
|
-
const v = flatDoc[primaryField];
|
|
11773
|
+
for (const [indexName, config] of this.api.indexManager.registeredIndices) {
|
|
11774
|
+
const tree = this.api.trees.get(indexName);
|
|
11775
|
+
if (!tree) continue;
|
|
11776
|
+
const treeTx = await tree.createTransaction();
|
|
11777
|
+
const batchInsertData = [];
|
|
11778
|
+
if (config.type === "fts") {
|
|
11779
|
+
const primaryField = this.api.indexManager.getPrimaryField(config);
|
|
11780
|
+
const ftsConfig = this.api.indexManager.getFtsConfig(config);
|
|
11781
|
+
for (let i = 0, len = flattenedData.length; i < len; i++) {
|
|
11782
|
+
const item = flattenedData[i];
|
|
11783
|
+
const v = item.data[primaryField];
|
|
10979
11784
|
if (v === void 0 || typeof v !== "string") continue;
|
|
10980
|
-
const ftsConfig = this.getFtsConfig(config);
|
|
10981
11785
|
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
10982
|
-
|
|
10983
|
-
|
|
10984
|
-
|
|
10985
|
-
const keyToInsert = this.getTokenKey(k, token);
|
|
10986
|
-
const entry = { k, v: token };
|
|
10987
|
-
batchInsertData.push([keyToInsert, entry]);
|
|
11786
|
+
for (let j = 0, tLen = tokens.length; j < tLen; j++) {
|
|
11787
|
+
const token = tokens[j];
|
|
11788
|
+
batchInsertData.push([this.api.indexManager.getTokenKey(item.pk, token), { k: item.pk, v: token }]);
|
|
10988
11789
|
}
|
|
10989
|
-
|
|
10990
|
-
|
|
10991
|
-
|
|
11790
|
+
}
|
|
11791
|
+
} else {
|
|
11792
|
+
for (let i = 0, len = flattenedData.length; i < len; i++) {
|
|
11793
|
+
const item = flattenedData[i];
|
|
11794
|
+
const indexVal = this.api.indexManager.getIndexValue(config, item.data);
|
|
10992
11795
|
if (indexVal === void 0) continue;
|
|
10993
|
-
|
|
10994
|
-
const batchInsertData = [[k, entry]];
|
|
10995
|
-
await btx.batchInsert(batchInsertData);
|
|
11796
|
+
batchInsertData.push([item.pk, { k: item.pk, v: indexVal }]);
|
|
10996
11797
|
}
|
|
10997
11798
|
}
|
|
10998
|
-
|
|
10999
|
-
|
|
11000
|
-
|
|
11001
|
-
|
|
11002
|
-
|
|
11003
|
-
|
|
11004
|
-
|
|
11005
|
-
|
|
11006
|
-
|
|
11007
|
-
|
|
11799
|
+
const [error] = await catchPromise(treeTx.batchInsert(batchInsertData));
|
|
11800
|
+
if (error) {
|
|
11801
|
+
throw error;
|
|
11802
|
+
}
|
|
11803
|
+
const res = await treeTx.commit();
|
|
11804
|
+
if (!res.success) {
|
|
11805
|
+
throw res.error;
|
|
11806
|
+
}
|
|
11807
|
+
}
|
|
11808
|
+
const flatDocs = [];
|
|
11809
|
+
for (let i = 0, len = flattenedData.length; i < len; i++) {
|
|
11810
|
+
flatDocs.push(flattenedData[i].data);
|
|
11811
|
+
}
|
|
11812
|
+
await this.api.analysisManager.notifyInsert(flatDocs, tx2);
|
|
11813
|
+
return ids;
|
|
11814
|
+
}, tx);
|
|
11815
|
+
}
|
|
11816
|
+
async updateInternal(query, computeUpdatedDoc, tx) {
|
|
11817
|
+
const pks = await this.api.queryManager.getKeys(query);
|
|
11818
|
+
let updatedCount = 0;
|
|
11819
|
+
const updatePairs = [];
|
|
11820
|
+
const treeTxs = /* @__PURE__ */ new Map();
|
|
11821
|
+
for (const [indexName, tree] of this.api.trees) {
|
|
11822
|
+
treeTxs.set(indexName, await tree.createTransaction());
|
|
11823
|
+
}
|
|
11824
|
+
treeTxs.delete("_id");
|
|
11825
|
+
for (let i = 0, len = pks.length; i < len; i++) {
|
|
11826
|
+
const pk = pks[i];
|
|
11827
|
+
const doc = await this.api.getDocument(pk, tx);
|
|
11828
|
+
if (!doc) continue;
|
|
11829
|
+
const updatedDoc = computeUpdatedDoc(doc);
|
|
11830
|
+
const oldFlatDoc = this.api.flattenDocument(doc);
|
|
11831
|
+
const newFlatDoc = this.api.flattenDocument(updatedDoc);
|
|
11832
|
+
for (const [indexName, treeTx] of treeTxs) {
|
|
11833
|
+
const config = this.api.indexManager.registeredIndices.get(indexName);
|
|
11834
|
+
if (!config) continue;
|
|
11835
|
+
if (config.type === "fts") {
|
|
11836
|
+
const primaryField = this.api.indexManager.getPrimaryField(config);
|
|
11837
|
+
const oldV = oldFlatDoc[primaryField];
|
|
11838
|
+
const newV = newFlatDoc[primaryField];
|
|
11839
|
+
if (oldV === newV) continue;
|
|
11840
|
+
const ftsConfig = this.api.indexManager.getFtsConfig(config);
|
|
11841
|
+
if (typeof oldV === "string") {
|
|
11842
|
+
const oldTokens = ftsConfig ? tokenize(oldV, ftsConfig) : [oldV];
|
|
11843
|
+
for (let j = 0, jLen = oldTokens.length; j < jLen; j++) {
|
|
11844
|
+
await treeTx.delete(this.api.indexManager.getTokenKey(pk, oldTokens[j]), { k: pk, v: oldTokens[j] });
|
|
11008
11845
|
}
|
|
11009
|
-
throw err;
|
|
11010
11846
|
}
|
|
11011
|
-
|
|
11012
|
-
const
|
|
11013
|
-
|
|
11014
|
-
|
|
11847
|
+
if (typeof newV === "string") {
|
|
11848
|
+
const newTokens = ftsConfig ? tokenize(newV, ftsConfig) : [newV];
|
|
11849
|
+
const batchInsertData = [];
|
|
11850
|
+
for (let j = 0, jLen = newTokens.length; j < jLen; j++) {
|
|
11851
|
+
batchInsertData.push([this.api.indexManager.getTokenKey(pk, newTokens[j]), { k: pk, v: newTokens[j] }]);
|
|
11015
11852
|
}
|
|
11853
|
+
await treeTx.batchInsert(batchInsertData);
|
|
11016
11854
|
}
|
|
11017
|
-
|
|
11018
|
-
|
|
11019
|
-
|
|
11020
|
-
|
|
11021
|
-
|
|
11022
|
-
|
|
11023
|
-
await btx.commit();
|
|
11855
|
+
} else {
|
|
11856
|
+
const oldIndexVal = this.api.indexManager.getIndexValue(config, oldFlatDoc);
|
|
11857
|
+
const newIndexVal = this.api.indexManager.getIndexValue(config, newFlatDoc);
|
|
11858
|
+
if (JSON.stringify(oldIndexVal) === JSON.stringify(newIndexVal)) continue;
|
|
11859
|
+
if (oldIndexVal !== void 0) {
|
|
11860
|
+
await treeTx.delete(pk, { k: pk, v: oldIndexVal });
|
|
11024
11861
|
}
|
|
11025
|
-
|
|
11026
|
-
|
|
11027
|
-
await btx.rollback();
|
|
11862
|
+
if (newIndexVal !== void 0) {
|
|
11863
|
+
await treeTx.batchInsert([[pk, { k: pk, v: newIndexVal }]]);
|
|
11028
11864
|
}
|
|
11029
|
-
throw err;
|
|
11030
11865
|
}
|
|
11031
11866
|
}
|
|
11032
|
-
|
|
11033
|
-
|
|
11034
|
-
|
|
11035
|
-
}
|
|
11036
|
-
createDocumentInnerMetadata(indices) {
|
|
11037
|
-
return {
|
|
11038
|
-
magicString: "document-dataply",
|
|
11039
|
-
version: 1,
|
|
11040
|
-
createdAt: Date.now(),
|
|
11041
|
-
updatedAt: Date.now(),
|
|
11042
|
-
lastId: 0,
|
|
11043
|
-
schemeVersion: 0,
|
|
11044
|
-
indices
|
|
11045
|
-
};
|
|
11046
|
-
}
|
|
11047
|
-
async initializeDocumentFile(tx) {
|
|
11048
|
-
const metadata = await this.select(1, false, tx);
|
|
11049
|
-
if (metadata) {
|
|
11050
|
-
throw new Error("Document metadata already exists");
|
|
11867
|
+
updatePairs.push({ oldDocument: oldFlatDoc, newDocument: newFlatDoc });
|
|
11868
|
+
await this.api.update(pk, JSON.stringify(updatedDoc), tx);
|
|
11869
|
+
updatedCount++;
|
|
11051
11870
|
}
|
|
11052
|
-
const
|
|
11053
|
-
|
|
11054
|
-
|
|
11055
|
-
|
|
11056
|
-
|
|
11057
|
-
|
|
11058
|
-
|
|
11059
|
-
|
|
11060
|
-
return false;
|
|
11871
|
+
for (const [indexName, treeTx] of treeTxs) {
|
|
11872
|
+
const result = await treeTx.commit();
|
|
11873
|
+
if (!result.success) {
|
|
11874
|
+
for (const rollbackTx of treeTxs.values()) {
|
|
11875
|
+
rollbackTx.rollback();
|
|
11876
|
+
}
|
|
11877
|
+
throw result.error;
|
|
11878
|
+
}
|
|
11061
11879
|
}
|
|
11062
|
-
|
|
11063
|
-
return
|
|
11880
|
+
await this.api.analysisManager.notifyUpdate(updatePairs, tx);
|
|
11881
|
+
return updatedCount;
|
|
11882
|
+
}
|
|
11883
|
+
async fullUpdate(query, newRecord, tx) {
|
|
11884
|
+
return this.api.runWithDefaultWrite(async (tx2) => {
|
|
11885
|
+
return this.updateInternal(query, (doc) => {
|
|
11886
|
+
const newDoc = typeof newRecord === "function" ? newRecord(doc) : newRecord;
|
|
11887
|
+
return { _id: doc._id, ...newDoc };
|
|
11888
|
+
}, tx2);
|
|
11889
|
+
}, tx);
|
|
11890
|
+
}
|
|
11891
|
+
async partialUpdate(query, newRecord, tx) {
|
|
11892
|
+
return this.api.runWithDefaultWrite(async (tx2) => {
|
|
11893
|
+
return this.updateInternal(query, (doc) => {
|
|
11894
|
+
const partialUpdateContent = typeof newRecord === "function" ? newRecord(doc) : newRecord;
|
|
11895
|
+
const finalUpdate = { ...partialUpdateContent };
|
|
11896
|
+
delete finalUpdate._id;
|
|
11897
|
+
return { ...doc, ...finalUpdate };
|
|
11898
|
+
}, tx2);
|
|
11899
|
+
}, tx);
|
|
11064
11900
|
}
|
|
11065
|
-
|
|
11066
|
-
|
|
11067
|
-
const
|
|
11068
|
-
|
|
11069
|
-
|
|
11070
|
-
|
|
11071
|
-
|
|
11901
|
+
async deleteDocuments(query, tx) {
|
|
11902
|
+
return this.api.runWithDefaultWrite(async (tx2) => {
|
|
11903
|
+
const pks = await this.api.queryManager.getKeys(query);
|
|
11904
|
+
let deletedCount = 0;
|
|
11905
|
+
const deletedFlatDocs = [];
|
|
11906
|
+
for (let i = 0, len = pks.length; i < len; i++) {
|
|
11907
|
+
const pk = pks[i];
|
|
11908
|
+
const doc = await this.api.getDocument(pk, tx2);
|
|
11909
|
+
if (!doc) continue;
|
|
11910
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
11911
|
+
for (const [indexName, tree] of this.api.trees) {
|
|
11912
|
+
const config = this.api.indexManager.registeredIndices.get(indexName);
|
|
11913
|
+
if (!config) continue;
|
|
11914
|
+
if (config.type === "fts") {
|
|
11915
|
+
const primaryField = this.api.indexManager.getPrimaryField(config);
|
|
11916
|
+
const v = flatDoc[primaryField];
|
|
11917
|
+
if (v === void 0 || typeof v !== "string") continue;
|
|
11918
|
+
const ftsConfig = this.api.indexManager.getFtsConfig(config);
|
|
11919
|
+
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
11920
|
+
for (let j = 0, jLen = tokens.length; j < jLen; j++) {
|
|
11921
|
+
await tree.delete(this.api.indexManager.getTokenKey(pk, tokens[j]), { k: pk, v: tokens[j] });
|
|
11922
|
+
}
|
|
11923
|
+
} else {
|
|
11924
|
+
const indexVal = this.api.indexManager.getIndexValue(config, flatDoc);
|
|
11925
|
+
if (indexVal === void 0) continue;
|
|
11926
|
+
await tree.delete(pk, { k: pk, v: indexVal });
|
|
11927
|
+
}
|
|
11928
|
+
}
|
|
11929
|
+
deletedFlatDocs.push(flatDoc);
|
|
11930
|
+
await this.api.delete(pk, true, tx2);
|
|
11931
|
+
deletedCount++;
|
|
11072
11932
|
}
|
|
11073
|
-
|
|
11074
|
-
|
|
11933
|
+
await this.api.analysisManager.notifyDelete(deletedFlatDocs, tx2);
|
|
11934
|
+
return deletedCount;
|
|
11935
|
+
}, tx);
|
|
11075
11936
|
}
|
|
11076
|
-
|
|
11077
|
-
|
|
11078
|
-
|
|
11079
|
-
|
|
11080
|
-
|
|
11081
|
-
|
|
11082
|
-
return this.flatten(document, "", {});
|
|
11937
|
+
};
|
|
11938
|
+
|
|
11939
|
+
// src/core/MetadataManager.ts
|
|
11940
|
+
var MetadataManager = class {
|
|
11941
|
+
constructor(api) {
|
|
11942
|
+
this.api = api;
|
|
11083
11943
|
}
|
|
11084
11944
|
async getDocumentMetadata(tx) {
|
|
11085
|
-
const metadata = await this.getMetadata(tx);
|
|
11945
|
+
const metadata = await this.api.getMetadata(tx);
|
|
11086
11946
|
const innerMetadata = await this.getDocumentInnerMetadata(tx);
|
|
11087
11947
|
const indices = [];
|
|
11088
|
-
for (const name of this.registeredIndices.keys()) {
|
|
11948
|
+
for (const name of this.api.indexManager.registeredIndices.keys()) {
|
|
11089
11949
|
if (name !== "_id") {
|
|
11090
11950
|
indices.push(name);
|
|
11091
11951
|
}
|
|
@@ -11100,24 +11960,17 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
11100
11960
|
};
|
|
11101
11961
|
}
|
|
11102
11962
|
async getDocumentInnerMetadata(tx) {
|
|
11103
|
-
const row = await this.select(1, false, tx);
|
|
11963
|
+
const row = await this.api.select(1, false, tx);
|
|
11104
11964
|
if (!row) {
|
|
11105
11965
|
throw new Error("Document metadata not found");
|
|
11106
11966
|
}
|
|
11107
11967
|
return JSON.parse(row);
|
|
11108
11968
|
}
|
|
11109
11969
|
async updateDocumentInnerMetadata(metadata, tx) {
|
|
11110
|
-
await this.update(1, JSON.stringify(metadata), tx);
|
|
11970
|
+
await this.api.update(1, JSON.stringify(metadata), tx);
|
|
11111
11971
|
}
|
|
11112
|
-
/**
|
|
11113
|
-
* Run a migration if the current schemeVersion is lower than the target version.
|
|
11114
|
-
* After the callback completes, schemeVersion is updated to the target version.
|
|
11115
|
-
* @param version The target scheme version
|
|
11116
|
-
* @param callback The migration callback
|
|
11117
|
-
* @param tx Optional transaction
|
|
11118
|
-
*/
|
|
11119
11972
|
async migration(version, callback, tx) {
|
|
11120
|
-
await this.runWithDefaultWrite(async (tx2) => {
|
|
11973
|
+
await this.api.runWithDefaultWrite(async (tx2) => {
|
|
11121
11974
|
const innerMetadata = await this.getDocumentInnerMetadata(tx2);
|
|
11122
11975
|
const currentVersion = innerMetadata.schemeVersion ?? 0;
|
|
11123
11976
|
if (currentVersion < version) {
|
|
@@ -11129,821 +11982,644 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
11129
11982
|
}
|
|
11130
11983
|
}, tx);
|
|
11131
11984
|
}
|
|
11132
|
-
|
|
11133
|
-
|
|
11134
|
-
|
|
11135
|
-
|
|
11136
|
-
|
|
11137
|
-
|
|
11138
|
-
|
|
11139
|
-
|
|
11140
|
-
|
|
11141
|
-
let newConditions;
|
|
11142
|
-
if (typeof conditions !== "object" || conditions === null) {
|
|
11143
|
-
newConditions = { primaryEqual: { v: conditions } };
|
|
11985
|
+
};
|
|
11986
|
+
|
|
11987
|
+
// src/core/DocumentFormatter.ts
|
|
11988
|
+
var DocumentFormatter = class {
|
|
11989
|
+
flattenInternal(obj, parentKey = "", result = {}) {
|
|
11990
|
+
for (const key in obj) {
|
|
11991
|
+
const newKey = parentKey ? `${parentKey}.${key}` : key;
|
|
11992
|
+
if (typeof obj[key] === "object" && obj[key] !== null) {
|
|
11993
|
+
this.flattenInternal(obj[key], newKey, result);
|
|
11144
11994
|
} else {
|
|
11145
|
-
|
|
11146
|
-
for (const operator in conditions) {
|
|
11147
|
-
const before = operator;
|
|
11148
|
-
const after = this.operatorConverters[before];
|
|
11149
|
-
const v = conditions[before];
|
|
11150
|
-
if (!after) {
|
|
11151
|
-
if (before === "match") {
|
|
11152
|
-
newConditions[before] = v;
|
|
11153
|
-
}
|
|
11154
|
-
continue;
|
|
11155
|
-
}
|
|
11156
|
-
if (before === "or" && Array.isArray(v)) {
|
|
11157
|
-
newConditions[after] = v.map((val) => ({ v: val }));
|
|
11158
|
-
} else if (before === "like") {
|
|
11159
|
-
newConditions[after] = v;
|
|
11160
|
-
} else {
|
|
11161
|
-
newConditions[after] = { v };
|
|
11162
|
-
}
|
|
11163
|
-
}
|
|
11995
|
+
result[newKey] = obj[key];
|
|
11164
11996
|
}
|
|
11165
|
-
result[field] = newConditions;
|
|
11166
11997
|
}
|
|
11167
11998
|
return result;
|
|
11168
11999
|
}
|
|
12000
|
+
flattenDocument(document) {
|
|
12001
|
+
return this.flattenInternal(document, "", {});
|
|
12002
|
+
}
|
|
12003
|
+
};
|
|
12004
|
+
|
|
12005
|
+
// src/core/AnalysisProvider.ts
|
|
12006
|
+
var AnalysisProvider = class {
|
|
12007
|
+
constructor(api) {
|
|
12008
|
+
this.api = api;
|
|
12009
|
+
}
|
|
12010
|
+
/** Overflow row PK assigned by AnalysisManager during initialization. */
|
|
12011
|
+
storageKey = -1;
|
|
12012
|
+
};
|
|
12013
|
+
|
|
12014
|
+
// src/core/RealtimeAnalysisProvider.ts
|
|
12015
|
+
var RealtimeAnalysisProvider = class extends AnalysisProvider {
|
|
12016
|
+
};
|
|
12017
|
+
|
|
12018
|
+
// src/core/IntervalAnalysisProvider.ts
|
|
12019
|
+
var IntervalAnalysisProvider = class extends AnalysisProvider {
|
|
11169
12020
|
/**
|
|
11170
|
-
*
|
|
11171
|
-
*
|
|
11172
|
-
*
|
|
11173
|
-
* @param
|
|
11174
|
-
* @
|
|
11175
|
-
* @param query 쿼리 객체
|
|
11176
|
-
* @param queryFields 쿼리에 포함된 필드 목록 집합
|
|
11177
|
-
* @param treeTx 조회를 수행할 B-Tree 트랜잭션 객체
|
|
11178
|
-
* @param orderByField 정렬에 사용할 필드명 (옵션)
|
|
11179
|
-
* @returns B-Tree 인덱스 후보 정보 (조건, 점수, 커버된 필드 등), 적합하지 않으면 null
|
|
12021
|
+
* Sample random documents from the entire dataset.
|
|
12022
|
+
* Fetches only PK index, then reads only the selected documents from disk.
|
|
12023
|
+
* @param sampleOptions Sampling strategy — either `{ rate }` or `{ count }`
|
|
12024
|
+
* @param tx Optional transaction
|
|
12025
|
+
* @returns Randomly selected documents
|
|
11180
12026
|
*/
|
|
11181
|
-
|
|
11182
|
-
const
|
|
11183
|
-
|
|
11184
|
-
|
|
11185
|
-
|
|
11186
|
-
|
|
11187
|
-
|
|
11188
|
-
|
|
11189
|
-
|
|
11190
|
-
|
|
11191
|
-
|
|
11192
|
-
|
|
11193
|
-
|
|
11194
|
-
|
|
11195
|
-
|
|
11196
|
-
|
|
11197
|
-
|
|
12027
|
+
async sample(sampleOptions, tx) {
|
|
12028
|
+
const pks = await this.api.queryManager.getKeys({});
|
|
12029
|
+
const total = pks.length;
|
|
12030
|
+
if (total === 0) return [];
|
|
12031
|
+
const k = "rate" in sampleOptions && sampleOptions.rate != null ? Math.ceil(total * Math.min(Math.max(sampleOptions.rate, 0), 1)) : sampleOptions.count;
|
|
12032
|
+
const sampleCount = Math.min(Math.max(k, 0), total);
|
|
12033
|
+
if (sampleCount === 0) return [];
|
|
12034
|
+
for (let i = 0; i < sampleCount; i++) {
|
|
12035
|
+
const j = i + Math.floor(Math.random() * (total - i));
|
|
12036
|
+
const tmp = pks[i];
|
|
12037
|
+
pks[i] = pks[j];
|
|
12038
|
+
pks[j] = tmp;
|
|
12039
|
+
}
|
|
12040
|
+
const selectedPks = pks.slice(0, sampleCount);
|
|
12041
|
+
const rawResults = await this.api.selectMany(selectedPks, false, tx);
|
|
12042
|
+
const docs = [];
|
|
12043
|
+
for (let i = 0, len = rawResults.length; i < len; i++) {
|
|
12044
|
+
const raw = rawResults[i];
|
|
12045
|
+
if (raw) docs.push(JSON.parse(raw));
|
|
12046
|
+
}
|
|
12047
|
+
return docs;
|
|
12048
|
+
}
|
|
12049
|
+
};
|
|
12050
|
+
|
|
12051
|
+
// src/core/analysis/FTSTermCount.ts
|
|
12052
|
+
var FTSTermCount = class extends IntervalAnalysisProvider {
|
|
12053
|
+
name = "fts_term_count";
|
|
12054
|
+
termCount = {};
|
|
12055
|
+
sampleSize = 0;
|
|
12056
|
+
async serialize(tx) {
|
|
12057
|
+
const docs = await this.sample({ count: 1e3 }, tx);
|
|
12058
|
+
this.termCount = {};
|
|
12059
|
+
this.sampleSize = docs.length;
|
|
12060
|
+
if (docs.length === 0) return JSON.stringify({ _sampleSize: 0 });
|
|
12061
|
+
const ftsIndices = /* @__PURE__ */ new Map();
|
|
12062
|
+
for (const [indexName, config] of this.api.indexManager.registeredIndices) {
|
|
12063
|
+
if (config.type === "fts") {
|
|
12064
|
+
ftsIndices.set(indexName, config);
|
|
11198
12065
|
}
|
|
11199
|
-
|
|
11200
|
-
|
|
11201
|
-
|
|
11202
|
-
|
|
11203
|
-
|
|
11204
|
-
|
|
11205
|
-
|
|
11206
|
-
|
|
11207
|
-
|
|
11208
|
-
|
|
11209
|
-
|
|
11210
|
-
|
|
11211
|
-
|
|
11212
|
-
|
|
11213
|
-
const val = cond.primaryEqual?.v ?? cond.equal?.v ?? cond.primaryEqual ?? cond.equal;
|
|
11214
|
-
score += 100;
|
|
11215
|
-
startValues.push(val);
|
|
11216
|
-
endValues.push(val);
|
|
11217
|
-
startOperator = "primaryGte";
|
|
11218
|
-
endOperator = "primaryLte";
|
|
11219
|
-
isBounded = true;
|
|
11220
|
-
} else if ("primaryGte" in cond || "gte" in cond) {
|
|
11221
|
-
const val = cond.primaryGte?.v ?? cond.gte?.v ?? cond.primaryGte ?? cond.gte;
|
|
11222
|
-
score += 50;
|
|
11223
|
-
isConsecutive = false;
|
|
11224
|
-
startValues.push(val);
|
|
11225
|
-
startOperator = "primaryGte";
|
|
11226
|
-
if (endValues.length > 0) endOperator = "primaryLte";
|
|
11227
|
-
isBounded = true;
|
|
11228
|
-
} else if ("primaryGt" in cond || "gt" in cond) {
|
|
11229
|
-
const val = cond.primaryGt?.v ?? cond.gt?.v ?? cond.primaryGt ?? cond.gt;
|
|
11230
|
-
score += 50;
|
|
11231
|
-
isConsecutive = false;
|
|
11232
|
-
startValues.push(val);
|
|
11233
|
-
startOperator = "primaryGt";
|
|
11234
|
-
if (endValues.length > 0) endOperator = "primaryLte";
|
|
11235
|
-
isBounded = true;
|
|
11236
|
-
} else if ("primaryLte" in cond || "lte" in cond) {
|
|
11237
|
-
const val = cond.primaryLte?.v ?? cond.lte?.v ?? cond.primaryLte ?? cond.lte;
|
|
11238
|
-
score += 50;
|
|
11239
|
-
isConsecutive = false;
|
|
11240
|
-
endValues.push(val);
|
|
11241
|
-
endOperator = "primaryLte";
|
|
11242
|
-
if (startValues.length > 0) startOperator = "primaryGte";
|
|
11243
|
-
isBounded = true;
|
|
11244
|
-
} else if ("primaryLt" in cond || "lt" in cond) {
|
|
11245
|
-
const val = cond.primaryLt?.v ?? cond.lt?.v ?? cond.primaryLt ?? cond.lt;
|
|
11246
|
-
score += 50;
|
|
11247
|
-
isConsecutive = false;
|
|
11248
|
-
endValues.push(val);
|
|
11249
|
-
endOperator = "primaryLt";
|
|
11250
|
-
if (startValues.length > 0) startOperator = "primaryGte";
|
|
11251
|
-
isBounded = true;
|
|
11252
|
-
} else if ("primaryOr" in cond || "or" in cond) {
|
|
11253
|
-
score += 20;
|
|
11254
|
-
isConsecutive = false;
|
|
11255
|
-
} else if ("like" in cond) {
|
|
11256
|
-
score += 15;
|
|
11257
|
-
isConsecutive = false;
|
|
11258
|
-
} else {
|
|
11259
|
-
score += 10;
|
|
11260
|
-
isConsecutive = false;
|
|
12066
|
+
}
|
|
12067
|
+
if (ftsIndices.size === 0) return JSON.stringify({ _sampleSize: this.sampleSize });
|
|
12068
|
+
for (let i = 0, len = docs.length; i < len; i++) {
|
|
12069
|
+
const doc = docs[i];
|
|
12070
|
+
const flatDoc = this.api.flattenDocument(doc);
|
|
12071
|
+
for (const [indexName, config] of ftsIndices) {
|
|
12072
|
+
const primaryField = this.api.indexManager.getPrimaryField(config);
|
|
12073
|
+
const v = flatDoc[primaryField];
|
|
12074
|
+
if (typeof v === "string" && v.length > 0) {
|
|
12075
|
+
const ftsConfig = this.api.indexManager.getFtsConfig(config);
|
|
12076
|
+
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
12077
|
+
const tokenizerStrategy = ftsConfig ? ftsConfig.tokenizer === "ngram" ? `${ftsConfig.gramSize}gram` : ftsConfig.tokenizer : "whitespace";
|
|
12078
|
+
if (!this.termCount[primaryField]) {
|
|
12079
|
+
this.termCount[primaryField] = {};
|
|
11261
12080
|
}
|
|
11262
|
-
if (!
|
|
11263
|
-
|
|
12081
|
+
if (!this.termCount[primaryField][tokenizerStrategy]) {
|
|
12082
|
+
this.termCount[primaryField][tokenizerStrategy] = {};
|
|
12083
|
+
}
|
|
12084
|
+
const targetMap = this.termCount[primaryField][tokenizerStrategy];
|
|
12085
|
+
for (let j = 0, len2 = tokens.length; j < len2; j++) {
|
|
12086
|
+
const token = tokens[j];
|
|
12087
|
+
targetMap[token] = (targetMap[token] || 0) + 1;
|
|
11264
12088
|
}
|
|
11265
|
-
}
|
|
11266
|
-
} else {
|
|
11267
|
-
if (field !== primaryField) {
|
|
11268
|
-
compositeVerifyFields.push(field);
|
|
11269
12089
|
}
|
|
11270
12090
|
}
|
|
11271
12091
|
}
|
|
11272
|
-
|
|
11273
|
-
|
|
11274
|
-
|
|
11275
|
-
|
|
11276
|
-
|
|
11277
|
-
|
|
11278
|
-
|
|
11279
|
-
|
|
11280
|
-
|
|
11281
|
-
builtCondition["primaryEqual"] = { v: startValues.length === 1 ? startValues[0] : startValues };
|
|
11282
|
-
} else {
|
|
11283
|
-
builtCondition[endOperator] = { v: endValues.length === 1 ? endValues[0] : endValues };
|
|
12092
|
+
const optimizedTermCount = {};
|
|
12093
|
+
for (const field in this.termCount) {
|
|
12094
|
+
optimizedTermCount[field] = {};
|
|
12095
|
+
for (const strategy in this.termCount[field]) {
|
|
12096
|
+
const tokenMap = this.termCount[field][strategy];
|
|
12097
|
+
const sorted = Object.entries(tokenMap).sort((a, b) => b[1] - a[1]).slice(0, 1e3);
|
|
12098
|
+
optimizedTermCount[field][strategy] = {};
|
|
12099
|
+
for (let i = 0, len = sorted.length; i < len; i++) {
|
|
12100
|
+
optimizedTermCount[field][strategy][sorted[i][0]] = sorted[i][1];
|
|
11284
12101
|
}
|
|
11285
12102
|
}
|
|
11286
|
-
if (Object.keys(builtCondition).length === 0) {
|
|
11287
|
-
Object.assign(builtCondition, query[primaryField] || {});
|
|
11288
|
-
}
|
|
11289
12103
|
}
|
|
11290
|
-
|
|
11291
|
-
|
|
11292
|
-
|
|
11293
|
-
|
|
11294
|
-
|
|
11295
|
-
|
|
11296
|
-
|
|
11297
|
-
|
|
11298
|
-
|
|
11299
|
-
|
|
11300
|
-
|
|
11301
|
-
|
|
11302
|
-
|
|
11303
|
-
|
|
11304
|
-
|
|
12104
|
+
this.termCount = optimizedTermCount;
|
|
12105
|
+
return JSON.stringify({ _sampleSize: this.sampleSize, ...this.termCount });
|
|
12106
|
+
}
|
|
12107
|
+
async load(data, tx) {
|
|
12108
|
+
this.termCount = {};
|
|
12109
|
+
this.sampleSize = 0;
|
|
12110
|
+
if (!data) {
|
|
12111
|
+
return;
|
|
12112
|
+
}
|
|
12113
|
+
try {
|
|
12114
|
+
const parsed = JSON.parse(data);
|
|
12115
|
+
if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
|
|
12116
|
+
const { _sampleSize, ...rest } = parsed;
|
|
12117
|
+
this.sampleSize = typeof _sampleSize === "number" ? _sampleSize : 0;
|
|
12118
|
+
this.termCount = rest;
|
|
12119
|
+
}
|
|
12120
|
+
} catch (e) {
|
|
12121
|
+
}
|
|
12122
|
+
}
|
|
12123
|
+
/**
|
|
12124
|
+
* 특정 field/strategy/token의 문서 빈도를 반환합니다.
|
|
12125
|
+
* 통계에 없으면 0을 반환합니다.
|
|
12126
|
+
*/
|
|
12127
|
+
getTermCount(field, strategy, token) {
|
|
12128
|
+
return this.termCount[field]?.[strategy]?.[token] ?? 0;
|
|
12129
|
+
}
|
|
12130
|
+
/**
|
|
12131
|
+
* 쿼리 토큰 배열에서 최소 빈도(AND 시맨틱스 상한선)를 반환합니다.
|
|
12132
|
+
* 통계가 없거나 sampleSize가 0이면 -1을 반환합니다.
|
|
12133
|
+
*/
|
|
12134
|
+
getMinTokenCount(field, strategy, tokens) {
|
|
12135
|
+
if (this.sampleSize === 0 || tokens.length === 0) return -1;
|
|
12136
|
+
let minCount = Infinity;
|
|
12137
|
+
for (let i = 0, len = tokens.length; i < len; i++) {
|
|
12138
|
+
const count = this.getTermCount(field, strategy, tokens[i]);
|
|
12139
|
+
if (count < minCount) minCount = count;
|
|
12140
|
+
}
|
|
12141
|
+
return minCount === Infinity ? -1 : minCount;
|
|
12142
|
+
}
|
|
12143
|
+
/**
|
|
12144
|
+
* 통계가 유효한지 여부를 반환합니다.
|
|
12145
|
+
*/
|
|
12146
|
+
get hasSampleData() {
|
|
12147
|
+
return this.sampleSize > 0;
|
|
12148
|
+
}
|
|
12149
|
+
/**
|
|
12150
|
+
* 통계 수집 시 사용된 샘플 크기를 반환합니다.
|
|
12151
|
+
*/
|
|
12152
|
+
getSampleSize() {
|
|
12153
|
+
return this.sampleSize;
|
|
12154
|
+
}
|
|
12155
|
+
};
|
|
12156
|
+
|
|
12157
|
+
// src/core/analysis/index.ts
|
|
12158
|
+
var BuiltinAnalysisProviders = [
|
|
12159
|
+
FTSTermCount
|
|
12160
|
+
];
|
|
12161
|
+
|
|
12162
|
+
// src/core/AnalysisManager.ts
|
|
12163
|
+
var AnalysisManager = class {
|
|
12164
|
+
constructor(api) {
|
|
12165
|
+
this.api = api;
|
|
12166
|
+
}
|
|
12167
|
+
providers = /* @__PURE__ */ new Map();
|
|
12168
|
+
/**
|
|
12169
|
+
* Register all built-in analysis providers.
|
|
12170
|
+
* Each provider class is instantiated with the API reference and registered.
|
|
12171
|
+
*/
|
|
12172
|
+
registerBuiltinProviders() {
|
|
12173
|
+
for (const Provider of BuiltinAnalysisProviders) {
|
|
12174
|
+
const instance = new Provider(this.api);
|
|
12175
|
+
this.registerProvider(instance);
|
|
12176
|
+
}
|
|
12177
|
+
}
|
|
12178
|
+
/**
|
|
12179
|
+
* Register an analysis provider.
|
|
12180
|
+
* @param provider The provider instance to register
|
|
12181
|
+
*/
|
|
12182
|
+
registerProvider(provider) {
|
|
12183
|
+
if (this.providers.has(provider.name)) {
|
|
12184
|
+
throw new Error(`Analysis provider "${provider.name}" is already registered.`);
|
|
12185
|
+
}
|
|
12186
|
+
this.providers.set(provider.name, provider);
|
|
12187
|
+
}
|
|
12188
|
+
/**
|
|
12189
|
+
* Get a registered analysis provider by name.
|
|
12190
|
+
* @param name The provider name
|
|
12191
|
+
* @returns The provider instance, or null if not found
|
|
12192
|
+
*/
|
|
12193
|
+
getProvider(name) {
|
|
12194
|
+
return this.providers.get(name) ?? null;
|
|
12195
|
+
}
|
|
12196
|
+
/**
|
|
12197
|
+
* Initialize all registered providers by loading existing data from disk.
|
|
12198
|
+
* Should be called after database initialization.
|
|
12199
|
+
* @param tx The transaction to use
|
|
12200
|
+
*/
|
|
12201
|
+
async initializeProviders(tx) {
|
|
12202
|
+
const header = await this.getOrCreateAnalysisHeader(tx);
|
|
12203
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
12204
|
+
for (const [name, provider] of this.providers) {
|
|
12205
|
+
if (header[name] !== null) {
|
|
12206
|
+
provider.storageKey = header[name];
|
|
12207
|
+
const raw = await this.api.select(header[name], false, tx);
|
|
12208
|
+
await provider.load(raw, tx);
|
|
12209
|
+
} else {
|
|
12210
|
+
const pk = await this.api.insertAsOverflow(JSON.stringify(null), false, tx);
|
|
12211
|
+
provider.storageKey = pk;
|
|
12212
|
+
header[name] = pk;
|
|
12213
|
+
await this.api.update(metadata.analysis, JSON.stringify(header), tx);
|
|
12214
|
+
await provider.load(null, tx);
|
|
11305
12215
|
}
|
|
11306
|
-
|
|
11307
|
-
|
|
12216
|
+
}
|
|
12217
|
+
}
|
|
12218
|
+
/**
|
|
12219
|
+
* Notify all realtime providers that documents were inserted.
|
|
12220
|
+
* Data is persisted immediately after each provider processes the mutation.
|
|
12221
|
+
* @param documents The flattened documents that were inserted
|
|
12222
|
+
* @param tx The transaction to use
|
|
12223
|
+
*/
|
|
12224
|
+
async notifyInsert(documents, tx) {
|
|
12225
|
+
if (documents.length === 0) return;
|
|
12226
|
+
for (const [name, provider] of this.providers) {
|
|
12227
|
+
if (provider instanceof RealtimeAnalysisProvider) {
|
|
12228
|
+
await provider.onInsert(documents);
|
|
12229
|
+
await this.setAnalysisData(name, await provider.serialize(tx), tx);
|
|
11308
12230
|
}
|
|
11309
12231
|
}
|
|
11310
|
-
return {
|
|
11311
|
-
tree: treeTx,
|
|
11312
|
-
condition: builtCondition,
|
|
11313
|
-
field: primaryField,
|
|
11314
|
-
indexName,
|
|
11315
|
-
isFtsMatch: false,
|
|
11316
|
-
score,
|
|
11317
|
-
compositeVerifyFields,
|
|
11318
|
-
coveredFields,
|
|
11319
|
-
isIndexOrderSupported
|
|
11320
|
-
};
|
|
11321
12232
|
}
|
|
11322
12233
|
/**
|
|
11323
|
-
*
|
|
11324
|
-
*
|
|
11325
|
-
*
|
|
11326
|
-
* @param
|
|
11327
|
-
* @param config 등록된 인덱스의 설정 객체
|
|
11328
|
-
* @param query 쿼리 객체
|
|
11329
|
-
* @param queryFields 쿼리에 포함된 필드 목록 집합
|
|
11330
|
-
* @param treeTx 조회를 수행할 B-Tree 트랜잭션 객체
|
|
11331
|
-
* @returns FTS 인덱스 후보 정보 (조건, 점수, 분석된 토큰 등), 적합하지 않으면 null
|
|
12234
|
+
* Notify all realtime providers that documents were deleted.
|
|
12235
|
+
* Data is persisted immediately after each provider processes the mutation.
|
|
12236
|
+
* @param documents The flattened documents that were deleted
|
|
12237
|
+
* @param tx The transaction to use
|
|
11332
12238
|
*/
|
|
11333
|
-
|
|
11334
|
-
|
|
11335
|
-
|
|
11336
|
-
|
|
11337
|
-
|
|
11338
|
-
|
|
11339
|
-
|
|
11340
|
-
|
|
11341
|
-
tree: treeTx,
|
|
11342
|
-
condition,
|
|
11343
|
-
field,
|
|
11344
|
-
indexName,
|
|
11345
|
-
isFtsMatch: true,
|
|
11346
|
-
matchTokens,
|
|
11347
|
-
score: 90,
|
|
11348
|
-
// FTS 쿼리는 기본적인 B-Tree 단일 검색(대략 101점)보다는 우선순위를 조금 낮게 가져가도록 90점 부여
|
|
11349
|
-
compositeVerifyFields: [],
|
|
11350
|
-
coveredFields: [field],
|
|
11351
|
-
isIndexOrderSupported: false
|
|
11352
|
-
};
|
|
12239
|
+
async notifyDelete(documents, tx) {
|
|
12240
|
+
if (documents.length === 0) return;
|
|
12241
|
+
for (const [name, provider] of this.providers) {
|
|
12242
|
+
if (provider instanceof RealtimeAnalysisProvider) {
|
|
12243
|
+
await provider.onDelete(documents);
|
|
12244
|
+
await this.setAnalysisData(name, await provider.serialize(tx), tx);
|
|
12245
|
+
}
|
|
12246
|
+
}
|
|
11353
12247
|
}
|
|
11354
12248
|
/**
|
|
11355
|
-
*
|
|
11356
|
-
*
|
|
11357
|
-
*
|
|
11358
|
-
* @param
|
|
11359
|
-
* @param orderByField Optional field name for orderBy optimization
|
|
11360
|
-
* @returns Driver and other candidates for query execution
|
|
12249
|
+
* Notify all realtime providers that documents were updated.
|
|
12250
|
+
* Data is persisted immediately after each provider processes the mutation.
|
|
12251
|
+
* @param pairs Array of { oldDocument, newDocument } pairs
|
|
12252
|
+
* @param tx The transaction to use
|
|
11361
12253
|
*/
|
|
11362
|
-
async
|
|
11363
|
-
|
|
11364
|
-
const
|
|
11365
|
-
|
|
11366
|
-
|
|
11367
|
-
|
|
11368
|
-
if (config.type === "btree") {
|
|
11369
|
-
const treeTx = await tree.createTransaction();
|
|
11370
|
-
const candidate = this.evaluateBTreeCandidate(
|
|
11371
|
-
indexName,
|
|
11372
|
-
config,
|
|
11373
|
-
query,
|
|
11374
|
-
queryFields,
|
|
11375
|
-
treeTx,
|
|
11376
|
-
orderByField
|
|
11377
|
-
);
|
|
11378
|
-
if (candidate) candidates.push(candidate);
|
|
11379
|
-
} else if (config.type === "fts") {
|
|
11380
|
-
const treeTx = await tree.createTransaction();
|
|
11381
|
-
const candidate = this.evaluateFTSCandidate(
|
|
11382
|
-
indexName,
|
|
11383
|
-
config,
|
|
11384
|
-
query,
|
|
11385
|
-
queryFields,
|
|
11386
|
-
treeTx
|
|
11387
|
-
);
|
|
11388
|
-
if (candidate) candidates.push(candidate);
|
|
12254
|
+
async notifyUpdate(pairs, tx) {
|
|
12255
|
+
if (pairs.length === 0) return;
|
|
12256
|
+
for (const [name, provider] of this.providers) {
|
|
12257
|
+
if (provider instanceof RealtimeAnalysisProvider) {
|
|
12258
|
+
await provider.onUpdate(pairs);
|
|
12259
|
+
await this.setAnalysisData(name, await provider.serialize(tx), tx);
|
|
11389
12260
|
}
|
|
11390
12261
|
}
|
|
11391
|
-
|
|
11392
|
-
|
|
11393
|
-
|
|
12262
|
+
}
|
|
12263
|
+
/**
|
|
12264
|
+
* Flush all interval providers' data to disk.
|
|
12265
|
+
* @param tx The transaction to use (must be a write transaction)
|
|
12266
|
+
*/
|
|
12267
|
+
async flush(tx) {
|
|
12268
|
+
for (const [name, provider] of this.providers) {
|
|
12269
|
+
if (provider instanceof IntervalAnalysisProvider) {
|
|
12270
|
+
await this.setAnalysisData(name, await provider.serialize(tx), tx);
|
|
11394
12271
|
}
|
|
11395
|
-
}
|
|
11396
|
-
|
|
11397
|
-
|
|
12272
|
+
}
|
|
12273
|
+
}
|
|
12274
|
+
/**
|
|
12275
|
+
* Get the analysis header row.
|
|
12276
|
+
* Returns null if no analysis header exists yet.
|
|
12277
|
+
* @param tx The transaction to use
|
|
12278
|
+
*/
|
|
12279
|
+
async getAnalysisHeader(tx) {
|
|
12280
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
12281
|
+
if (metadata.analysis == null) {
|
|
11398
12282
|
return null;
|
|
11399
12283
|
}
|
|
11400
|
-
|
|
11401
|
-
|
|
11402
|
-
|
|
11403
|
-
|
|
11404
|
-
|
|
11405
|
-
|
|
11406
|
-
|
|
11407
|
-
|
|
11408
|
-
|
|
11409
|
-
|
|
11410
|
-
|
|
11411
|
-
const
|
|
11412
|
-
|
|
11413
|
-
const
|
|
11414
|
-
if (
|
|
11415
|
-
|
|
12284
|
+
const row = await this.api.select(metadata.analysis, false, tx);
|
|
12285
|
+
if (!row) {
|
|
12286
|
+
return null;
|
|
12287
|
+
}
|
|
12288
|
+
return JSON.parse(row);
|
|
12289
|
+
}
|
|
12290
|
+
/**
|
|
12291
|
+
* Get the analysis header row, creating it if it doesn't exist.
|
|
12292
|
+
* @param tx The transaction to use (must be a write transaction)
|
|
12293
|
+
*/
|
|
12294
|
+
async getOrCreateAnalysisHeader(tx) {
|
|
12295
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
12296
|
+
if (metadata.analysis != null) {
|
|
12297
|
+
const row = await this.api.select(metadata.analysis, false, tx);
|
|
12298
|
+
if (row) {
|
|
12299
|
+
return JSON.parse(row);
|
|
11416
12300
|
}
|
|
11417
12301
|
}
|
|
11418
|
-
|
|
11419
|
-
|
|
11420
|
-
|
|
11421
|
-
|
|
11422
|
-
|
|
11423
|
-
};
|
|
12302
|
+
const header = {};
|
|
12303
|
+
const pk = await this.api.insertAsOverflow(JSON.stringify(header), false, tx);
|
|
12304
|
+
metadata.analysis = pk;
|
|
12305
|
+
await this.api.updateDocumentInnerMetadata(metadata, tx);
|
|
12306
|
+
return header;
|
|
11424
12307
|
}
|
|
11425
12308
|
/**
|
|
11426
|
-
* Get
|
|
11427
|
-
*
|
|
12309
|
+
* Get analysis data for a specific type as a raw string.
|
|
12310
|
+
* Returns null if the type doesn't exist in the analysis header.
|
|
12311
|
+
* @param type The analysis type name
|
|
12312
|
+
* @param tx The transaction to use
|
|
11428
12313
|
*/
|
|
11429
|
-
|
|
11430
|
-
const
|
|
11431
|
-
|
|
11432
|
-
|
|
11433
|
-
|
|
11434
|
-
|
|
12314
|
+
async getAnalysisData(type, tx) {
|
|
12315
|
+
const header = await this.getAnalysisHeader(tx);
|
|
12316
|
+
if (!header || header[type] == null) {
|
|
12317
|
+
return null;
|
|
12318
|
+
}
|
|
12319
|
+
const row = await this.api.select(header[type], false, tx);
|
|
12320
|
+
if (!row) {
|
|
12321
|
+
return null;
|
|
12322
|
+
}
|
|
12323
|
+
return row;
|
|
11435
12324
|
}
|
|
11436
|
-
|
|
11437
|
-
|
|
12325
|
+
/**
|
|
12326
|
+
* Set analysis data for a specific type.
|
|
12327
|
+
* Creates a new overflow row if the type doesn't exist yet,
|
|
12328
|
+
* or updates the existing row if it does.
|
|
12329
|
+
* @param type The analysis type name
|
|
12330
|
+
* @param data The raw string data to store
|
|
12331
|
+
* @param tx The transaction to use (must be a write transaction)
|
|
12332
|
+
*/
|
|
12333
|
+
async setAnalysisData(type, data, tx) {
|
|
12334
|
+
const header = await this.getOrCreateAnalysisHeader(tx);
|
|
12335
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
12336
|
+
if (header[type] != null) {
|
|
12337
|
+
await this.api.update(header[type], data, tx);
|
|
12338
|
+
} else {
|
|
12339
|
+
const pk = await this.api.insertAsOverflow(data, false, tx);
|
|
12340
|
+
header[type] = pk;
|
|
12341
|
+
await this.api.update(metadata.analysis, JSON.stringify(header), tx);
|
|
12342
|
+
}
|
|
11438
12343
|
}
|
|
11439
|
-
|
|
11440
|
-
|
|
11441
|
-
|
|
11442
|
-
|
|
11443
|
-
|
|
11444
|
-
|
|
11445
|
-
|
|
11446
|
-
|
|
11447
|
-
|
|
11448
|
-
|
|
11449
|
-
|
|
11450
|
-
|
|
11451
|
-
|
|
11452
|
-
|
|
11453
|
-
}
|
|
12344
|
+
/**
|
|
12345
|
+
* Delete analysis data for a specific type.
|
|
12346
|
+
* Removes the type entry from the analysis header.
|
|
12347
|
+
* @param type The analysis type name
|
|
12348
|
+
* @param tx The transaction to use (must be a write transaction)
|
|
12349
|
+
*/
|
|
12350
|
+
async deleteAnalysisData(type, tx) {
|
|
12351
|
+
const metadata = await this.api.getDocumentInnerMetadata(tx);
|
|
12352
|
+
if (metadata.analysis == null) {
|
|
12353
|
+
return false;
|
|
12354
|
+
}
|
|
12355
|
+
const header = await this.getAnalysisHeader(tx);
|
|
12356
|
+
if (!header || header[type] == null) {
|
|
12357
|
+
return false;
|
|
11454
12358
|
}
|
|
12359
|
+
await this.api.delete(header[type], false, tx);
|
|
12360
|
+
delete header[type];
|
|
12361
|
+
await this.api.update(metadata.analysis, JSON.stringify(header), tx);
|
|
12362
|
+
return true;
|
|
11455
12363
|
}
|
|
11456
12364
|
/**
|
|
11457
|
-
*
|
|
12365
|
+
* Check if analysis data exists for a specific type.
|
|
12366
|
+
* @param type The analysis type name
|
|
12367
|
+
* @param tx The transaction to use
|
|
11458
12368
|
*/
|
|
11459
|
-
|
|
11460
|
-
|
|
11461
|
-
|
|
11462
|
-
{ filterValues, order }
|
|
11463
|
-
);
|
|
12369
|
+
async hasAnalysisData(type, tx) {
|
|
12370
|
+
const header = await this.getAnalysisHeader(tx);
|
|
12371
|
+
return header != null && header[type] != null;
|
|
11464
12372
|
}
|
|
11465
12373
|
/**
|
|
11466
|
-
*
|
|
11467
|
-
*
|
|
12374
|
+
* Get all registered analysis type names.
|
|
12375
|
+
* @param tx The transaction to use
|
|
11468
12376
|
*/
|
|
11469
|
-
async
|
|
11470
|
-
const
|
|
11471
|
-
|
|
11472
|
-
|
|
11473
|
-
this.verboseQuery(normalizedQuery),
|
|
11474
|
-
orderBy
|
|
11475
|
-
);
|
|
11476
|
-
if (!selectivity) return new Float64Array(0);
|
|
11477
|
-
const { driver, others, rollback } = selectivity;
|
|
11478
|
-
const useIndexOrder = orderBy === void 0 || driver.isIndexOrderSupported;
|
|
11479
|
-
const candidates = [driver, ...others];
|
|
11480
|
-
let keys = void 0;
|
|
11481
|
-
for (let i = 0, len = candidates.length; i < len; i++) {
|
|
11482
|
-
const candidate = candidates[i];
|
|
11483
|
-
const currentOrder = useIndexOrder ? sortOrder : void 0;
|
|
11484
|
-
if (candidate.isFtsMatch && candidate.matchTokens && candidate.matchTokens.length > 0) {
|
|
11485
|
-
const stream = this.applyCandidateByFTSStream(
|
|
11486
|
-
candidate,
|
|
11487
|
-
candidate.matchTokens,
|
|
11488
|
-
keys,
|
|
11489
|
-
currentOrder
|
|
11490
|
-
);
|
|
11491
|
-
keys = /* @__PURE__ */ new Set();
|
|
11492
|
-
for await (const pk of stream) keys.add(pk);
|
|
11493
|
-
} else {
|
|
11494
|
-
const stream = this.applyCandidateStream(candidate, keys, currentOrder);
|
|
11495
|
-
keys = /* @__PURE__ */ new Set();
|
|
11496
|
-
for await (const pk of stream) keys.add(pk);
|
|
11497
|
-
}
|
|
12377
|
+
async getAnalysisTypes(tx) {
|
|
12378
|
+
const header = await this.getAnalysisHeader(tx);
|
|
12379
|
+
if (!header) {
|
|
12380
|
+
return [];
|
|
11498
12381
|
}
|
|
11499
|
-
|
|
11500
|
-
|
|
12382
|
+
return Object.keys(header);
|
|
12383
|
+
}
|
|
12384
|
+
};
|
|
12385
|
+
|
|
12386
|
+
// src/core/documentAPI.ts
|
|
12387
|
+
var DocumentDataplyAPI = class extends import_dataply4.DataplyAPI {
|
|
12388
|
+
comparator = new DocumentValueComparator();
|
|
12389
|
+
_initialized = false;
|
|
12390
|
+
optimizer;
|
|
12391
|
+
queryManager;
|
|
12392
|
+
indexManager;
|
|
12393
|
+
mutationManager;
|
|
12394
|
+
metadataManager;
|
|
12395
|
+
documentFormatter;
|
|
12396
|
+
analysisManager;
|
|
12397
|
+
constructor(file, options) {
|
|
12398
|
+
super(file, options);
|
|
12399
|
+
this.optimizer = new Optimizer(this);
|
|
12400
|
+
this.queryManager = new QueryManager(this, this.optimizer);
|
|
12401
|
+
this.indexManager = new IndexManager(this);
|
|
12402
|
+
this.mutationManager = new MutationManager(this);
|
|
12403
|
+
this.metadataManager = new MetadataManager(this);
|
|
12404
|
+
this.documentFormatter = new DocumentFormatter();
|
|
12405
|
+
this.analysisManager = new AnalysisManager(this);
|
|
12406
|
+
this.hook.onceAfter("init", async (tx, isNewlyCreated) => {
|
|
12407
|
+
if (isNewlyCreated) {
|
|
12408
|
+
await this.initializeDocumentFile(tx);
|
|
12409
|
+
}
|
|
12410
|
+
if (!await this.verifyDocumentFile(tx)) {
|
|
12411
|
+
throw new Error("Document metadata verification failed");
|
|
12412
|
+
}
|
|
12413
|
+
const metadata = await this.getDocumentInnerMetadata(tx);
|
|
12414
|
+
await this.indexManager.initializeIndices(metadata, isNewlyCreated, tx);
|
|
12415
|
+
this.analysisManager.registerBuiltinProviders();
|
|
12416
|
+
await this.analysisManager.initializeProviders(tx);
|
|
12417
|
+
this._initialized = true;
|
|
12418
|
+
return tx;
|
|
12419
|
+
});
|
|
11501
12420
|
}
|
|
11502
12421
|
/**
|
|
11503
|
-
*
|
|
11504
|
-
* selectDocuments에서 사용하며, 나머지 조건(others)은 스트리밍 중 tree.verify()로 검증합니다.
|
|
11505
|
-
* @returns 드라이버 키 스트림, others 후보 목록, rollback 함수. 또는 null.
|
|
12422
|
+
* Whether the document database has been initialized.
|
|
11506
12423
|
*/
|
|
11507
|
-
|
|
11508
|
-
|
|
11509
|
-
const normalizedQuery = isQueryEmpty ? { _id: { gte: 0 } } : query;
|
|
11510
|
-
const selectivity = await this.getSelectivityCandidate(
|
|
11511
|
-
this.verboseQuery(normalizedQuery),
|
|
11512
|
-
orderBy
|
|
11513
|
-
);
|
|
11514
|
-
if (!selectivity) return null;
|
|
11515
|
-
const { driver, others, compositeVerifyConditions, rollback } = selectivity;
|
|
11516
|
-
const useIndexOrder = orderBy === void 0 || driver.isIndexOrderSupported;
|
|
11517
|
-
const currentOrder = useIndexOrder ? sortOrder : void 0;
|
|
11518
|
-
let keysStream;
|
|
11519
|
-
if (driver.isFtsMatch && driver.matchTokens && driver.matchTokens.length > 0) {
|
|
11520
|
-
keysStream = this.applyCandidateByFTSStream(
|
|
11521
|
-
driver,
|
|
11522
|
-
driver.matchTokens,
|
|
11523
|
-
void 0,
|
|
11524
|
-
currentOrder
|
|
11525
|
-
);
|
|
11526
|
-
} else {
|
|
11527
|
-
keysStream = this.applyCandidateStream(driver, void 0, currentOrder);
|
|
11528
|
-
}
|
|
11529
|
-
return {
|
|
11530
|
-
keysStream,
|
|
11531
|
-
others,
|
|
11532
|
-
compositeVerifyConditions,
|
|
11533
|
-
isDriverOrderByField: useIndexOrder,
|
|
11534
|
-
rollback
|
|
11535
|
-
};
|
|
12424
|
+
get isDocInitialized() {
|
|
12425
|
+
return this._initialized;
|
|
11536
12426
|
}
|
|
11537
|
-
|
|
11538
|
-
|
|
11539
|
-
|
|
11540
|
-
|
|
11541
|
-
|
|
11542
|
-
|
|
11543
|
-
|
|
11544
|
-
|
|
11545
|
-
|
|
11546
|
-
|
|
11547
|
-
|
|
11548
|
-
|
|
11549
|
-
|
|
12427
|
+
get indices() {
|
|
12428
|
+
return this.indexManager.indices;
|
|
12429
|
+
}
|
|
12430
|
+
get trees() {
|
|
12431
|
+
return this.indexManager.trees;
|
|
12432
|
+
}
|
|
12433
|
+
get indexedFields() {
|
|
12434
|
+
return this.indexManager.indexedFields;
|
|
12435
|
+
}
|
|
12436
|
+
/**
|
|
12437
|
+
* Register an index.
|
|
12438
|
+
* @param name The name of the index
|
|
12439
|
+
* @param option The option of the index
|
|
12440
|
+
* @param tx The transaction to use
|
|
12441
|
+
*/
|
|
12442
|
+
async registerIndex(name, option, tx) {
|
|
12443
|
+
return this.indexManager.registerIndex(name, option, tx);
|
|
12444
|
+
}
|
|
12445
|
+
/**
|
|
12446
|
+
* Drop (remove) a named index.
|
|
12447
|
+
* @param name The name of the index
|
|
12448
|
+
* @param tx The transaction to use
|
|
12449
|
+
*/
|
|
12450
|
+
async dropIndex(name, tx) {
|
|
12451
|
+
return this.indexManager.dropIndex(name, tx);
|
|
11550
12452
|
}
|
|
11551
12453
|
/**
|
|
11552
|
-
*
|
|
11553
|
-
* @param
|
|
12454
|
+
* Get a document by its primary key.
|
|
12455
|
+
* @param pk The primary key of the document
|
|
11554
12456
|
* @param tx The transaction to use
|
|
11555
|
-
* @returns The
|
|
12457
|
+
* @returns The document
|
|
11556
12458
|
*/
|
|
11557
|
-
async
|
|
11558
|
-
return this.
|
|
11559
|
-
const
|
|
11560
|
-
|
|
11561
|
-
|
|
11562
|
-
const tree = this.trees.get(indexName);
|
|
11563
|
-
if (!tree) continue;
|
|
11564
|
-
if (config.type === "fts") {
|
|
11565
|
-
const primaryField = this.getPrimaryField(config);
|
|
11566
|
-
const v = flattenDocument[primaryField];
|
|
11567
|
-
if (v === void 0 || typeof v !== "string") continue;
|
|
11568
|
-
const ftsConfig = this.getFtsConfig(config);
|
|
11569
|
-
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
11570
|
-
for (let i = 0, len = tokens.length; i < len; i++) {
|
|
11571
|
-
const token = tokens[i];
|
|
11572
|
-
const keyToInsert = this.getTokenKey(dpk, token);
|
|
11573
|
-
const [error] = await catchPromise(tree.insert(keyToInsert, { k: dpk, v: token }));
|
|
11574
|
-
if (error) throw error;
|
|
11575
|
-
}
|
|
11576
|
-
} else {
|
|
11577
|
-
const indexVal = this.getIndexValue(config, flattenDocument);
|
|
11578
|
-
if (indexVal === void 0) continue;
|
|
11579
|
-
const [error] = await catchPromise(tree.insert(dpk, { k: dpk, v: indexVal }));
|
|
11580
|
-
if (error) throw error;
|
|
11581
|
-
}
|
|
12459
|
+
async getDocument(pk, tx) {
|
|
12460
|
+
return this.runWithDefault(async (tx2) => {
|
|
12461
|
+
const row = await this.select(pk, false, tx2);
|
|
12462
|
+
if (!row) {
|
|
12463
|
+
throw new Error(`Document not found with PK: ${pk}`);
|
|
11582
12464
|
}
|
|
11583
|
-
return
|
|
12465
|
+
return JSON.parse(row);
|
|
11584
12466
|
}, tx);
|
|
11585
12467
|
}
|
|
11586
12468
|
/**
|
|
11587
|
-
*
|
|
11588
|
-
*
|
|
12469
|
+
* Backfill indices for newly created indices after data was inserted.
|
|
12470
|
+
* Delegated to IndexManager.
|
|
12471
|
+
*/
|
|
12472
|
+
async backfillIndices(tx) {
|
|
12473
|
+
return this.indexManager.backfillIndices(tx);
|
|
12474
|
+
}
|
|
12475
|
+
/**
|
|
12476
|
+
* Flush all interval analysis providers, forcing statistics to be recalculated.
|
|
12477
|
+
* Call this after bulk inserts or periodically to keep statistics fresh.
|
|
11589
12478
|
* @param tx The transaction to use
|
|
11590
|
-
* @returns The primary keys of the inserted documents
|
|
11591
12479
|
*/
|
|
11592
|
-
async
|
|
12480
|
+
async flushAnalysis(tx) {
|
|
11593
12481
|
return this.runWithDefaultWrite(async (tx2) => {
|
|
11594
|
-
|
|
11595
|
-
const startId = metadata.lastId + 1;
|
|
11596
|
-
metadata.lastId += documents.length;
|
|
11597
|
-
await this.updateDocumentInnerMetadata(metadata, tx2);
|
|
11598
|
-
const ids = [];
|
|
11599
|
-
const dataplyDocuments = [];
|
|
11600
|
-
const flattenedData = [];
|
|
11601
|
-
for (let i = 0, len = documents.length; i < len; i++) {
|
|
11602
|
-
const id = startId + i;
|
|
11603
|
-
const dataplyDocument = Object.assign({
|
|
11604
|
-
_id: id
|
|
11605
|
-
}, documents[i]);
|
|
11606
|
-
const stringified = JSON.stringify(dataplyDocument);
|
|
11607
|
-
dataplyDocuments.push(stringified);
|
|
11608
|
-
const flattenDocument = this.flattenDocument(dataplyDocument);
|
|
11609
|
-
flattenedData.push({ pk: -1, data: flattenDocument });
|
|
11610
|
-
ids.push(id);
|
|
11611
|
-
}
|
|
11612
|
-
const pks = await super.insertBatch(dataplyDocuments, true, tx2);
|
|
11613
|
-
for (let i = 0, len = pks.length; i < len; i++) {
|
|
11614
|
-
flattenedData[i].pk = pks[i];
|
|
11615
|
-
}
|
|
11616
|
-
for (const [indexName, config] of this.registeredIndices) {
|
|
11617
|
-
const tree = this.trees.get(indexName);
|
|
11618
|
-
if (!tree) continue;
|
|
11619
|
-
const treeTx = await tree.createTransaction();
|
|
11620
|
-
const batchInsertData = [];
|
|
11621
|
-
if (config.type === "fts") {
|
|
11622
|
-
const primaryField = this.getPrimaryField(config);
|
|
11623
|
-
const ftsConfig = this.getFtsConfig(config);
|
|
11624
|
-
for (let i = 0, len = flattenedData.length; i < len; i++) {
|
|
11625
|
-
const item = flattenedData[i];
|
|
11626
|
-
const v = item.data[primaryField];
|
|
11627
|
-
if (v === void 0 || typeof v !== "string") continue;
|
|
11628
|
-
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
11629
|
-
for (let j = 0, tLen = tokens.length; j < tLen; j++) {
|
|
11630
|
-
const token = tokens[j];
|
|
11631
|
-
batchInsertData.push([this.getTokenKey(item.pk, token), { k: item.pk, v: token }]);
|
|
11632
|
-
}
|
|
11633
|
-
}
|
|
11634
|
-
} else {
|
|
11635
|
-
for (let i = 0, len = flattenedData.length; i < len; i++) {
|
|
11636
|
-
const item = flattenedData[i];
|
|
11637
|
-
const indexVal = this.getIndexValue(config, item.data);
|
|
11638
|
-
if (indexVal === void 0) continue;
|
|
11639
|
-
batchInsertData.push([item.pk, { k: item.pk, v: indexVal }]);
|
|
11640
|
-
}
|
|
11641
|
-
}
|
|
11642
|
-
const [error] = await catchPromise(treeTx.batchInsert(batchInsertData));
|
|
11643
|
-
if (error) {
|
|
11644
|
-
throw error;
|
|
11645
|
-
}
|
|
11646
|
-
const res = await treeTx.commit();
|
|
11647
|
-
if (!res.success) {
|
|
11648
|
-
throw res.error;
|
|
11649
|
-
}
|
|
11650
|
-
}
|
|
11651
|
-
return ids;
|
|
12482
|
+
await this.analysisManager.flush(tx2);
|
|
11652
12483
|
}, tx);
|
|
11653
12484
|
}
|
|
12485
|
+
createDocumentInnerMetadata(indices) {
|
|
12486
|
+
return {
|
|
12487
|
+
magicString: "document-dataply",
|
|
12488
|
+
version: 1,
|
|
12489
|
+
createdAt: Date.now(),
|
|
12490
|
+
updatedAt: Date.now(),
|
|
12491
|
+
lastId: 0,
|
|
12492
|
+
schemeVersion: 0,
|
|
12493
|
+
indices
|
|
12494
|
+
};
|
|
12495
|
+
}
|
|
11654
12496
|
/**
|
|
11655
|
-
*
|
|
11656
|
-
* @param query The query to use
|
|
11657
|
-
* @param computeUpdatedDoc Function that computes the updated document from the original
|
|
12497
|
+
* Initialize the document database file.
|
|
11658
12498
|
* @param tx The transaction to use
|
|
11659
|
-
* @returns The number of updated documents
|
|
11660
12499
|
*/
|
|
11661
|
-
async
|
|
11662
|
-
const
|
|
11663
|
-
|
|
11664
|
-
|
|
11665
|
-
for (const [indexName, tree] of this.trees) {
|
|
11666
|
-
treeTxs.set(indexName, await tree.createTransaction());
|
|
11667
|
-
}
|
|
11668
|
-
treeTxs.delete("_id");
|
|
11669
|
-
for (let i = 0, len = pks.length; i < len; i++) {
|
|
11670
|
-
const pk = pks[i];
|
|
11671
|
-
const doc = await this.getDocument(pk, tx);
|
|
11672
|
-
if (!doc) continue;
|
|
11673
|
-
const updatedDoc = computeUpdatedDoc(doc);
|
|
11674
|
-
const oldFlatDoc = this.flattenDocument(doc);
|
|
11675
|
-
const newFlatDoc = this.flattenDocument(updatedDoc);
|
|
11676
|
-
for (const [indexName, treeTx] of treeTxs) {
|
|
11677
|
-
const config = this.registeredIndices.get(indexName);
|
|
11678
|
-
if (!config) continue;
|
|
11679
|
-
if (config.type === "fts") {
|
|
11680
|
-
const primaryField = this.getPrimaryField(config);
|
|
11681
|
-
const oldV = oldFlatDoc[primaryField];
|
|
11682
|
-
const newV = newFlatDoc[primaryField];
|
|
11683
|
-
if (oldV === newV) continue;
|
|
11684
|
-
const ftsConfig = this.getFtsConfig(config);
|
|
11685
|
-
if (typeof oldV === "string") {
|
|
11686
|
-
const oldTokens = ftsConfig ? tokenize(oldV, ftsConfig) : [oldV];
|
|
11687
|
-
for (let j = 0, jLen = oldTokens.length; j < jLen; j++) {
|
|
11688
|
-
await treeTx.delete(this.getTokenKey(pk, oldTokens[j]), { k: pk, v: oldTokens[j] });
|
|
11689
|
-
}
|
|
11690
|
-
}
|
|
11691
|
-
if (typeof newV === "string") {
|
|
11692
|
-
const newTokens = ftsConfig ? tokenize(newV, ftsConfig) : [newV];
|
|
11693
|
-
const batchInsertData = [];
|
|
11694
|
-
for (let j = 0, jLen = newTokens.length; j < jLen; j++) {
|
|
11695
|
-
batchInsertData.push([this.getTokenKey(pk, newTokens[j]), { k: pk, v: newTokens[j] }]);
|
|
11696
|
-
}
|
|
11697
|
-
await treeTx.batchInsert(batchInsertData);
|
|
11698
|
-
}
|
|
11699
|
-
} else {
|
|
11700
|
-
const oldIndexVal = this.getIndexValue(config, oldFlatDoc);
|
|
11701
|
-
const newIndexVal = this.getIndexValue(config, newFlatDoc);
|
|
11702
|
-
if (JSON.stringify(oldIndexVal) === JSON.stringify(newIndexVal)) continue;
|
|
11703
|
-
if (oldIndexVal !== void 0) {
|
|
11704
|
-
await treeTx.delete(pk, { k: pk, v: oldIndexVal });
|
|
11705
|
-
}
|
|
11706
|
-
if (newIndexVal !== void 0) {
|
|
11707
|
-
await treeTx.batchInsert([[pk, { k: pk, v: newIndexVal }]]);
|
|
11708
|
-
}
|
|
11709
|
-
}
|
|
11710
|
-
}
|
|
11711
|
-
await this.update(pk, JSON.stringify(updatedDoc), tx);
|
|
11712
|
-
updatedCount++;
|
|
12500
|
+
async initializeDocumentFile(tx) {
|
|
12501
|
+
const metadata = await this.select(1, false, tx);
|
|
12502
|
+
if (metadata) {
|
|
12503
|
+
throw new Error("Document metadata already exists");
|
|
11713
12504
|
}
|
|
11714
|
-
|
|
11715
|
-
|
|
11716
|
-
|
|
11717
|
-
|
|
11718
|
-
|
|
11719
|
-
|
|
11720
|
-
|
|
11721
|
-
|
|
12505
|
+
const metaObj = this.createDocumentInnerMetadata({
|
|
12506
|
+
_id: [-1, {
|
|
12507
|
+
type: "btree",
|
|
12508
|
+
fields: ["_id"]
|
|
12509
|
+
}]
|
|
12510
|
+
});
|
|
12511
|
+
await this.insertAsOverflow(JSON.stringify(metaObj), false, tx);
|
|
12512
|
+
}
|
|
12513
|
+
/**
|
|
12514
|
+
* Verify the document database file.
|
|
12515
|
+
* @param tx The transaction to use
|
|
12516
|
+
* @returns True if the document database file is valid, false otherwise
|
|
12517
|
+
*/
|
|
12518
|
+
async verifyDocumentFile(tx) {
|
|
12519
|
+
const row = await this.select(1, false, tx);
|
|
12520
|
+
if (!row) {
|
|
12521
|
+
return false;
|
|
11722
12522
|
}
|
|
11723
|
-
|
|
12523
|
+
const data = JSON.parse(row);
|
|
12524
|
+
return data.magicString === "document-dataply" && data.version === 1;
|
|
11724
12525
|
}
|
|
11725
12526
|
/**
|
|
11726
|
-
*
|
|
11727
|
-
* @param
|
|
11728
|
-
* @
|
|
12527
|
+
* returns flattened document
|
|
12528
|
+
* @param document
|
|
12529
|
+
* @returns
|
|
12530
|
+
*/
|
|
12531
|
+
flattenDocument(document) {
|
|
12532
|
+
return this.documentFormatter.flattenDocument(document);
|
|
12533
|
+
}
|
|
12534
|
+
/**
|
|
12535
|
+
* Get the document metadata.
|
|
11729
12536
|
* @param tx The transaction to use
|
|
11730
|
-
* @returns The
|
|
12537
|
+
* @returns The document metadata
|
|
11731
12538
|
*/
|
|
11732
|
-
async
|
|
11733
|
-
return this.
|
|
11734
|
-
return this.updateInternal(query, (doc) => {
|
|
11735
|
-
const newDoc = typeof newRecord === "function" ? newRecord(doc) : newRecord;
|
|
11736
|
-
return { _id: doc._id, ...newDoc };
|
|
11737
|
-
}, tx2);
|
|
11738
|
-
}, tx);
|
|
12539
|
+
async getDocumentMetadata(tx) {
|
|
12540
|
+
return this.metadataManager.getDocumentMetadata(tx);
|
|
11739
12541
|
}
|
|
11740
12542
|
/**
|
|
11741
|
-
*
|
|
11742
|
-
* @param query The query to use
|
|
11743
|
-
* @param newRecord Partial document to merge, or function that receives current document and returns partial update
|
|
12543
|
+
* Get the document inner metadata.
|
|
11744
12544
|
* @param tx The transaction to use
|
|
11745
|
-
* @returns The
|
|
12545
|
+
* @returns The document inner metadata
|
|
11746
12546
|
*/
|
|
11747
|
-
async
|
|
11748
|
-
return this.
|
|
11749
|
-
return this.updateInternal(query, (doc) => {
|
|
11750
|
-
const partialUpdateContent = typeof newRecord === "function" ? newRecord(doc) : newRecord;
|
|
11751
|
-
const finalUpdate = { ...partialUpdateContent };
|
|
11752
|
-
delete finalUpdate._id;
|
|
11753
|
-
return { ...doc, ...finalUpdate };
|
|
11754
|
-
}, tx2);
|
|
11755
|
-
}, tx);
|
|
12547
|
+
async getDocumentInnerMetadata(tx) {
|
|
12548
|
+
return this.metadataManager.getDocumentInnerMetadata(tx);
|
|
11756
12549
|
}
|
|
11757
12550
|
/**
|
|
11758
|
-
*
|
|
11759
|
-
* @param
|
|
12551
|
+
* Update the document inner metadata.
|
|
12552
|
+
* @param metadata The document inner metadata
|
|
11760
12553
|
* @param tx The transaction to use
|
|
11761
|
-
* @returns The number of deleted documents
|
|
11762
12554
|
*/
|
|
11763
|
-
async
|
|
11764
|
-
return this.
|
|
11765
|
-
const pks = await this.getKeys(query);
|
|
11766
|
-
let deletedCount = 0;
|
|
11767
|
-
for (let i = 0, len = pks.length; i < len; i++) {
|
|
11768
|
-
const pk = pks[i];
|
|
11769
|
-
const doc = await this.getDocument(pk, tx2);
|
|
11770
|
-
if (!doc) continue;
|
|
11771
|
-
const flatDoc = this.flattenDocument(doc);
|
|
11772
|
-
for (const [indexName, tree] of this.trees) {
|
|
11773
|
-
const config = this.registeredIndices.get(indexName);
|
|
11774
|
-
if (!config) continue;
|
|
11775
|
-
if (config.type === "fts") {
|
|
11776
|
-
const primaryField = this.getPrimaryField(config);
|
|
11777
|
-
const v = flatDoc[primaryField];
|
|
11778
|
-
if (v === void 0 || typeof v !== "string") continue;
|
|
11779
|
-
const ftsConfig = this.getFtsConfig(config);
|
|
11780
|
-
const tokens = ftsConfig ? tokenize(v, ftsConfig) : [v];
|
|
11781
|
-
for (let j = 0, jLen = tokens.length; j < jLen; j++) {
|
|
11782
|
-
await tree.delete(this.getTokenKey(pk, tokens[j]), { k: pk, v: tokens[j] });
|
|
11783
|
-
}
|
|
11784
|
-
} else {
|
|
11785
|
-
const indexVal = this.getIndexValue(config, flatDoc);
|
|
11786
|
-
if (indexVal === void 0) continue;
|
|
11787
|
-
await tree.delete(pk, { k: pk, v: indexVal });
|
|
11788
|
-
}
|
|
11789
|
-
}
|
|
11790
|
-
await super.delete(pk, true, tx2);
|
|
11791
|
-
deletedCount++;
|
|
11792
|
-
}
|
|
11793
|
-
return deletedCount;
|
|
11794
|
-
}, tx);
|
|
12555
|
+
async updateDocumentInnerMetadata(metadata, tx) {
|
|
12556
|
+
return this.metadataManager.updateDocumentInnerMetadata(metadata, tx);
|
|
11795
12557
|
}
|
|
11796
12558
|
/**
|
|
11797
|
-
*
|
|
11798
|
-
*
|
|
12559
|
+
* Run a migration if the current schemeVersion is lower than the target version.
|
|
12560
|
+
* After the callback completes, schemeVersion is updated to the target version.
|
|
12561
|
+
* @param version The target scheme version
|
|
12562
|
+
* @param callback The migration callback
|
|
12563
|
+
* @param tx Optional transaction
|
|
12564
|
+
*/
|
|
12565
|
+
async migration(version, callback, tx) {
|
|
12566
|
+
return this.metadataManager.migration(version, callback, tx);
|
|
12567
|
+
}
|
|
12568
|
+
/**
|
|
12569
|
+
* Insert a document into the database
|
|
12570
|
+
* @param document The document to insert
|
|
11799
12571
|
* @param tx The transaction to use
|
|
11800
|
-
* @returns The
|
|
12572
|
+
* @returns The primary key of the inserted document
|
|
11801
12573
|
*/
|
|
11802
|
-
async
|
|
11803
|
-
return this.
|
|
11804
|
-
const pks = await this.getKeys(query);
|
|
11805
|
-
return pks.length;
|
|
11806
|
-
}, tx);
|
|
12574
|
+
async insertSingleDocument(document, tx) {
|
|
12575
|
+
return this.mutationManager.insertSingleDocument(document, tx);
|
|
11807
12576
|
}
|
|
11808
12577
|
/**
|
|
11809
|
-
*
|
|
12578
|
+
* Insert a batch of documents into the database
|
|
12579
|
+
* @param documents The documents to insert
|
|
12580
|
+
* @param tx The transaction to use
|
|
12581
|
+
* @returns The primary keys of the inserted documents
|
|
11810
12582
|
*/
|
|
11811
|
-
|
|
11812
|
-
|
|
11813
|
-
for (let i = 0, len = ftsConditions.length; i < len; i++) {
|
|
11814
|
-
const { field, matchTokens } = ftsConditions[i];
|
|
11815
|
-
const docValue = flatDoc[field];
|
|
11816
|
-
if (typeof docValue !== "string") return false;
|
|
11817
|
-
for (let j = 0, jLen = matchTokens.length; j < jLen; j++) {
|
|
11818
|
-
const token = matchTokens[j];
|
|
11819
|
-
if (!docValue.includes(token)) return false;
|
|
11820
|
-
}
|
|
11821
|
-
}
|
|
11822
|
-
return true;
|
|
12583
|
+
async insertBatchDocuments(documents, tx) {
|
|
12584
|
+
return this.mutationManager.insertBatchDocuments(documents, tx);
|
|
11823
12585
|
}
|
|
11824
12586
|
/**
|
|
11825
|
-
*
|
|
12587
|
+
* Fully update documents from the database that match the query
|
|
12588
|
+
* @param query The query to use
|
|
12589
|
+
* @param newRecord Complete document to replace with, or function that receives current document and returns new document
|
|
12590
|
+
* @param tx The transaction to use
|
|
12591
|
+
* @returns The number of updated documents
|
|
11826
12592
|
*/
|
|
11827
|
-
|
|
11828
|
-
|
|
11829
|
-
const flatDoc = this.flattenDocument(doc);
|
|
11830
|
-
for (let i = 0, len = conditions.length; i < len; i++) {
|
|
11831
|
-
const { field, condition } = conditions[i];
|
|
11832
|
-
const docValue = flatDoc[field];
|
|
11833
|
-
if (docValue === void 0) return false;
|
|
11834
|
-
const treeValue = { k: doc._id, v: docValue };
|
|
11835
|
-
if (!this.verifyValue(docValue, condition)) return false;
|
|
11836
|
-
}
|
|
11837
|
-
return true;
|
|
12593
|
+
async fullUpdate(query, newRecord, tx) {
|
|
12594
|
+
return this.mutationManager.fullUpdate(query, newRecord, tx);
|
|
11838
12595
|
}
|
|
11839
12596
|
/**
|
|
11840
|
-
*
|
|
12597
|
+
* Partially update documents from the database that match the query
|
|
12598
|
+
* @param query The query to use
|
|
12599
|
+
* @param newRecord Partial document to merge, or function that receives current document and returns partial update
|
|
12600
|
+
* @param tx The transaction to use
|
|
12601
|
+
* @returns The number of updated documents
|
|
11841
12602
|
*/
|
|
11842
|
-
|
|
11843
|
-
|
|
11844
|
-
return value === condition;
|
|
11845
|
-
}
|
|
11846
|
-
if ("primaryEqual" in condition) {
|
|
11847
|
-
return value === condition.primaryEqual?.v;
|
|
11848
|
-
}
|
|
11849
|
-
if ("primaryNotEqual" in condition) {
|
|
11850
|
-
return value !== condition.primaryNotEqual?.v;
|
|
11851
|
-
}
|
|
11852
|
-
if ("primaryLt" in condition) {
|
|
11853
|
-
return value !== null && condition.primaryLt?.v !== void 0 && value < condition.primaryLt.v;
|
|
11854
|
-
}
|
|
11855
|
-
if ("primaryLte" in condition) {
|
|
11856
|
-
return value !== null && condition.primaryLte?.v !== void 0 && value <= condition.primaryLte.v;
|
|
11857
|
-
}
|
|
11858
|
-
if ("primaryGt" in condition) {
|
|
11859
|
-
return value !== null && condition.primaryGt?.v !== void 0 && value > condition.primaryGt.v;
|
|
11860
|
-
}
|
|
11861
|
-
if ("primaryGte" in condition) {
|
|
11862
|
-
return value !== null && condition.primaryGte?.v !== void 0 && value >= condition.primaryGte.v;
|
|
11863
|
-
}
|
|
11864
|
-
if ("primaryOr" in condition && Array.isArray(condition.primaryOr)) {
|
|
11865
|
-
return condition.primaryOr.some((c) => value === c?.v);
|
|
11866
|
-
}
|
|
11867
|
-
return true;
|
|
12603
|
+
async partialUpdate(query, newRecord, tx) {
|
|
12604
|
+
return this.mutationManager.partialUpdate(query, newRecord, tx);
|
|
11868
12605
|
}
|
|
11869
12606
|
/**
|
|
11870
|
-
*
|
|
12607
|
+
* Delete documents from the database that match the query
|
|
12608
|
+
* @param query The query to use
|
|
12609
|
+
* @param tx The transaction to use
|
|
12610
|
+
* @returns The number of deleted documents
|
|
11871
12611
|
*/
|
|
11872
|
-
|
|
11873
|
-
|
|
11874
|
-
const { verySmallChunkSize, smallChunkSize } = this.getFreeMemoryChunkSize();
|
|
11875
|
-
if (chunkTotalSize < verySmallChunkSize) return currentChunkSize * 2;
|
|
11876
|
-
if (chunkTotalSize > smallChunkSize) return Math.max(Math.floor(currentChunkSize / 2), 20);
|
|
11877
|
-
return currentChunkSize;
|
|
12612
|
+
async deleteDocuments(query, tx) {
|
|
12613
|
+
return this.mutationManager.deleteDocuments(query, tx);
|
|
11878
12614
|
}
|
|
11879
12615
|
/**
|
|
11880
|
-
*
|
|
11881
|
-
*
|
|
12616
|
+
* Count documents from the database that match the query
|
|
12617
|
+
* @param query The query to use
|
|
12618
|
+
* @param tx The transaction to use
|
|
12619
|
+
* @returns The number of documents that match the query
|
|
11882
12620
|
*/
|
|
11883
|
-
async
|
|
11884
|
-
|
|
11885
|
-
const isFts = ftsConditions.length > 0;
|
|
11886
|
-
const isCompositeVerify = compositeVerifyConditions.length > 0;
|
|
11887
|
-
const isVerifyOthers = verifyOthers.length > 0;
|
|
11888
|
-
const isFiniteLimit = isFinite(limit);
|
|
11889
|
-
let currentChunkSize = isFiniteLimit ? limit : initialChunkSize;
|
|
11890
|
-
let chunk = [];
|
|
11891
|
-
let chunkSize = 0;
|
|
11892
|
-
let dropped = 0;
|
|
11893
|
-
const processChunk = async (pks) => {
|
|
11894
|
-
const docs = [];
|
|
11895
|
-
const rawResults = await this.selectMany(new Float64Array(pks), false, tx);
|
|
11896
|
-
let chunkTotalSize = 0;
|
|
11897
|
-
for (let j = 0, len = rawResults.length; j < len; j++) {
|
|
11898
|
-
const s = rawResults[j];
|
|
11899
|
-
if (!s) continue;
|
|
11900
|
-
const doc = JSON.parse(s);
|
|
11901
|
-
chunkTotalSize += s.length * 2;
|
|
11902
|
-
if (isFts && !this.verifyFts(doc, ftsConditions)) continue;
|
|
11903
|
-
if (isCompositeVerify && this.verifyCompositeConditions(doc, compositeVerifyConditions) === false) continue;
|
|
11904
|
-
if (isVerifyOthers) {
|
|
11905
|
-
const flatDoc = this.flattenDocument(doc);
|
|
11906
|
-
let passed = true;
|
|
11907
|
-
for (let k = 0, kLen = verifyOthers.length; k < kLen; k++) {
|
|
11908
|
-
const other = verifyOthers[k];
|
|
11909
|
-
const fieldValue = flatDoc[other.field];
|
|
11910
|
-
if (fieldValue === void 0) {
|
|
11911
|
-
passed = false;
|
|
11912
|
-
break;
|
|
11913
|
-
}
|
|
11914
|
-
const treeValue = { k: doc._id, v: fieldValue };
|
|
11915
|
-
if (!other.tree.verify(treeValue, other.condition)) {
|
|
11916
|
-
passed = false;
|
|
11917
|
-
break;
|
|
11918
|
-
}
|
|
11919
|
-
}
|
|
11920
|
-
if (!passed) continue;
|
|
11921
|
-
}
|
|
11922
|
-
docs.push(doc);
|
|
11923
|
-
}
|
|
11924
|
-
if (!isFiniteLimit) {
|
|
11925
|
-
currentChunkSize = this.adjustChunkSize(currentChunkSize, chunkTotalSize);
|
|
11926
|
-
}
|
|
11927
|
-
return docs;
|
|
11928
|
-
};
|
|
11929
|
-
for await (const pk of keysStream) {
|
|
11930
|
-
if (dropped < startIdx) {
|
|
11931
|
-
dropped++;
|
|
11932
|
-
continue;
|
|
11933
|
-
}
|
|
11934
|
-
chunk.push(pk);
|
|
11935
|
-
chunkSize++;
|
|
11936
|
-
if (chunkSize >= currentChunkSize) {
|
|
11937
|
-
const docs = await processChunk(chunk);
|
|
11938
|
-
for (let j = 0, dLen = docs.length; j < dLen; j++) yield docs[j];
|
|
11939
|
-
chunk = [];
|
|
11940
|
-
chunkSize = 0;
|
|
11941
|
-
}
|
|
11942
|
-
}
|
|
11943
|
-
if (chunkSize > 0) {
|
|
11944
|
-
const docs = await processChunk(chunk);
|
|
11945
|
-
for (let j = 0, dLen = docs.length; j < dLen; j++) yield docs[j];
|
|
11946
|
-
}
|
|
12621
|
+
async countDocuments(query, tx) {
|
|
12622
|
+
return this.queryManager.countDocuments(query, tx);
|
|
11947
12623
|
}
|
|
11948
12624
|
/**
|
|
11949
12625
|
* Select documents from the database
|
|
@@ -11954,130 +12630,7 @@ var DocumentDataplyAPI = class extends import_dataply3.DataplyAPI {
|
|
|
11954
12630
|
* @throws Error if query or orderBy contains non-indexed fields
|
|
11955
12631
|
*/
|
|
11956
12632
|
selectDocuments(query, options = {}, tx) {
|
|
11957
|
-
|
|
11958
|
-
if (!this.indexedFields.has(field)) {
|
|
11959
|
-
throw new Error(`Query field "${field}" is not indexed. Available indexed fields: ${Array.from(this.indexedFields).join(", ")}`);
|
|
11960
|
-
}
|
|
11961
|
-
}
|
|
11962
|
-
const orderBy = options.orderBy;
|
|
11963
|
-
if (orderBy !== void 0 && !this.indexedFields.has(orderBy)) {
|
|
11964
|
-
throw new Error(`orderBy field "${orderBy}" is not indexed. Available indexed fields: ${Array.from(this.indexedFields).join(", ")}`);
|
|
11965
|
-
}
|
|
11966
|
-
const {
|
|
11967
|
-
limit = Infinity,
|
|
11968
|
-
offset = 0,
|
|
11969
|
-
sortOrder = "asc",
|
|
11970
|
-
orderBy: orderByField
|
|
11971
|
-
} = options;
|
|
11972
|
-
const self = this;
|
|
11973
|
-
const stream = () => this.streamWithDefault(async function* (tx2) {
|
|
11974
|
-
const ftsConditions = [];
|
|
11975
|
-
for (const field in query) {
|
|
11976
|
-
const q = query[field];
|
|
11977
|
-
if (q && typeof q === "object" && "match" in q && typeof q.match === "string") {
|
|
11978
|
-
const indexNames = self.fieldToIndices.get(field) || [];
|
|
11979
|
-
for (const indexName of indexNames) {
|
|
11980
|
-
const config = self.registeredIndices.get(indexName);
|
|
11981
|
-
if (config && config.type === "fts") {
|
|
11982
|
-
const ftsConfig = self.getFtsConfig(config);
|
|
11983
|
-
if (ftsConfig) {
|
|
11984
|
-
ftsConditions.push({ field, matchTokens: tokenize(q.match, ftsConfig) });
|
|
11985
|
-
}
|
|
11986
|
-
break;
|
|
11987
|
-
}
|
|
11988
|
-
}
|
|
11989
|
-
}
|
|
11990
|
-
}
|
|
11991
|
-
const driverResult = await self.getDriverKeys(query, orderByField, sortOrder);
|
|
11992
|
-
if (!driverResult) return;
|
|
11993
|
-
const { keysStream, others, compositeVerifyConditions, isDriverOrderByField, rollback } = driverResult;
|
|
11994
|
-
const initialChunkSize = self.options.pageSize;
|
|
11995
|
-
try {
|
|
11996
|
-
if (!isDriverOrderByField && orderByField) {
|
|
11997
|
-
const topK = limit === Infinity ? Infinity : offset + limit;
|
|
11998
|
-
let heap = null;
|
|
11999
|
-
if (topK !== Infinity) {
|
|
12000
|
-
heap = new BinaryHeap((a, b) => {
|
|
12001
|
-
const aVal = a[orderByField] ?? a._id;
|
|
12002
|
-
const bVal = b[orderByField] ?? b._id;
|
|
12003
|
-
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
12004
|
-
return sortOrder === "asc" ? -cmp : cmp;
|
|
12005
|
-
});
|
|
12006
|
-
}
|
|
12007
|
-
const results = [];
|
|
12008
|
-
for await (const doc of self.processChunkedKeysWithVerify(
|
|
12009
|
-
keysStream,
|
|
12010
|
-
0,
|
|
12011
|
-
initialChunkSize,
|
|
12012
|
-
Infinity,
|
|
12013
|
-
ftsConditions,
|
|
12014
|
-
compositeVerifyConditions,
|
|
12015
|
-
others,
|
|
12016
|
-
tx2
|
|
12017
|
-
)) {
|
|
12018
|
-
if (heap) {
|
|
12019
|
-
if (heap.size < topK) heap.push(doc);
|
|
12020
|
-
else {
|
|
12021
|
-
const top = heap.peek();
|
|
12022
|
-
if (top) {
|
|
12023
|
-
const aVal = doc[orderByField] ?? doc._id;
|
|
12024
|
-
const bVal = top[orderByField] ?? top._id;
|
|
12025
|
-
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
12026
|
-
if (sortOrder === "asc" ? cmp < 0 : cmp > 0) heap.replace(doc);
|
|
12027
|
-
}
|
|
12028
|
-
}
|
|
12029
|
-
} else {
|
|
12030
|
-
results.push(doc);
|
|
12031
|
-
}
|
|
12032
|
-
}
|
|
12033
|
-
const finalDocs = heap ? heap.toArray() : results;
|
|
12034
|
-
finalDocs.sort((a, b) => {
|
|
12035
|
-
const aVal = a[orderByField] ?? a._id;
|
|
12036
|
-
const bVal = b[orderByField] ?? b._id;
|
|
12037
|
-
const cmp = aVal < bVal ? -1 : aVal > bVal ? 1 : 0;
|
|
12038
|
-
return sortOrder === "asc" ? cmp : -cmp;
|
|
12039
|
-
});
|
|
12040
|
-
const end = limit === Infinity ? void 0 : offset + limit;
|
|
12041
|
-
const limitedResults = finalDocs.slice(offset, end);
|
|
12042
|
-
for (let j = 0, len = limitedResults.length; j < len; j++) {
|
|
12043
|
-
yield limitedResults[j];
|
|
12044
|
-
}
|
|
12045
|
-
} else {
|
|
12046
|
-
const hasFilters = ftsConditions.length > 0 || compositeVerifyConditions.length > 0 || others.length > 0;
|
|
12047
|
-
const startIdx = hasFilters ? 0 : offset;
|
|
12048
|
-
let yieldedCount = 0;
|
|
12049
|
-
let skippedCount = hasFilters ? 0 : offset;
|
|
12050
|
-
for await (const doc of self.processChunkedKeysWithVerify(
|
|
12051
|
-
keysStream,
|
|
12052
|
-
startIdx,
|
|
12053
|
-
initialChunkSize,
|
|
12054
|
-
limit,
|
|
12055
|
-
ftsConditions,
|
|
12056
|
-
compositeVerifyConditions,
|
|
12057
|
-
others,
|
|
12058
|
-
tx2
|
|
12059
|
-
)) {
|
|
12060
|
-
if (skippedCount < offset) {
|
|
12061
|
-
skippedCount++;
|
|
12062
|
-
continue;
|
|
12063
|
-
}
|
|
12064
|
-
if (yieldedCount >= limit) break;
|
|
12065
|
-
yield doc;
|
|
12066
|
-
yieldedCount++;
|
|
12067
|
-
}
|
|
12068
|
-
}
|
|
12069
|
-
} finally {
|
|
12070
|
-
rollback();
|
|
12071
|
-
}
|
|
12072
|
-
}, tx);
|
|
12073
|
-
const drain = async () => {
|
|
12074
|
-
const result = [];
|
|
12075
|
-
for await (const document of stream()) {
|
|
12076
|
-
result.push(document);
|
|
12077
|
-
}
|
|
12078
|
-
return result;
|
|
12079
|
-
};
|
|
12080
|
-
return { stream, drain };
|
|
12633
|
+
return this.queryManager.selectDocuments(query, options, tx);
|
|
12081
12634
|
}
|
|
12082
12635
|
};
|
|
12083
12636
|
|
|
@@ -12150,6 +12703,14 @@ var DocumentDataply = class _DocumentDataply {
|
|
|
12150
12703
|
await this.api.init();
|
|
12151
12704
|
await this.api.backfillIndices();
|
|
12152
12705
|
}
|
|
12706
|
+
/**
|
|
12707
|
+
* Flush all interval analysis providers, forcing statistics to be recalculated.
|
|
12708
|
+
* Call this after bulk inserts or periodically to keep FTS statistics fresh.
|
|
12709
|
+
* @param tx Optional transaction
|
|
12710
|
+
*/
|
|
12711
|
+
async flushAnalysis(tx) {
|
|
12712
|
+
return this.api.flushAnalysis(tx);
|
|
12713
|
+
}
|
|
12153
12714
|
/**
|
|
12154
12715
|
* Run a migration if the current schemeVersion is lower than the target version.
|
|
12155
12716
|
* The callback is only executed when the database's schemeVersion is below the given version.
|
|
@@ -12249,7 +12810,7 @@ var DocumentDataply = class _DocumentDataply {
|
|
|
12249
12810
|
};
|
|
12250
12811
|
|
|
12251
12812
|
// src/core/index.ts
|
|
12252
|
-
var
|
|
12813
|
+
var import_dataply5 = __toESM(require_cjs());
|
|
12253
12814
|
// Annotate the CommonJS export names for ESM import in node:
|
|
12254
12815
|
0 && (module.exports = {
|
|
12255
12816
|
DocumentDataply,
|