@pirireis/webglobeplugins 0.15.20-alpha → 0.15.22-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Math/arc.js +1 -2
- package/Math/circle-cdf-points.js +1 -170
- package/Math/circle.js +0 -25
- package/Math/vec3.js +1 -1
- package/altitude-locator/plugin.js +1 -1
- package/package.json +1 -1
- package/point-tracks/plugin.js +1 -2
- package/programs/line-on-globe/lines-color-instanced-flat.js +0 -1
- package/programs/point-on-globe/element-globe-surface-glow.js +0 -1
- package/programs/totems/camerauniformblock.js +7 -0
- package/range-tools-on-terrain/bearing-line/plugin.js +0 -1
- package/range-tools-on-terrain/circle-line-chain/chain-list-map.js +4 -9
- package/range-tools-on-terrain/circle-line-chain/plugin.js +5 -3
- package/semiplugins/lightweight/line-plugin.js +35 -40
- package/semiplugins/shape-on-terrain/padding-1-degree.js +94 -44
- package/util/account/single-attribute-buffer-management/buffer-manager.js +1 -1
- package/util/account/single-attribute-buffer-management/buffer-orchestrator.js +121 -70
- package/util/account/single-attribute-buffer-management/buffer-orchestrator1.js +159 -0
- package/util/account/single-attribute-buffer-management/object-store.js +1 -1
- package/Math/arc-generate-points copy.js +0 -366
- package/Math/globe-util/horizon-plane.js +0 -112
- package/altitude-locator/draw-subset-obj.js +0 -16
- package/semiplugins/shape-on-terrain/derived/padding-plugin.js +0 -101
- package/util/account/single-attribute-buffer-management/buffer-orchestrator copy.js +0 -161
- package/util/account/single-attribute-buffer-management/chunked-buffer-manager.js +0 -75
- package/util/account/single-attribute-buffer-management/chunked-buffer-orchestrator.js +0 -195
--- a/package/util/account/single-attribute-buffer-management/buffer-orchestrator copy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const EXTRA_SIZE = 10;
-export class BufferOrchestrator {
-    _capacity;
-    offsetMap;
-    tombstoneOffsets;
-    _length;
-    constructor({ capacity = 10 } = {}) {
-        this._capacity = capacity;
-        this.offsetMap = new Map();
-        this.tombstoneOffsets = [];
-        this._length = 0;
-    }
-    resetWithCapacity(bufferManagersMap, capacity = null) {
-        this._capacity = capacity !== null ? capacity : this._capacity;
-        for (const [key, { bufferManager }] of bufferManagersMap) {
-            bufferManager.resetWithCapacity(this._capacity);
-        }
-        this.offsetMap.clear();
-        this.tombstoneOffsets = [];
-        this._length = 0;
-    }
-    insertBulk(items, bufferManagersMap, bufferKeys = null) {
-        this.ensureSpace(items.length, bufferManagersMap);
-        const { offsetMap } = this;
-        const offsets = [];
-        for (const item of items) {
-            let o = offsetMap.get(item.key);
-            const offset = o !== undefined ? o : this.nextOffset();
-            offsetMap.set(item.key, offset);
-            offsets.push(offset);
-        }
-        if (bufferKeys) {
-            for (const key of bufferKeys) {
-                const bufferManagerComp = bufferManagersMap.get(key);
-                if (bufferManagerComp === undefined)
-                    throw new Error("insertBulk bufferKey does not exist");
-                const { bufferManager, adaptor } = bufferManagerComp;
-                bufferManager.insertBulk(items.map(adaptor), offsets);
-            }
-        }
-        else {
-            for (const [key, { bufferManager, adaptor }] of bufferManagersMap) {
-                bufferManager.insertBulk(items.map(adaptor), offsets);
-            }
-        }
-    }
-    // does not assign offset to the new items.
-    updateBulk(items, bufferManagersMap, bufferKeys = null) {
-        const { offsetMap } = this;
-        const offsets = [];
-        for (const item of items) {
-            const offset = offsetMap.get(item.key);
-            if (offset !== undefined) {
-                offsets.push(offset);
-            }
-            else {
-                throw new Error("updateBulk item Key does not exist");
-            }
-        }
-        if (bufferKeys) {
-            for (const key of bufferKeys) {
-                const bufferManagerComp = bufferManagersMap.get(key);
-                if (bufferManagerComp === undefined)
-                    throw new Error("updateBulk bufferKey does not exist");
-                const { bufferManager, adaptor } = bufferManagerComp;
-                bufferManager.insertBulk(items.map(adaptor), offsets);
-            }
-        }
-        else {
-            for (const [key, { bufferManager, adaptor }] of bufferManagersMap) {
-                bufferManager.insertBulk(items.map(adaptor), offsets);
-            }
-        }
-    }
-    deleteBulk(keys, bufferManagersMap) {
-        const offsets = [];
-        for (const key of keys) {
-            const offset = this.getOffset(key);
-            if (offset !== undefined) {
-                offsets.push(offset);
-                this.offsetMap.delete(key);
-                this.tombstoneOffsets.push(offset);
-            }
-        }
-        for (const [key, { bufferManager }] of bufferManagersMap) {
-            bufferManager.deleteBulk(offsets);
-        }
-    }
-    getOffset(key) {
-        return this.offsetMap.get(key);
-    }
-    nextOffset() {
-        if (this.tombstoneOffsets.length > 0) {
-            const offset = this.tombstoneOffsets.pop();
-            return offset;
-        }
-        if (this._length < this._capacity) {
-            return this._length++;
-        }
-        return false;
-    }
-    ensureSpace(itemsLength, bufferManagersMap) {
-        if (itemsLength <= this.emptySpace)
-            return;
-        const newCapacity = this.length + itemsLength;
-        for (const [key, { bufferManager }] of bufferManagersMap) {
-            bufferManager.extendBuffer(this.length, newCapacity);
-        }
-        this._capacity = newCapacity;
-    }
-    defrag(bufferManagers, bufferKeys) {
-        const offsetMap = this.offsetMap;
-        const newCapacity = offsetMap.size + EXTRA_SIZE;
-        if (bufferKeys) {
-            for (const key of bufferKeys) {
-                const offset = offsetMap.get(key);
-                if (offset !== undefined) {
-                    for (const [key, { bufferManager }] of bufferManagers) {
-                        bufferManager.defrag([offset], this.length, newCapacity);
-                    }
-                }
-            }
-        }
-        else {
-            for (const [key, { bufferManager }] of bufferManagers) {
-                bufferManager.defrag(offsetMap.values(), this.length, newCapacity);
-            }
-        }
-        this._defrag();
-        this._length = offsetMap.size;
-        this._capacity = newCapacity;
-        this.tombstoneOffsets = [];
-    }
-    /**
-     * Flushes metadata and sets length to 0 without actualize change on buffers
-     * This method created for cases in which data is loaded on each frame
-     */
-    flush({ capacity = 10 } = {}) {
-        this._length = 0;
-        this._capacity = capacity;
-        this.tombstoneOffsets = [];
-        this.offsetMap.clear();
-    }
-    _defrag() {
-        const newOffsetMap = new Map();
-        let newOffset = 0;
-        for (const [key, offset] of this.offsetMap) {
-            newOffsetMap.set(key, newOffset++);
-        }
-        this.offsetMap = newOffsetMap;
-    }
-    get length() {
-        return this._length;
-    }
-    get emptySpace() {
-        return this._capacity - this.offsetMap.size;
-    }
-    get capacity() {
-        return this._capacity;
-    }
-}
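For context on the API shape the deleted orchestrator exposes: it only tracks key→offset assignments and tombstones, and delegates the actual attribute writes to a `Map` of `{ bufferManager, adaptor }` entries supplied by the caller (the shape the loops above destructure). The sketch below is illustrative and not part of the package; `ArrayBufferManagerStub` is a hypothetical in-memory stand-in implementing just the methods the orchestrator calls (`resetWithCapacity`, `extendBuffer`, `insertBulk`, `deleteBulk`), and `BufferOrchestrator` refers to the class shown in the hunk above.

```js
// Illustrative stand-in for the package's BufferManager: a flat Float32Array
// instead of a GPU buffer, exposing only the methods BufferOrchestrator calls.
class ArrayBufferManagerStub {
  constructor(itemSize, capacity) {
    this.itemSize = itemSize;
    this.data = new Float32Array(capacity * itemSize);
  }
  resetWithCapacity(capacity) {
    this.data = new Float32Array(capacity * this.itemSize);
  }
  extendBuffer(length, newCapacity) {
    const next = new Float32Array(newCapacity * this.itemSize);
    next.set(this.data.subarray(0, length * this.itemSize));
    this.data = next;
  }
  insertBulk(blocks, offsets) {
    offsets.forEach((offset, i) => this.data.set(blocks[i], offset * this.itemSize));
  }
  deleteBulk(offsets) {
    const empty = new Float32Array(this.itemSize).fill(NaN);
    for (const offset of offsets) this.data.set(empty, offset * this.itemSize);
  }
}

const orchestrator = new BufferOrchestrator({ capacity: 16 });
const bufferManagersMap = new Map([
  ["position", {
    bufferManager: new ArrayBufferManagerStub(3, 16),
    adaptor: (item) => item.position, // item -> per-item attribute block
  }],
]);

orchestrator.insertBulk(
  [{ key: "a", position: [0, 0, 1] }, { key: "b", position: [1, 0, 0] }],
  bufferManagersMap,
);
orchestrator.deleteBulk(["a"], bufferManagersMap); // offset 0 becomes a tombstone
orchestrator.insertBulk([{ key: "c", position: [0, 1, 0] }], bufferManagersMap);
// "c" reuses the freed offset 0 instead of growing the buffers
```

Deleting `"a"` leaves a tombstone at offset 0, which the next insert pops via `nextOffset()` rather than extending capacity.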
--- a/package/util/account/single-attribute-buffer-management/chunked-buffer-manager.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { BufferManager } from "./buffer-manager";
-export class ChunkedBufferManager extends BufferManager {
-    // Chunked insert that batches multiple items into fewer GPU calls
-    insertBulkChunked(blocks, offsets, chunkSize = 1000) {
-        const { gl, buffer, itemSize } = this;
-        gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
-        const offsetMultiplier = itemSize * 4;
-        // Process in chunks to reduce GPU call overhead
-        for (let i = 0; i < blocks.length; i += chunkSize) {
-            const chunkEnd = Math.min(i + chunkSize, blocks.length);
-            const chunkBlocks = blocks.slice(i, chunkEnd);
-            const chunkOffsets = offsets.slice(i, chunkEnd);
-            // Create a single large buffer for this chunk
-            const totalItems = chunkBlocks.length;
-            const chunkBuffer = new Float32Array(totalItems * itemSize);
-            // Copy all chunk data into one buffer
-            for (let j = 0; j < chunkBlocks.length; j++) {
-                chunkBuffer.set(chunkBlocks[j], j * itemSize);
-            }
-            // Find contiguous ranges for efficient uploads
-            const ranges = this.findContiguousRanges(chunkOffsets, offsetMultiplier);
-            for (const range of ranges) {
-                const startOffset = range.startOffset;
-                const dataStart = range.indices[0] * itemSize;
-                const dataLength = range.indices.length * itemSize;
-                const rangeData = chunkBuffer.subarray(dataStart, dataStart + dataLength);
-                gl.bufferSubData(gl.ARRAY_BUFFER, startOffset, rangeData);
-            }
-        }
-        gl.bindBuffer(gl.ARRAY_BUFFER, null);
-    }
-    // Optimized delete that batches operations
-    deleteBulkChunked(offsets, chunkSize = 1000) {
-        const { gl, buffer, itemSize } = this;
-        const emptyBlock = new Float32Array(itemSize).fill(NaN);
-        const offsetMultiplier = itemSize * 4;
-        gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
-        // Sort offsets for better cache coherency
-        const sortedOffsets = [...offsets].sort((a, b) => a - b);
-        // Process in chunks
-        for (let i = 0; i < sortedOffsets.length; i += chunkSize) {
-            const chunkEnd = Math.min(i + chunkSize, sortedOffsets.length);
-            for (let j = i; j < chunkEnd; j++) {
-                const offset = sortedOffsets[j];
-                if (offset !== undefined) {
-                    gl.bufferSubData(gl.ARRAY_BUFFER, offset * offsetMultiplier, emptyBlock);
-                }
-            }
-        }
-        gl.bindBuffer(gl.ARRAY_BUFFER, null);
-    }
-    // Find contiguous memory ranges for batch uploads
-    findContiguousRanges(offsets, offsetMultiplier) {
-        const ranges = [];
-        const sortedIndices = offsets
-            .map((offset, index) => ({ offset: offset * offsetMultiplier, index }))
-            .sort((a, b) => a.offset - b.offset);
-        let currentRange = null;
-        for (const { offset, index } of sortedIndices) {
-            if (!currentRange || offset !== currentRange.startOffset + currentRange.indices.length * offsetMultiplier) {
-                // Start new range
-                if (currentRange)
-                    ranges.push(currentRange);
-                currentRange = { startOffset: offset, indices: [index] };
-            }
-            else {
-                // Extend current range
-                currentRange.indices.push(index);
-            }
-        }
-        if (currentRange)
-            ranges.push(currentRange);
-        return ranges;
-    }
-}
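The core idea in the deleted `ChunkedBufferManager` is `findContiguousRanges`: item offsets that happen to be adjacent in the buffer are merged into one byte range so a single `gl.bufferSubData` call covers the whole run. Below is a standalone sketch of that grouping with no WebGL context; `groupContiguous` is an illustrative name, not a package export, and mirrors the logic of the method above.

```js
// Illustrative: group item offsets into contiguous byte ranges, mirroring the
// grouping findContiguousRanges performs before issuing bufferSubData calls.
function groupContiguous(offsets, bytesPerItem) {
  const sorted = offsets
    .map((offset, index) => ({ byteOffset: offset * bytesPerItem, index }))
    .sort((a, b) => a.byteOffset - b.byteOffset);
  const ranges = [];
  let current = null;
  for (const { byteOffset, index } of sorted) {
    const expectedNext = current
      ? current.startOffset + current.indices.length * bytesPerItem
      : null;
    if (!current || byteOffset !== expectedNext) {
      if (current) ranges.push(current);          // close the previous run
      current = { startOffset: byteOffset, indices: [index] };
    } else {
      current.indices.push(index);                // extend the adjacent run
    }
  }
  if (current) ranges.push(current);
  return ranges;
}

// Item offsets 4, 5, 6 are adjacent; 9 is not. With 3 floats (12 bytes) per item:
console.log(groupContiguous([4, 5, 6, 9], 3 * 4));
// -> [ { startOffset: 48, indices: [0, 1, 2] }, { startOffset: 108, indices: [3] } ]
```

Here the adjacent offsets 4–6 collapse into one upload and 9 gets its own, so four items cost two `bufferSubData` calls instead of four.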
--- a/package/util/account/single-attribute-buffer-management/chunked-buffer-orchestrator.js
+++ /dev/null
@@ -1,195 +0,0 @@
-export class ChunkedBufferOrchestrator {
-    _capacity;
-    _length;
-    offsetMap;
-    tombstoneOffsets;
-    // Chunking configuration
-    chunkSize;
-    maxPendingOperations;
-    autoFlushInterval;
-    // Pending operations batching
-    pendingInserts;
-    pendingUpdates;
-    pendingDeletes;
-    operationCount;
-    flushTimer;
-    constructor({ capacity = 10, chunkSize = 1000, maxPendingOperations = 5000, autoFlushInterval = 16 // ~60fps
-    } = {}) {
-        this._capacity = capacity;
-        this._length = 0;
-        this.offsetMap = new Map();
-        this.tombstoneOffsets = [];
-        this.chunkSize = chunkSize;
-        this.maxPendingOperations = maxPendingOperations;
-        this.autoFlushInterval = autoFlushInterval;
-        this.pendingInserts = new Map();
-        this.pendingUpdates = new Map();
-        this.pendingDeletes = new Set();
-        this.operationCount = 0;
-        this.flushTimer = null;
-        this.startAutoFlush();
-    }
-    resetWithCapacity(bufferManagersMap, capacity = null) {
-        this.flushPendingOperations(bufferManagersMap);
-        this._capacity = capacity !== null ? capacity : this._capacity;
-        for (const [key, { bufferManager }] of bufferManagersMap) {
-            bufferManager.resetWithCapacity(this._capacity);
-        }
-        this.offsetMap.clear();
-        this.tombstoneOffsets = [];
-        this._length = 0;
-    }
-    // Batch insert operations
-    insertBulk(items, bufferManagersMap, bufferKeys = null) {
-        const targetKeys = bufferKeys || Array.from(bufferManagersMap.keys());
-        // Process items in chunks
-        for (let i = 0; i < items.length; i += this.chunkSize) {
-            const chunk = items.slice(i, i + this.chunkSize);
-            this.queueInsertChunk(chunk, targetKeys);
-        }
-        this.operationCount += items.length;
-        this.checkAutoFlush(bufferManagersMap);
-    }
-    // Batch update operations
-    updateBulk(items, bufferManagersMap, bufferKeys = null) {
-        const targetKeys = bufferKeys || Array.from(bufferManagersMap.keys());
-        for (let i = 0; i < items.length; i += this.chunkSize) {
-            const chunk = items.slice(i, i + this.chunkSize);
-            this.queueUpdateChunk(chunk, targetKeys);
-        }
-        this.operationCount += items.length;
-        this.checkAutoFlush(bufferManagersMap);
-    }
-    // Batch delete operations
-    deleteBulk(keys, bufferManagersMap) {
-        for (const key of keys) {
-            const offset = this.offsetMap.get(key);
-            if (offset !== undefined) {
-                this.pendingDeletes.add(offset);
-                this.offsetMap.delete(key);
-                this.tombstoneOffsets.push(offset);
-            }
-        }
-        this.operationCount += keys.length;
-        this.checkAutoFlush(bufferManagersMap);
-    }
-    queueInsertChunk(items, bufferKeys) {
-        this.ensureSpaceForItems(items.length);
-        const offsets = [];
-        for (const item of items) {
-            let offset = this.offsetMap.get(item.key);
-            if (offset === undefined) {
-                offset = this.nextOffset();
-                this.offsetMap.set(item.key, offset);
-            }
-            offsets.push(offset);
-        }
-        for (const bufferKey of bufferKeys) {
-            if (!this.pendingInserts.has(bufferKey)) {
-                this.pendingInserts.set(bufferKey, { items: [], offsets: [] });
-            }
-            const pending = this.pendingInserts.get(bufferKey);
-            pending.items.push(...items);
-            pending.offsets.push(...offsets);
-        }
-    }
-    queueUpdateChunk(items, bufferKeys) {
-        const offsets = [];
-        for (const item of items) {
-            const offset = this.offsetMap.get(item.key);
-            if (offset === undefined) {
-                throw new Error(`Update item key '${item.key}' does not exist`);
-            }
-            offsets.push(offset);
-        }
-        for (const bufferKey of bufferKeys) {
-            if (!this.pendingUpdates.has(bufferKey)) {
-                this.pendingUpdates.set(bufferKey, { items: [], offsets: [] });
-            }
-            const pending = this.pendingUpdates.get(bufferKey);
-            pending.items.push(...items);
-            pending.offsets.push(...offsets);
-        }
-    }
-    checkAutoFlush(bufferManagersMap) {
-        if (this.operationCount >= this.maxPendingOperations) {
-            this.flushPendingOperations(bufferManagersMap);
-        }
-    }
-    // Force flush all pending operations to GPU
-    flushPendingOperations(bufferManagersMap) {
-        // Process deletes first
-        if (this.pendingDeletes.size > 0) {
-            const deleteOffsets = Array.from(this.pendingDeletes);
-            for (const [key, { bufferManager }] of bufferManagersMap) {
-                bufferManager.deleteBulk(deleteOffsets);
-            }
-            this.pendingDeletes.clear();
-        }
-        // Process inserts
-        for (const [bufferKey, { items, offsets }] of this.pendingInserts) {
-            const bufferManagerComp = bufferManagersMap.get(bufferKey);
-            if (bufferManagerComp) {
-                const { bufferManager, adaptor } = bufferManagerComp;
-                const adaptedData = items.map(adaptor);
-                bufferManager.insertBulkChunked(adaptedData, offsets, this.chunkSize);
-            }
-        }
-        this.pendingInserts.clear();
-        // Process updates
-        for (const [bufferKey, { items, offsets }] of this.pendingUpdates) {
-            const bufferManagerComp = bufferManagersMap.get(bufferKey);
-            if (bufferManagerComp) {
-                const { bufferManager, adaptor } = bufferManagerComp;
-                const adaptedData = items.map(adaptor);
-                bufferManager.insertBulkChunked(adaptedData, offsets, this.chunkSize);
-            }
-        }
-        this.pendingUpdates.clear();
-        this.operationCount = 0;
-    }
-    ensureSpaceForItems(itemCount) {
-        if (itemCount <= this.emptySpace)
-            return;
-        const newCapacity = Math.max(this._capacity * 2, this._length + itemCount + this.chunkSize);
-        this._capacity = newCapacity;
-    }
-    nextOffset() {
-        if (this.tombstoneOffsets.length > 0) {
-            return this.tombstoneOffsets.pop();
-        }
-        if (this._length < this._capacity) {
-            return this._length++;
-        }
-        return false;
-    }
-    startAutoFlush() {
-        if (this.autoFlushInterval > 0) {
-            this.flushTimer = setInterval(() => {
-                if (this.operationCount > 0) {
-                    // Note: You'll need to pass bufferManagersMap here
-                    // This requires restructuring to maintain reference
-                    console.log(`Auto-flush triggered with ${this.operationCount} pending operations`);
-                }
-            }, this.autoFlushInterval);
-        }
-    }
-    stopAutoFlush() {
-        if (this.flushTimer) {
-            clearInterval(this.flushTimer);
-            this.flushTimer = null;
-        }
-    }
-    // Getters
-    get length() { return this._length; }
-    get emptySpace() { return this._capacity - this.offsetMap.size; }
-    get capacity() { return this._capacity; }
-    get pendingOperationCount() { return this.operationCount; }
-    // Manual control
-    forceMajorFlush(bufferManagersMap) {
-        this.flushPendingOperations(bufferManagersMap);
-    }
-    getOffset(key) {
-        return this.offsetMap.get(key);
-    }
-}
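Note that the deleted `startAutoFlush` only logs: the timer callback has no `bufferManagersMap` to flush against, as its inline comment acknowledges. Below is a minimal sketch of the kind of restructuring that comment points at, assuming the class above were kept; `AutoFlushingOrchestrator` and `bindBufferManagers` are hypothetical names, not package APIs.

```js
// Illustrative only: hold a long-lived reference to the bufferManagersMap so
// the periodic timer can actually flush pending operations to the GPU.
// None of this wiring exists in the package; it sketches the restructuring
// the removed "You'll need to pass bufferManagersMap here" comment alludes to.
class AutoFlushingOrchestrator extends ChunkedBufferOrchestrator {
  bindBufferManagers(bufferManagersMap) {
    this._boundManagers = bufferManagersMap; // hypothetical field
  }
  startAutoFlush() {
    if (this.autoFlushInterval <= 0) return;
    this.flushTimer = setInterval(() => {
      if (this.operationCount > 0 && this._boundManagers) {
        this.flushPendingOperations(this._boundManagers);
      }
    }, this.autoFlushInterval);
  }
}

// Usage sketch: bind once, then batched inserts/updates/deletes are flushed
// either by the maxPendingOperations threshold or by the interval timer.
// const orchestrator = new AutoFlushingOrchestrator({ chunkSize: 512, autoFlushInterval: 16 });
// orchestrator.bindBufferManagers(bufferManagersMap); // same map shape as in the earlier sketch
```

With the map bound once, `checkAutoFlush` still handles threshold-driven flushes while the interval covers frames where the pending count stays below `maxPendingOperations`.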