@speckle/objectloader2 2.26.2 → 2.26.4
This diff shows the changes between these publicly released package versions, as published to their public registry.
- package/dist/commonjs/deferment/defermentManager.test.js +10 -5
- package/dist/commonjs/deferment/defermentManager.test.js.map +1 -1
- package/dist/commonjs/index.d.ts +1 -0
- package/dist/commonjs/index.d.ts.map +1 -1
- package/dist/commonjs/queues/batchingQueue.d.ts +2 -0
- package/dist/commonjs/queues/batchingQueue.d.ts.map +1 -1
- package/dist/commonjs/queues/batchingQueue.dispose.test.js +1 -1
- package/dist/commonjs/queues/batchingQueue.dispose.test.js.map +1 -1
- package/dist/commonjs/queues/batchingQueue.js +19 -15
- package/dist/commonjs/queues/batchingQueue.js.map +1 -1
- package/dist/commonjs/queues/batchingQueue.test.js +98 -0
- package/dist/commonjs/queues/batchingQueue.test.js.map +1 -1
- package/dist/esm/deferment/defermentManager.test.js +10 -5
- package/dist/esm/deferment/defermentManager.test.js.map +1 -1
- package/dist/esm/index.d.ts +1 -0
- package/dist/esm/index.d.ts.map +1 -1
- package/dist/esm/queues/batchingQueue.d.ts +2 -0
- package/dist/esm/queues/batchingQueue.d.ts.map +1 -1
- package/dist/esm/queues/batchingQueue.dispose.test.js +1 -1
- package/dist/esm/queues/batchingQueue.dispose.test.js.map +1 -1
- package/dist/esm/queues/batchingQueue.js +19 -15
- package/dist/esm/queues/batchingQueue.js.map +1 -1
- package/dist/esm/queues/batchingQueue.test.js +98 -0
- package/dist/esm/queues/batchingQueue.test.js.map +1 -1
- package/package.json +2 -2
- package/src/core/objectLoader2.spec.ts +10 -3
- package/src/core/stages/cacheReader.spec.ts +1 -1
- package/src/core/stages/cacheWriter.spec.ts +1 -1
- package/src/core/stages/serverDownloader.spec.ts +122 -0
- package/src/deferment/defermentManager.test.ts +10 -5
- package/src/index.ts +1 -0
- package/src/queues/batchingQueue.dispose.test.ts +1 -1
- package/src/queues/batchingQueue.test.ts +121 -0
- package/src/queues/batchingQueue.ts +20 -16
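Taken together, the hunks below make BatchingQueue's previously private #flush method public, make disposeAsync() idempotent and drain remaining items, and add a latched errored state that stops the queue from accepting or processing work after a batch fails. A minimal consumer sketch of that surface, using only the constructor options and method names visible in the diff; the import path, sendToServer, and the option values are illustrative assumptions, not part of the package:

import BatchingQueue from './queues/batchingQueue.js' // path per the file list above; index.ts also gains an export in this release

declare function sendToServer(batch: string[]): Promise<void> // hypothetical transport

const queue = new BatchingQueue<string>({
  batchSize: 50, // flush automatically once 50 items are queued
  maxWaitTime: 500, // or after 500 ms, whichever comes first
  processFunction: async (batch: string[]) => {
    await sendToServer(batch) // a rejection here latches the errored state
  }
})

queue.add('obj-1', 'obj-1')
await queue.flush() // now public: force processing without waiting for size or timeout

if (queue.isErrored()) {
  // from here on, add()/addAll() are silently dropped; this version has no reset
}
await queue.disposeAsync() // drains remaining items; safe to call more than once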
package/src/queues/batchingQueue.test.ts

@@ -146,4 +146,125 @@ describe('BatchingQueue', () => {
       await queue.disposeAsync()
     }
   })
+
+  test('should handle processFunction throwing an exception during flush and is disposed', async () => {
+    const errorMessage = 'Process function failed'
+    const processFunction = vi.fn().mockRejectedValue(new Error(errorMessage))
+
+    const queue = new BatchingQueue<{ id: string }>({
+      batchSize: 5,
+      maxWaitTime: 1000,
+      processFunction
+    })
+
+    const items = Array.from({ length: 3 }, (_, i) => ({ id: `item-${i}` }))
+    items.forEach((item) => queue.add(item.id, item))
+
+    expect(queue.count()).toBe(3)
+
+    // flush should not throw even if processFunction rejects
+    await expect(queue.flush()).resolves.not.toThrow()
+
+    expect(processFunction).toHaveBeenCalled()
+    expect(queue.count()).toBe(0)
+    expect(queue.isDisposed()).toBe(false)
+    expect(queue.isErrored()).toBe(true)
+    // Add more items after the exception
+    queue.add('key3', { id: `item-3` })
+    queue.add('key4', { id: `item-4` })
+
+    // Wait to see if second batch gets processed (it shouldn't due to errored state)
+    await new Promise((resolve) => setTimeout(resolve, 200))
+
+    expect(queue.count()).toBe(0) // Items were not added due to errored state
+    await queue.disposeAsync()
+  })
+
+  test('should drain remaining items when disposed', async () => {
+    const processSpy = vi.fn()
+    const queue = new BatchingQueue({
+      batchSize: 5, // Large batch size to prevent automatic processing
+      maxWaitTime: 10000, // Long timeout to prevent timeout-based processing
+      processFunction: async (batch: string[]): Promise<void> => {
+        await new Promise((resolve) => setTimeout(resolve, 10))
+        processSpy(batch)
+      }
+    })
+
+    // Add items that won't trigger automatic processing (less than batch size)
+    queue.add('key1', 'item1')
+    queue.add('key2', 'item2')
+    queue.add('key3', 'item3')
+
+    // Verify items are in queue but haven't been processed yet
+    expect(queue.count()).toBe(3)
+    expect(processSpy).not.toHaveBeenCalled()
+
+    // Dispose should drain the remaining items
+    await queue.disposeAsync()
+
+    // Verify all items were processed during disposal
+    expect(processSpy).toHaveBeenCalledTimes(1)
+    expect(processSpy).toHaveBeenCalledWith(['item1', 'item2', 'item3'])
+    expect(queue.count()).toBe(0)
+    expect(queue.isDisposed()).toBe(true)
+  })
+
+  test('should drain items even with ongoing processing during dispose', async () => {
+    const processSpy = vi.fn()
+    let firstBatchStarted = false
+    let allowFirstBatchToComplete: (() => void) | null = null
+
+    const queue = new BatchingQueue({
+      batchSize: 2,
+      maxWaitTime: 100,
+      processFunction: async (batch: string[]): Promise<void> => {
+        processSpy(batch)
+
+        // Make the first batch wait for our signal
+        if (!firstBatchStarted) {
+          firstBatchStarted = true
+          await new Promise<void>((resolve) => {
+            allowFirstBatchToComplete = resolve
+          })
+        } else {
+          // Other batches process normally
+          await new Promise((resolve) => setTimeout(resolve, 10))
+        }
+      }
+    })
+
+    // Add first batch that will trigger processing but will be blocked
+    queue.add('key1', 'item1')
+    queue.add('key2', 'item2')
+
+    // Wait for first batch to start processing and allowFirstBatchToComplete to be assigned
+    await new Promise((resolve) => setTimeout(resolve, 50))
+    expect(firstBatchStarted).toBe(true)
+    expect(processSpy).toHaveBeenCalledTimes(1)
+    expect(allowFirstBatchToComplete).not.toBeNull()
+
+    // Add more items while first batch is still processing
+    queue.add('key3', 'item3')
+    queue.add('key4', 'item4')
+
+    // Verify the additional items are queued
+    expect(queue.count()).toBe(2)
+
+    // Start disposal (this should wait for ongoing processing and then drain)
+    const disposePromise = queue.disposeAsync()
+
+    // Allow the first batch to complete
+    allowFirstBatchToComplete!()
+
+    // Wait for disposal to complete
+    await disposePromise
+
+    // Verify all batches were processed
+    expect(processSpy).toHaveBeenCalledTimes(2)
+    expect(processSpy).toHaveBeenCalledWith(['item1', 'item2'])
+    expect(processSpy).toHaveBeenCalledWith(['item3', 'item4'])
+    expect(queue.count()).toBe(0)
+    expect(queue.isDisposed()).toBe(true)
+  })
 })
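The third test gates its first batch on a promise whose resolver is captured into allowFirstBatchToComplete, so disposal can be started while a batch is mid-flight. The same idea extracted as a standalone helper, a minimal sketch (createGate and all names here are illustrative, not part of the test suite):

// a "deferred": a promise plus an external handle that resolves it
function createGate(): { wait: Promise<void>; open: () => void } {
  let open!: () => void
  const wait = new Promise<void>((resolve) => {
    open = resolve
  })
  return { wait, open }
}

// usage in a processFunction stub: block until the test decides to continue
const gate = createGate()
const processFunction = async (): Promise<void> => {
  await gate.wait
}
// ...start disposeAsync(), then call gate.open() to let the blocked batch finish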
package/src/queues/batchingQueue.ts

@@ -11,9 +11,10 @@ export default class BatchingQueue<T> {
   #batchSize: number
   #processFunction: (batch: T[]) => Promise<void>
   #timeoutId: ReturnType<typeof setTimeout> | null = null
-  #isProcessing = false
 
-  #disposed = false
+  #isProcessing = false
+  #isDisposed = false
+  #isErrored = false
   #batchTimeout: number
 
   // Helper methods for cross-environment timeout handling
@@ -46,7 +47,8 @@ export default class BatchingQueue<T> {
   }
 
   async disposeAsync(): Promise<void> {
-    this.#disposed = true
+    if (this.#isDisposed) return
+    this.#isDisposed = true
     if (this.#timeoutId) {
       this.#getClearTimeoutFn()(this.#timeoutId)
       this.#timeoutId = null
@@ -62,56 +64,54 @@ export default class BatchingQueue<T> {
     // After any ongoing flush is completed, there might be items in the queue.
     // We should flush them.
     if (this.#queue.size > 0) {
-      await this.#flush()
+      await this.flush()
     }
   }
 
   add(key: string, item: T): void {
-    if (this.#disposed) return
+    if (this.#isDisposed || this.#isErrored) return
     this.#queue.enqueue(key, item)
     this.#addCheck()
   }
 
   addAll(keys: string[], items: T[]): void {
-    if (this.#disposed) return
+    if (this.#isDisposed || this.#isErrored) return
     this.#queue.enqueueAll(keys, items)
     this.#addCheck()
   }
 
   #addCheck(): void {
-    if (this.#disposed) return
+    if (this.#isDisposed) return
     if (this.#queue.size >= this.#batchSize) {
       // Fire and forget, no need to await
       // eslint-disable-next-line @typescript-eslint/no-floating-promises
-      this.#flush()
+      this.flush()
     } else {
       if (this.#timeoutId) {
         this.#getClearTimeoutFn()(this.#timeoutId)
       }
       // eslint-disable-next-line @typescript-eslint/no-misused-promises
-      this.#timeoutId = this.#getSetTimeoutFn()(() => this.#flush(), this.#batchTimeout)
+      this.#timeoutId = this.#getSetTimeoutFn()(() => this.flush(), this.#batchTimeout)
     }
   }
 
-  async #flush(): Promise<void> {
+  async flush(): Promise<void> {
     if (this.#timeoutId) {
       this.#getClearTimeoutFn()(this.#timeoutId)
       this.#timeoutId = null
     }
 
-    if (this.#isProcessing || this.#queue.size === 0) {
+    if (this.#isErrored || this.#isProcessing || this.#queue.size === 0) {
       return
     }
     this.#isProcessing = true
 
-    const batchToProcess = this.#getBatch(this.#batchSize)
-    if (this.#disposed) return
-
     try {
+      const batchToProcess = this.#getBatch(this.#batchSize)
       await this.#processFunction(batchToProcess)
     } catch (error) {
       console.error('Batch processing failed:', error)
-      this.#disposed = true
+      this.#isErrored = true
     } finally {
       this.#isProcessing = false
     }
@@ -127,7 +127,11 @@ export default class BatchingQueue<T> {
   }
 
   isDisposed(): boolean {
-    return this.#disposed
+    return this.#isDisposed
+  }
+
+  isErrored(): boolean {
+    return this.#isErrored
   }
 
   #getBatch(batchSize: number): T[] {
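Because flush() catches the processFunction rejection and only latches the errored flag, callers observe failures by polling isErrored() rather than with try/catch. A hedged consumer-side recovery sketch; makeQueue is a hypothetical factory, and rebuilding the queue is one possible strategy rather than anything the package prescribes:

import BatchingQueue from './queues/batchingQueue.js'

declare function makeQueue(): BatchingQueue<string> // hypothetical: rebuilds the queue with the caller's options

let queue = makeQueue()

async function safeAdd(key: string, item: string): Promise<void> {
  if (queue.isErrored()) {
    await queue.disposeAsync() // idempotent as of this version
    queue = makeQueue() // the errored flag is never cleared, so recovery means a new instance
  }
  queue.add(key, item)
}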