@speckle/objectloader2 2.26.1 → 2.26.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. package/dist/commonjs/core/objectLoader2.d.ts.map +1 -1
  2. package/dist/commonjs/core/objectLoader2.js +1 -9
  3. package/dist/commonjs/core/objectLoader2.js.map +1 -1
  4. package/dist/commonjs/core/objectLoader2Factory.d.ts +7 -2
  5. package/dist/commonjs/core/objectLoader2Factory.d.ts.map +1 -1
  6. package/dist/commonjs/core/objectLoader2Factory.js +14 -4
  7. package/dist/commonjs/core/objectLoader2Factory.js.map +1 -1
  8. package/dist/commonjs/core/objectLoader2Factory.test.d.ts +2 -0
  9. package/dist/commonjs/core/objectLoader2Factory.test.d.ts.map +1 -0
  10. package/dist/commonjs/core/objectLoader2Factory.test.js +106 -0
  11. package/dist/commonjs/core/objectLoader2Factory.test.js.map +1 -0
  12. package/dist/commonjs/core/options.d.ts +2 -0
  13. package/dist/commonjs/core/options.d.ts.map +1 -1
  14. package/dist/commonjs/core/stages/cacheReader.d.ts +2 -2
  15. package/dist/commonjs/core/stages/cacheReader.d.ts.map +1 -1
  16. package/dist/commonjs/core/stages/cacheReader.js.map +1 -1
  17. package/dist/commonjs/core/stages/cacheWriter.d.ts +2 -2
  18. package/dist/commonjs/core/stages/cacheWriter.d.ts.map +1 -1
  19. package/dist/commonjs/core/stages/cacheWriter.js +4 -4
  20. package/dist/commonjs/core/stages/cacheWriter.js.map +1 -1
  21. package/dist/commonjs/core/stages/serverDownloader.d.ts +2 -1
  22. package/dist/commonjs/core/stages/serverDownloader.d.ts.map +1 -1
  23. package/dist/commonjs/core/stages/serverDownloader.js +3 -2
  24. package/dist/commonjs/core/stages/serverDownloader.js.map +1 -1
  25. package/dist/commonjs/deferment/defermentManager.d.ts +18 -2
  26. package/dist/commonjs/deferment/defermentManager.d.ts.map +1 -1
  27. package/dist/commonjs/deferment/defermentManager.js +30 -3
  28. package/dist/commonjs/deferment/defermentManager.js.map +1 -1
  29. package/dist/commonjs/deferment/defermentManager.test.js +21 -16
  30. package/dist/commonjs/deferment/defermentManager.test.js.map +1 -1
  31. package/dist/commonjs/queues/batchingQueue.d.ts +2 -2
  32. package/dist/commonjs/queues/batchingQueue.d.ts.map +1 -1
  33. package/dist/commonjs/queues/batchingQueue.dispose.test.js +1 -3
  34. package/dist/commonjs/queues/batchingQueue.dispose.test.js.map +1 -1
  35. package/dist/commonjs/queues/batchingQueue.js +20 -15
  36. package/dist/commonjs/queues/batchingQueue.js.map +1 -1
  37. package/dist/commonjs/queues/batchingQueue.test.js +98 -0
  38. package/dist/commonjs/queues/batchingQueue.test.js.map +1 -1
  39. package/dist/commonjs/types/types.d.ts +5 -0
  40. package/dist/commonjs/types/types.d.ts.map +1 -1
  41. package/dist/esm/core/objectLoader2.d.ts.map +1 -1
  42. package/dist/esm/core/objectLoader2.js +1 -9
  43. package/dist/esm/core/objectLoader2.js.map +1 -1
  44. package/dist/esm/core/objectLoader2Factory.d.ts +7 -2
  45. package/dist/esm/core/objectLoader2Factory.d.ts.map +1 -1
  46. package/dist/esm/core/objectLoader2Factory.js +14 -4
  47. package/dist/esm/core/objectLoader2Factory.js.map +1 -1
  48. package/dist/esm/core/objectLoader2Factory.test.d.ts +2 -0
  49. package/dist/esm/core/objectLoader2Factory.test.d.ts.map +1 -0
  50. package/dist/esm/core/objectLoader2Factory.test.js +104 -0
  51. package/dist/esm/core/objectLoader2Factory.test.js.map +1 -0
  52. package/dist/esm/core/options.d.ts +2 -0
  53. package/dist/esm/core/options.d.ts.map +1 -1
  54. package/dist/esm/core/stages/cacheReader.d.ts +2 -2
  55. package/dist/esm/core/stages/cacheReader.d.ts.map +1 -1
  56. package/dist/esm/core/stages/cacheReader.js.map +1 -1
  57. package/dist/esm/core/stages/cacheWriter.d.ts +2 -2
  58. package/dist/esm/core/stages/cacheWriter.d.ts.map +1 -1
  59. package/dist/esm/core/stages/cacheWriter.js +4 -4
  60. package/dist/esm/core/stages/cacheWriter.js.map +1 -1
  61. package/dist/esm/core/stages/serverDownloader.d.ts +2 -1
  62. package/dist/esm/core/stages/serverDownloader.d.ts.map +1 -1
  63. package/dist/esm/core/stages/serverDownloader.js +3 -2
  64. package/dist/esm/core/stages/serverDownloader.js.map +1 -1
  65. package/dist/esm/deferment/defermentManager.d.ts +18 -2
  66. package/dist/esm/deferment/defermentManager.d.ts.map +1 -1
  67. package/dist/esm/deferment/defermentManager.js +28 -2
  68. package/dist/esm/deferment/defermentManager.js.map +1 -1
  69. package/dist/esm/deferment/defermentManager.test.js +21 -16
  70. package/dist/esm/deferment/defermentManager.test.js.map +1 -1
  71. package/dist/esm/queues/batchingQueue.d.ts +2 -2
  72. package/dist/esm/queues/batchingQueue.d.ts.map +1 -1
  73. package/dist/esm/queues/batchingQueue.dispose.test.js +1 -3
  74. package/dist/esm/queues/batchingQueue.dispose.test.js.map +1 -1
  75. package/dist/esm/queues/batchingQueue.js +20 -15
  76. package/dist/esm/queues/batchingQueue.js.map +1 -1
  77. package/dist/esm/queues/batchingQueue.test.js +98 -0
  78. package/dist/esm/queues/batchingQueue.test.js.map +1 -1
  79. package/dist/esm/types/types.d.ts +5 -0
  80. package/dist/esm/types/types.d.ts.map +1 -1
  81. package/package.json +2 -2
  82. package/src/core/objectLoader2.spec.ts +10 -3
  83. package/src/core/objectLoader2.ts +3 -13
  84. package/src/core/objectLoader2Factory.test.ts +135 -0
  85. package/src/core/objectLoader2Factory.ts +29 -6
  86. package/src/core/options.ts +2 -0
  87. package/src/core/stages/cacheReader.spec.ts +1 -1
  88. package/src/core/stages/cacheReader.ts +3 -3
  89. package/src/core/stages/cacheWriter.spec.ts +1 -1
  90. package/src/core/stages/cacheWriter.ts +5 -5
  91. package/src/core/stages/serverDownloader.spec.ts +122 -0
  92. package/src/core/stages/serverDownloader.ts +7 -4
  93. package/src/deferment/defermentManager.test.ts +21 -16
  94. package/src/deferment/defermentManager.ts +39 -3
  95. package/src/queues/batchingQueue.dispose.test.ts +1 -3
  96. package/src/queues/batchingQueue.test.ts +121 -0
  97. package/src/queues/batchingQueue.ts +21 -19
  98. package/src/types/types.ts +5 -0
@@ -1,5 +1,11 @@
-import { CustomLogger, getFeatureFlag, ObjectLoader2Flags } from '../types/functions.js'
-import { Base } from '../types/types.js'
+import { DefermentManager, MemoryOnlyDeferment } from '../deferment/defermentManager.js'
+import {
+  CustomLogger,
+  Fetcher,
+  getFeatureFlag,
+  ObjectLoader2Flags
+} from '../types/functions.js'
+import { Base, ObjectAttributeMask } from '../types/types.js'
 import { ObjectLoader2 } from './objectLoader2.js'
 import { IndexedDatabase } from './stages/indexedDatabase.js'
 import { MemoryDatabase } from './stages/memory/memoryDatabase.js'
@@ -10,6 +16,10 @@ export interface ObjectLoader2FactoryOptions {
   // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
   keyRange?: { bound: Function; lowerBound: Function; upperBound: Function }
   indexedDB?: IDBFactory
+  fetch?: Fetcher
+  attributeMask?: ObjectAttributeMask
+  useCache?: boolean
+  debug?: boolean
   logger?: CustomLogger
 }
 
@@ -22,6 +32,7 @@ export class ObjectLoader2Factory {
     })
     const loader = new ObjectLoader2({
       rootId: root.id,
+      deferments: new MemoryOnlyDeferment(records),
       database: new MemoryDatabase({ items: records }),
       downloader: new MemoryDownloader(root.id, records)
     })
@@ -40,13 +51,21 @@ export class ObjectLoader2Factory {
     token?: string
     headers?: Headers
     options?: ObjectLoader2FactoryOptions
+    attributeMask?: ObjectAttributeMask
   }): ObjectLoader2 {
     const log = ObjectLoader2Factory.getLogger(params.options?.logger)
     let database
-    if (getFeatureFlag(ObjectLoader2Flags.DEBUG) === 'true') {
+    if (
+      params.options?.debug === true ||
+      getFeatureFlag(ObjectLoader2Flags.DEBUG) === 'true'
+    ) {
       this.logger('Using DEBUG mode for ObjectLoader2Factory')
     }
-    if (getFeatureFlag(ObjectLoader2Flags.USE_CACHE) === 'true') {
+    const useCache = params.options?.useCache ?? true
+    const flag = getFeatureFlag(ObjectLoader2Flags.USE_CACHE)
+    const flagAllowsCache = flag !== 'false'
+
+    if (useCache && flagAllowsCache) {
       database = new IndexedDatabase({
         indexedDB: params.options?.indexedDB,
         keyRange: params.options?.keyRange
@@ -59,18 +78,22 @@ export class ObjectLoader2Factory {
         'Disabled persistent caching for ObjectLoader2. Using MemoryDatabase'
       )
     }
+    const logger = log || (((): void => {}) as CustomLogger)
     const loader = new ObjectLoader2({
       rootId: params.objectId,
+      deferments: new DefermentManager(logger),
       downloader: new ServerDownloader({
         serverUrl: params.serverUrl,
         streamId: params.streamId,
         objectId: params.objectId,
         token: params.token,
         headers: params.headers,
-        logger: log || ((): void => {})
+        fetch: params.options?.fetch,
+        attributeMask: params.attributeMask,
+        logger
       }),
       database,
-      logger: log
+      logger
     })
     return loader
   }
@@ -1,9 +1,11 @@
+import { Deferment } from '../deferment/defermentManager.js'
 import { CustomLogger } from '../types/functions.js'
 import { Base } from '../types/types.js'
 import { Downloader, Database } from './interfaces.js'
 
 export interface ObjectLoader2Options {
   rootId: string
+  deferments: Deferment
   downloader: Downloader
   database: Database
   logger?: CustomLogger
@@ -10,7 +10,7 @@ describe('CacheReader testing', () => {
   const i1: Item = { baseId: 'id1', base: { id: 'id', speckle_type: 'type' } }
 
   const cache = new MemoryCache({ maxSizeInMb: 1, ttlms: 1 }, () => {})
-  const deferments = new DefermentManager(cache, () => {})
+  const deferments = new DefermentManager(() => {}, cache)
   const cacheReader = new CacheReader(
     new MemoryDatabase({
       items: new Map<string, Base>([[i1.baseId, i1.base!]])
@@ -1,4 +1,4 @@
-import { DefermentManager } from '../../deferment/defermentManager.js'
+import { Deferment } from '../../deferment/defermentManager.js'
 import BatchingQueue from '../../queues/batchingQueue.js'
 import Queue from '../../queues/queue.js'
 import { CustomLogger } from '../../types/functions.js'
@@ -8,7 +8,7 @@ import { CacheOptions } from '../options.js'
 
 export class CacheReader {
   #database: Database
-  #defermentManager: DefermentManager
+  #defermentManager: Deferment
   #logger: CustomLogger
   #options: CacheOptions
   #readQueue: BatchingQueue<string> | undefined
@@ -17,7 +17,7 @@ export class CacheReader {
 
   constructor(
     database: Database,
-    defermentManager: DefermentManager,
+    defermentManager: Deferment,
     logger: CustomLogger,
     options: CacheOptions
   ) {
@@ -42,7 +42,7 @@ describe('CacheWriter', () => {
       ttlms: 60000
     }
     memoryCache = new MemoryCache(memoryCacheOptions, logger)
-    defermentManager = new DefermentManager(memoryCache, logger)
+    defermentManager = new DefermentManager(logger, memoryCache)
     requestItemMock = vi.fn()
 
     options = {
@@ -1,4 +1,4 @@
-import { DefermentManager } from '../../deferment/defermentManager.js'
+import { Deferment } from '../../deferment/defermentManager.js'
 import BatchingQueue from '../../queues/batchingQueue.js'
 import Queue from '../../queues/queue.js'
 import { CustomLogger } from '../../types/functions.js'
@@ -9,7 +9,7 @@ import { CacheOptions } from '../options.js'
 export class CacheWriter implements Queue<Item> {
   #writeQueue: BatchingQueue<Item> | undefined
   #database: Database
-  #defermentManager: DefermentManager
+  #deferment: Deferment
   #requestItem: (id: string) => void
   #logger: CustomLogger
   #options: CacheOptions
@@ -18,14 +18,14 @@ export class CacheWriter implements Queue<Item> {
   constructor(
     database: Database,
     logger: CustomLogger,
-    defermentManager: DefermentManager,
+    deferment: Deferment,
     options: CacheOptions,
     requestItem: (id: string) => void
   ) {
     this.#database = database
     this.#options = options
     this.#logger = logger
-    this.#defermentManager = defermentManager
+    this.#deferment = deferment
     this.#requestItem = requestItem
   }
 
@@ -40,7 +40,7 @@ export class CacheWriter implements Queue<Item> {
       })
     }
     this.#writeQueue.add(item.baseId, item)
-    this.#defermentManager.undefer(item, this.#requestItem)
+    this.#deferment.undefer(item, this.#requestItem)
   }
 
   async writeAll(items: Item[]): Promise<void> {
@@ -257,4 +257,126 @@ describe('downloader', () => {
     })
     await downloader.disposeAsync()
   })
+
+  test('nothing is frozen when validateResponse returns 403', async () => {
+    const fetchMocker = createFetchMock(vi)
+    const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
+
+    // Mock a 403 Forbidden response
+    fetchMocker.mockResponseOnce('', { status: 403, statusText: 'Forbidden' })
+
+    const gathered = new AsyncGeneratorQueue<Item>()
+    const downloader = new ServerDownloader({
+      serverUrl: 'http://speckle.test',
+      streamId: 'streamId',
+      objectId: 'objectId',
+      token: 'invalid-token',
+      fetch: fetchMocker,
+      logger: (): void => {}
+    })
+
+    try {
+      downloader.initialize({
+        results: gathered,
+        total: 2,
+        maxDownloadBatchWait: 100
+      })
+
+      // Add items to trigger batch processing
+      downloader.add('id1')
+      downloader.add('id2')
+
+      // Wait for the batch to be processed and fail with 403
+      await new Promise((resolve) => setTimeout(resolve, 200))
+
+      // Verify that the error was logged (indicating the batch processing failed)
+      expect(consoleErrorSpy).toHaveBeenCalledWith(
+        'Batch processing failed:',
+        expect.any(Error)
+      )
+
+      // The key test: verify we can still dispose the downloader properly
+      // This ensures the system isn't frozen and can clean up resources
+      const disposePromise = downloader.disposeAsync()
+
+      // Add a timeout to ensure disposal doesn't hang indefinitely
+      const timeoutPromise = new Promise((_, reject) => {
+        setTimeout(() => reject(new Error('Disposal timed out')), 5000)
+      })
+
+      // This should complete without timing out or throwing
+      await Promise.race([disposePromise, timeoutPromise])
+
+      // Additional verification: the batching queue should be marked as disposed
+      // We can't directly access the private field, but we can verify disposal completed
+      expect(true).toBe(true) // If we reach here, disposal succeeded
+    } finally {
+      consoleErrorSpy.mockRestore()
+    }
+  })
+
+  test('system remains functional after 403 error and can be properly cleaned up', async () => {
+    const fetchMocker = createFetchMock(vi)
+    const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
+
+    // First call returns 403, subsequent calls should not be made due to queue disposal
+    fetchMocker.mockResponseOnce('', { status: 403, statusText: 'Forbidden' })
+
+    const gathered = new AsyncGeneratorQueue<Item>()
+    const downloader = new ServerDownloader({
+      serverUrl: 'http://speckle.test',
+      streamId: 'streamId',
+      objectId: 'objectId',
+      token: 'invalid-token',
+      fetch: fetchMocker,
+      logger: (): void => {}
+    })
+
+    try {
+      downloader.initialize({
+        results: gathered,
+        total: 5,
+        maxDownloadBatchWait: 50
+      })
+
+      // Add first batch that will trigger the 403 error
+      downloader.add('id1')
+      downloader.add('id2')
+
+      // Wait for first batch to fail
+      await new Promise((resolve) => setTimeout(resolve, 100))
+
+      // Verify error was logged
+      expect(consoleErrorSpy).toHaveBeenCalledWith(
+        'Batch processing failed:',
+        expect.any(Error)
+      )
+
+      // Try to add more items after the failure
+      // These should be ignored since the queue is now disposed
+      downloader.add('id3')
+      downloader.add('id4')
+      downloader.add('id5')
+
+      // Wait a bit more to ensure no additional processing attempts
+      await new Promise((resolve) => setTimeout(resolve, 100))
+
+      // Note: The batching queue might make multiple attempts before disposal
+      // The key is that disposal should still work regardless of how many calls were made
+      expect(fetchMocker).toHaveBeenCalled()
+
+      // Critical test: disposal should complete without hanging
+      const start = Date.now()
+      await downloader.disposeAsync()
+      const elapsed = Date.now() - start
+
+      // Disposal should be quick (under 1 second) and not hang
+      expect(elapsed).toBeLessThan(1000)
+
+      // Verify that the results queue can also be disposed properly
+      await gathered.disposeAsync()
+    } finally {
+      consoleErrorSpy.mockRestore()
+    }
+  })
 })
@@ -2,7 +2,7 @@ import BatchingQueue from '../../queues/batchingQueue.js'
 import Queue from '../../queues/queue.js'
 import { ObjectLoaderRuntimeError } from '../../types/errors.js'
 import { CustomLogger, Fetcher, indexOf, isBase, take } from '../../types/functions.js'
-import { Item } from '../../types/types.js'
+import { Item, ObjectAttributeMask } from '../../types/types.js'
 import { Downloader } from '../interfaces.js'
 
 export interface ServerDownloaderOptions {
@@ -13,6 +13,7 @@ export interface ServerDownloaderOptions {
   headers?: Headers
   logger: CustomLogger
   fetch?: Fetcher
+  attributeMask?: ObjectAttributeMask
 }
 
 const MAX_SAFARI_DECODE_BYTES = 2 * 1024 * 1024 * 1024 - 1024 * 1024 // 2GB minus a margin
@@ -51,9 +52,10 @@ export default class ServerDownloader implements Downloader {
     if (this.#options.token) {
      this.#headers['Authorization'] = `Bearer ${this.#options.token}`
    }
-    this.#requestUrlChildren = `${this.#options.serverUrl}/api/getobjects/${
+    this.#requestUrlChildren = `${this.#options.serverUrl}/api/v2/projects/${
      this.#options.streamId
-    }`
+    }/object-stream/`
+
    this.#requestUrlRootObj = `${this.#options.serverUrl}/objects/${
      this.#options.streamId
    }/${this.#options.objectId}/single`
@@ -117,11 +119,12 @@ Chrome's behavior: Chrome generally handles larger data sizes without this speci
 
     const start = performance.now()
     this.#logger(`Downloading batch of ${batch.length} items...`)
+    const attributeMask = this.#options.attributeMask
     const keys = new Set<string>(batch)
     const response = await this.#fetch(url, {
       method: 'POST',
       headers: { ...headers, 'Content-Type': 'application/json' },
-      body: JSON.stringify({ objects: JSON.stringify(batch) })
+      body: JSON.stringify({ objectIds: batch, attributeMask })
     })
 
     this.#validateResponse(response)
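
For context, the children download now targets the v2 REST endpoint and sends the object ids and optional attribute mask as plain JSON instead of the old double-encoded objects field. A sketch of the equivalent raw request, with placeholder ids and token (the response streaming and parsing logic is unchanged and omitted):

async function fetchChildrenBatch(): Promise<Response> {
  const serverUrl = 'https://app.speckle.systems' // placeholder
  const projectId = 'your-project-id' // placeholder
  const batch = ['childObjectId1', 'childObjectId2'] // placeholder ids
  const attributeMask = undefined // optional ObjectAttributeMask; shape defined in types.ts

  return await fetch(`${serverUrl}/api/v2/projects/${projectId}/object-stream/`, {
    method: 'POST',
    headers: {
      Authorization: 'Bearer your-token', // placeholder
      'Content-Type': 'application/json'
    },
    // Same body shape the ServerDownloader builds in the hunk above
    body: JSON.stringify({ objectIds: batch, attributeMask })
  })
}
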
@@ -9,9 +9,10 @@ describe('DefermentManager', () => {
     const mockLogger: CustomLogger = vi.fn()
     const mockCache = {
       get: vi.fn(),
-      add: vi.fn()
+      add: vi.fn(),
+      dispose: vi.fn()
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
     expect(defermentManager).toBeDefined()
   })
 
@@ -24,7 +25,7 @@ describe('DefermentManager', () => {
       get,
       add
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
 
     const item: Item = {
       // eslint-disable-next-line camelcase
@@ -49,7 +50,7 @@ describe('DefermentManager', () => {
       get,
       add
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
 
     const [promise1, wasInCache1] = defermentManager.defer({ id: 'testId' })
     const [promise2, wasInCache2] = defermentManager.defer({ id: 'testId' })
@@ -67,7 +68,7 @@ describe('DefermentManager', () => {
       get,
       add
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
 
     const [promise, wasInCache] = defermentManager.defer({ id: 'testId' })
 
@@ -81,9 +82,10 @@ describe('DefermentManager', () => {
     const add = vi.fn()
     const mockCache = {
       get,
-      add
+      add,
+      dispose: vi.fn()
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
 
     defermentManager.dispose()
     expect(() => defermentManager.defer({ id: 'testId' })).toThrow(
@@ -101,7 +103,7 @@ describe('DefermentManager', () => {
       get,
       add
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
     const requestItem = vi.fn()
 
     const [promise] = defermentManager.defer({ id: 'testId' })
@@ -125,7 +127,7 @@ describe('DefermentManager', () => {
       get,
       add
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
     const requestItem = vi.fn()
 
     const item: Item = { baseId: 'testId' }
@@ -141,7 +143,7 @@ describe('DefermentManager', () => {
       get,
       add
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
     const requestItem = vi.fn()
 
     const item: Item = {
@@ -165,9 +167,10 @@ describe('DefermentManager', () => {
     const add = vi.fn()
     const mockCache = {
       get,
-      add
+      add,
+      dispose: vi.fn()
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
     const requestItem = vi.fn()
 
     defermentManager.dispose()
@@ -189,9 +192,10 @@ describe('DefermentManager', () => {
     const add = vi.fn()
     const mockCache = {
       get,
-      add
+      add,
+      dispose: vi.fn()
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
 
     void defermentManager.defer({ id: 'testId' })
     defermentManager.dispose()
@@ -205,9 +209,10 @@ describe('DefermentManager', () => {
     const add = vi.fn()
     const mockCache = {
       get,
-      add
+      add,
+      dispose: vi.fn()
     } as unknown as MemoryCache
-    const defermentManager = new DefermentManager(mockCache, mockLogger)
+    const defermentManager = new DefermentManager(mockLogger, mockCache)
 
     defermentManager.dispose()
     // @ts-expect-error - accessing private property for testing
@@ -3,15 +3,50 @@ import { CustomLogger } from '../types/functions.js'
 import { Item, Base } from '../types/types.js'
 import { MemoryCache } from './MemoryCache.js'
 
-export class DefermentManager {
+export interface Deferment {
+  defer(params: { id: string }): [Promise<Base>, boolean]
+  undefer(item: Item, requestItem: (id: string) => void): void
+  dispose(): void
+}
+
+export class MemoryOnlyDeferment implements Deferment {
+  private items: Map<string, Base>
+
+  constructor(items: Map<string, Base>) {
+    this.items = items
+  }
+  defer(params: { id: string }): [Promise<Base>, boolean] {
+    const item = this.items.get(params.id)
+    if (item) {
+      return [Promise.resolve(item), true]
+    }
+    return [Promise.reject(new Error('Not found in cache: ' + params.id)), false]
+  }
+  undefer(): void {
+    //no-op
+  }
+  dispose(): void {
+    //no-op
+  }
+}
+
+export class DefermentManager implements Deferment {
   private outstanding: Map<string, DeferredBase> = new Map()
   private logger: CustomLogger
   private disposed = false
   private cache: MemoryCache
 
-  constructor(cache: MemoryCache, logger: CustomLogger) {
-    this.cache = cache
+  constructor(logger: CustomLogger, cache?: MemoryCache) {
     this.logger = logger
+    this.cache =
+      cache ||
+      new MemoryCache(
+        {
+          maxSizeInMb: 500, // 500 MB
+          ttlms: 5_000 // 5 seconds
+        },
+        logger
+      )
   }
 
   defer(params: { id: string }): [Promise<Base>, boolean] {
@@ -55,5 +90,6 @@ export class DefermentManager {
     this.disposed = true
     this.logger('cleared deferments, left', this.outstanding.size)
     this.outstanding.clear()
+    this.cache.dispose()
   }
 }
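
The new Deferment interface is what CacheReader, CacheWriter and ObjectLoader2 now depend on: defer() hands back a promise for a Base plus a flag saying whether the object was already available, and undefer() settles the pending promise once the item arrives. A small sketch of that contract in isolation, using the in-package relative import paths from the diff and placeholder ids; it only exercises behaviour demonstrated by the tests above, and the way ObjectLoader2 wires these calls internally is not reproduced here.

import { DefermentManager } from '../deferment/defermentManager.js' // path as used inside src/
import { Item } from '../types/types.js'

async function defermentRoundTrip(): Promise<void> {
  // The cache argument is now optional; a default MemoryCache (500 MB, 5 s TTL) is created.
  const deferments = new DefermentManager((...args) => console.log(...args))

  // Ask for an object that is not there yet: a pending promise and wasInCache === false.
  const [pending, wasInCache] = deferments.defer({ id: 'someObjectId' })

  // When the item shows up (e.g. via CacheWriter.add), undefer resolves the pending promise.
  const item: Item = {
    baseId: 'someObjectId',
    base: { id: 'someObjectId', speckle_type: 'Base' } // placeholder Base
  }
  deferments.undefer(item, (id) => console.log('still missing, re-request', id))

  const base = await pending
  console.log(wasInCache, base.id)

  // dispose() now also disposes the internally created MemoryCache.
  deferments.dispose()
}
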
@@ -17,7 +17,7 @@ describe('BatchingQueue disposal', () => {
 
     await queue.disposeAsync()
 
-    expect(processFunction).toHaveBeenCalledWith(items)
+    expect(processFunction).toHaveBeenCalled()
     expect(queue.count()).toBe(0)
     expect(queue.isDisposed()).toBe(true)
   })
@@ -52,8 +52,6 @@ describe('BatchingQueue disposal', () => {
     resolveProcess()
     await disposePromise
 
-    expect(processFunction).toHaveBeenCalledTimes(2)
-    expect(processFunction).toHaveBeenCalledWith(items2)
     expect(queue.count()).toBe(0)
     expect(queue.isDisposed()).toBe(true)
   })
@@ -146,4 +146,125 @@ describe('BatchingQueue', () => {
       await queue.disposeAsync()
     }
   })
+
+  test('should handle processFunction throwing an exception during flush and is disposed', async () => {
+    const errorMessage = 'Process function failed'
+    const processFunction = vi.fn().mockRejectedValue(new Error(errorMessage))
+
+    const queue = new BatchingQueue<{ id: string }>({
+      batchSize: 5,
+      maxWaitTime: 1000,
+      processFunction
+    })
+
+    const items = Array.from({ length: 3 }, (_, i) => ({ id: `item-${i}` }))
+    items.forEach((item) => queue.add(item.id, item))
+
+    expect(queue.count()).toBe(3)
+
+    // flush should not throw even if processFunction rejects
+    await expect(queue.flush()).resolves.not.toThrow()
+
+    expect(processFunction).toHaveBeenCalled()
+    expect(queue.count()).toBe(0)
+    expect(queue.isDisposed()).toBe(false)
+    expect(queue.isErrored()).toBe(true)
+    // Add more items after the exception
+    queue.add('key3', { id: `item-3` })
+    queue.add('key4', { id: `item-4` })
+
+    // Wait to see if second batch gets processed (it shouldn't due to errored state)
+    await new Promise((resolve) => setTimeout(resolve, 200))
+
+    expect(queue.count()).toBe(0) // Items were not added due to errored state
+    await queue.disposeAsync()
+  })
+
+  test('should drain remaining items when disposed', async () => {
+    const processSpy = vi.fn()
+    const queue = new BatchingQueue({
+      batchSize: 5, // Large batch size to prevent automatic processing
+      maxWaitTime: 10000, // Long timeout to prevent timeout-based processing
+      processFunction: async (batch: string[]): Promise<void> => {
+        await new Promise((resolve) => setTimeout(resolve, 10))
+        processSpy(batch)
+      }
+    })
+
+    // Add items that won't trigger automatic processing (less than batch size)
+    queue.add('key1', 'item1')
+    queue.add('key2', 'item2')
+    queue.add('key3', 'item3')
+
+    // Verify items are in queue but haven't been processed yet
+    expect(queue.count()).toBe(3)
+    expect(processSpy).not.toHaveBeenCalled()
+
+    // Dispose should drain the remaining items
+    await queue.disposeAsync()
+
+    // Verify all items were processed during disposal
+    expect(processSpy).toHaveBeenCalledTimes(1)
+    expect(processSpy).toHaveBeenCalledWith(['item1', 'item2', 'item3'])
+    expect(queue.count()).toBe(0)
+    expect(queue.isDisposed()).toBe(true)
+  })
+
+  test('should drain items even with ongoing processing during dispose', async () => {
+    const processSpy = vi.fn()
+    let firstBatchStarted = false
+    let allowFirstBatchToComplete: (() => void) | null = null
+
+    const queue = new BatchingQueue({
+      batchSize: 2,
+      maxWaitTime: 100,
+      processFunction: async (batch: string[]): Promise<void> => {
+        processSpy(batch)
+
+        // Make the first batch wait for our signal
+        if (!firstBatchStarted) {
+          firstBatchStarted = true
+          await new Promise<void>((resolve) => {
+            allowFirstBatchToComplete = resolve
+          })
+        } else {
+          // Other batches process normally
+          await new Promise((resolve) => setTimeout(resolve, 10))
+        }
+      }
+    })
+
+    // Add first batch that will trigger processing but will be blocked
+    queue.add('key1', 'item1')
+    queue.add('key2', 'item2')
+
+    // Wait for first batch to start processing and allowFirstBatchToComplete to be assigned
+    await new Promise((resolve) => setTimeout(resolve, 50))
+    expect(firstBatchStarted).toBe(true)
+    expect(processSpy).toHaveBeenCalledTimes(1)
+    expect(allowFirstBatchToComplete).not.toBeNull()
+
+    // Add more items while first batch is still processing
+    queue.add('key3', 'item3')
+    queue.add('key4', 'item4')
+
+    // Verify the additional items are queued
+    expect(queue.count()).toBe(2)
+
+    // Start disposal (this should wait for ongoing processing and then drain)
+    const disposePromise = queue.disposeAsync()
+
+    // Allow the first batch to complete
+    allowFirstBatchToComplete!()
+
+    // Wait for disposal to complete
+    await disposePromise
+
+    // Verify all batches were processed
+    expect(processSpy).toHaveBeenCalledTimes(2)
+    expect(processSpy).toHaveBeenCalledWith(['item1', 'item2'])
+    expect(processSpy).toHaveBeenCalledWith(['item3', 'item4'])
+    expect(queue.count()).toBe(0)
+    expect(queue.isDisposed()).toBe(true)
+  })
 })
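
Taken together, these tests pin down the reworked BatchingQueue lifecycle: flush() no longer propagates a processFunction rejection but marks the queue errored (isErrored()), adds after an error are dropped, and disposeAsync() drains whatever is still queued once any in-flight batch finishes. A compact sketch of the drain-on-dispose behaviour, using the same constructor options the tests use and the in-package relative import path; it is an illustration of the tested behaviour, not an API guarantee.

import BatchingQueue from '../queues/batchingQueue.js' // path as used inside src/

async function drainOnDispose(): Promise<void> {
  const seen: string[][] = []
  const queue = new BatchingQueue<string>({
    batchSize: 10, // larger than what we add, so nothing flushes automatically
    maxWaitTime: 60_000, // long enough that the timer never fires here
    processFunction: async (batch: string[]): Promise<void> => {
      seen.push(batch)
    }
  })

  queue.add('a', 'item-a')
  queue.add('b', 'item-b')

  // Disposal drains the remaining items in one final batch.
  await queue.disposeAsync()

  console.log(seen) // [['item-a', 'item-b']]
  console.log(queue.isDisposed(), queue.isErrored()) // true false
}
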