@speckle/objectloader2 2.23.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
1
+ {
2
+ "extends": "../tsconfig.json",
3
+ "compilerOptions": {
4
+ "rootDir": "../src",
5
+ "module": "nodenext",
6
+ "moduleResolution": "nodenext"
7
+ }
8
+ }
@@ -0,0 +1,17 @@
1
+ {
2
+ "extends": "./build.json",
3
+ "include": [
4
+ "../src/**/*.ts",
5
+ "../src/**/*.cts",
6
+ "../src/**/*.tsx",
7
+ "../src/**/*.json"
8
+ ],
9
+ "exclude": [
10
+ "../**/*.spec.ts",
11
+ "../src/**/*.mts",
12
+ "../src/package.json"
13
+ ],
14
+ "compilerOptions": {
15
+ "outDir": "../.tshy-build/commonjs"
16
+ }
17
+ }
package/.tshy/esm.json ADDED
@@ -0,0 +1,16 @@
1
+ {
2
+ "extends": "./build.json",
3
+ "include": [
4
+ "../src/**/*.ts",
5
+ "../src/**/*.mts",
6
+ "../src/**/*.tsx",
7
+ "../src/**/*.json"
8
+ ],
9
+ "exclude": [
10
+ "../**/*.spec.ts",
11
+ "../src/package.json"
12
+ ],
13
+ "compilerOptions": {
14
+ "outDir": "../.tshy-build/esm"
15
+ }
16
+ }
@@ -0,0 +1,8 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const objectLoader2_js_1 = __importDefault(require("./operations/objectLoader2.js"));
7
+ exports.default = objectLoader2_js_1.default;
8
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1,3 @@
1
+ import ObjectLoader2 from './operations/objectLoader2.js';
2
+ export default ObjectLoader2;
3
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1,57 @@
1
import { baseConfigs, globals, getESMDirname } from '../../eslint.config.mjs'
import tseslint from 'typescript-eslint'

/**
 * Flat ESLint config for this package: the repo-wide base rules plus
 * type-checked TS linting scoped via this package's tsconfig.
 * @type {Array<import('eslint').Linter.FlatConfig>}
 */
const configs = [
  ...baseConfigs,
  {
    // Browser example scripts run in a browser environment, not node
    files: ['examples/browser/**/*.{ts,js}'],
    languageOptions: {
      globals: {
        ...globals.browser
      }
    }
  },
  // Apply the type-checked preset only to TypeScript sources
  ...tseslint.configs.recommendedTypeChecked.map((c) => ({
    ...c,
    files: [...(c.files || []), '**/*.ts', '**/*.d.ts']
  })),
  {
    files: ['**/*.ts', '**/*.d.ts'],
    languageOptions: {
      parserOptions: {
        tsconfigRootDir: getESMDirname(import.meta.url),
        projectService: {
          // Lint loose root-level .ts files not covered by a tsconfig entry
          allowDefaultProject: ['*.ts']
        }
      }
    },
    rules: {
      '@typescript-eslint/restrict-template-expressions': 'off'
    }
  },
  {
    // Declaration files legitimately use `any` at API boundaries
    files: ['**/*.d.ts'],
    rules: {
      '@typescript-eslint/no-explicit-any': 'off'
    }
  },
  {
    // vite.config.ts executes under node
    files: ['vite.config.ts'],
    languageOptions: {
      globals: {
        ...globals.node
      }
    }
  },
  {
    // Vitest/chai-style expectations trip no-unused-expressions
    files: ['**/*.spec.ts'],
    rules: {
      '@typescript-eslint/no-unused-expressions': 'off'
    }
  }
]

export default configs
package/package.json ADDED
@@ -0,0 +1,76 @@
1
+ {
2
+ "name": "@speckle/objectloader2",
3
+ "version": "2.23.12",
4
+ "description": "This is an updated objectloader for the Speckle viewer written in typescript",
5
+ "main": "./dist/commonjs/index.js",
6
+ "module": "./dist/esm/index.js",
7
+ "homepage": "https://speckle.systems",
8
+ "types": "./dist/commonjs/index.d.ts",
9
+ "type": "module",
10
+ "repository": {
11
+ "type": "git",
12
+ "url": "https://github.com/specklesystems/speckle-server.git",
13
+ "directory": "packages/objectloader2"
14
+ },
15
+ "engines": {
16
+ "node": ">=18.0.0"
17
+ },
18
+ "scripts": {
19
+ "lint:eslint": "eslint .",
20
+ "lint:tsc": "tsc --noEmit",
21
+ "lint": "yarn lint:eslint && yarn lint:tsc",
22
+ "lint:ci": "yarn lint:tsc",
23
+ "build": "NODE_ENV=production tshy",
24
+ "dev": "tshy --watch",
25
+ "test": "vitest",
26
+ "test:single-run": "vitest run"
27
+ },
28
+ "keywords": [
29
+ "speckle",
30
+ "aec",
31
+ "speckle api"
32
+ ],
33
+ "author": "AEC Systems",
34
+ "license": "Apache-2.0",
35
+ "dependencies": {
36
+ "@speckle/shared": "^2.23.12",
37
+ "dexie": "^4.0.11"
38
+ },
39
+ "devDependencies": {
40
+ "@types/lodash": "^4.17.5",
41
+ "@types/lodash-es": "^4.17.6",
42
+ "@typescript-eslint/eslint-plugin": "^7.12.0",
43
+ "@typescript-eslint/parser": "^7.12.0",
44
+ "@vitest/ui": "^3.0.9",
45
+ "eslint": "^9.4.0",
46
+ "eslint-config-prettier": "^9.1.0",
47
+ "fake-indexeddb": "^6.0.0",
48
+ "prettier": "^3.3.2",
49
+ "tshy": "^3.0.2",
50
+ "typescript": "^5.2.2",
51
+ "vitest": "^3.0.7",
52
+ "vitest-fetch-mock": "^0.4.5"
53
+ },
54
+ "exports": {
55
+ "./package.json": "./package.json",
56
+ ".": {
57
+ "import": {
58
+ "types": "./dist/esm/index.d.ts",
59
+ "default": "./dist/esm/index.js"
60
+ },
61
+ "require": {
62
+ "types": "./dist/commonjs/index.d.ts",
63
+ "default": "./dist/commonjs/index.js"
64
+ }
65
+ }
66
+ },
67
+ "tshy": {
68
+ "exclude": [
69
+ "**/*.spec.ts"
70
+ ],
71
+ "exports": {
72
+ "./package.json": "./package.json",
73
+ ".": "./src/index.ts"
74
+ }
75
+ }
76
+ }
package/readme.md ADDED
@@ -0,0 +1,42 @@
1
+ # objectloader2 for the Speckle viewer
2
+
3
+ This is an updated object loader for the Speckle viewer, rewritten in TypeScript.
4
+
5
+ The main aim for the objectloader is:
6
+
7
+ - download Speckle objects as JSON
8
+ - cache in IndexedDB so the same objects aren't downloaded twice
9
+ - give data to the viewer, as returned by the `getObjectIterator` generator, as soon as possible
10
+ - do the above as concurrently as a browser allows
11
+
12
+ ## Architecture
13
+
14
+ To achieve increased concurrency, the different phases of the objectloader are divided into pools of workers with queues to feed them.
15
+
16
+ ```mermaid
17
+ flowchart TD
18
+ start(Root Commit)
19
+ getIds(Parse Root to get all IDs)
20
+ cached{Cached?}
21
+ download(Download IDs)
22
+ save(Write to Cache)
23
+ load(Load from Cache)
24
+ generate(Generate to Viewer!)
25
+
26
+ start --> getIds
27
+ getIds --> cached
28
+ cached -->|Yes| load
29
+ cached -->|No| download
30
+ load --> generate
31
+ download --> generate
32
+ download --> save
33
+
34
+ ```
35
+
36
+ From the list of IDs, the IDs are moved to a queue, where a pool of readers begins checking the cache for them.
37
+
38
+ Results are then sent to the viewer, if found; otherwise they're sent to the download queue.
39
+
40
+ The download queue is a batching mechanism that gets what is available, up to a limit or a timeout. The results are parsed and given to the generator and written to another queue.
41
+
42
+ The write cache queue is processed with a single writer to the indexeddb.
@@ -0,0 +1,35 @@
1
+ import Queue from './queue.js'
2
+
3
+ export default class AsyncGeneratorQueue<T> implements Queue<T> {
4
+ #buffer: T[] = []
5
+ #resolveQueue: ((value: T) => void)[] = []
6
+ #finished = false
7
+
8
+ add(value: T): void {
9
+ if (this.#resolveQueue.length > 0) {
10
+ // If there's a pending consumer, resolve immediately
11
+ const resolve = this.#resolveQueue.shift()!
12
+ resolve(value)
13
+ } else {
14
+ // Otherwise, add to the buffer
15
+ this.#buffer.push(value)
16
+ }
17
+ }
18
+
19
+ async *consume(): AsyncGenerator<T> {
20
+ while (
21
+ !this.#finished ||
22
+ this.#resolveQueue.length > 0 ||
23
+ this.#buffer.length > 0
24
+ ) {
25
+ if (this.#buffer.length > 0) {
26
+ yield this.#buffer.shift()! // Yield available values
27
+ } else {
28
+ yield await new Promise<T>((resolve) => this.#resolveQueue.push(resolve))
29
+ }
30
+ }
31
+ }
32
+ dispose(): void {
33
+ this.#finished = true
34
+ }
35
+ }
@@ -0,0 +1,60 @@
1
+ export default class BatchedPool<T> {
2
+ #queue: T[] = []
3
+ #concurrencyAndSizes: number[]
4
+ #processFunction: (batch: T[]) => Promise<void>
5
+
6
+ #baseInterval: number
7
+
8
+ #processingLoop: Promise<void>
9
+ #finished = false
10
+
11
+ constructor(params: {
12
+ concurrencyAndSizes: number[]
13
+ maxWaitTime?: number
14
+ processFunction: (batch: T[]) => Promise<void>
15
+ }) {
16
+ this.#concurrencyAndSizes = params.concurrencyAndSizes
17
+ this.#baseInterval = Math.min(params.maxWaitTime ?? 200, 200) // Initial batch time (ms)
18
+ this.#processFunction = params.processFunction
19
+ this.#processingLoop = this.#loop()
20
+ }
21
+
22
+ add(item: T): void {
23
+ this.#queue.push(item)
24
+ }
25
+
26
+ getBatch(batchSize: number): T[] {
27
+ return this.#queue.splice(0, Math.min(batchSize, this.#queue.length))
28
+ }
29
+
30
+ async #runWorker(batchSize: number) {
31
+ while (!this.#finished || this.#queue.length > 0) {
32
+ if (this.#queue.length > 0) {
33
+ const batch = this.getBatch(batchSize)
34
+ try {
35
+ await this.#processFunction(batch)
36
+ } catch (e) {
37
+ console.error(e)
38
+ }
39
+ }
40
+ await this.#delay(this.#baseInterval)
41
+ }
42
+ }
43
+
44
+ async disposeAsync(): Promise<void> {
45
+ this.#finished = true
46
+ await this.#processingLoop
47
+ }
48
+
49
+ async #loop(): Promise<void> {
50
+ // Initialize workers
51
+ const workers = Array.from(this.#concurrencyAndSizes, (batchSize: number) =>
52
+ this.#runWorker(batchSize)
53
+ )
54
+ await Promise.all(workers)
55
+ }
56
+
57
+ #delay(ms: number): Promise<void> {
58
+ return new Promise((resolve) => setTimeout(resolve, ms))
59
+ }
60
+ }
@@ -0,0 +1,77 @@
1
+ import Queue from './queue.js'
2
+
3
+ export default class BatchingQueue<T> implements Queue<T> {
4
+ #queue: T[] = []
5
+ #batchSize: number
6
+ #processFunction: (batch: T[]) => Promise<void>
7
+
8
+ #baseInterval: number
9
+ #minInterval: number
10
+ #maxInterval: number
11
+
12
+ #processingLoop: Promise<void>
13
+ #finished = false
14
+
15
+ constructor(params: {
16
+ batchSize: number
17
+ maxWaitTime?: number
18
+ processFunction: (batch: T[]) => Promise<void>
19
+ }) {
20
+ this.#batchSize = params.batchSize
21
+ this.#baseInterval = Math.min(params.maxWaitTime ?? 200, 200) // Initial batch time (ms)
22
+ this.#minInterval = Math.min(params.maxWaitTime ?? 100, 100) // Minimum batch time
23
+ this.#maxInterval = Math.min(params.maxWaitTime ?? 3000, 3000) // Maximum batch time
24
+ this.#processFunction = params.processFunction
25
+ this.#processingLoop = this.#loop()
26
+ }
27
+
28
+ async disposeAsync(): Promise<void> {
29
+ this.#finished = true
30
+ await this.#processingLoop
31
+ }
32
+
33
+ add(item: T): void {
34
+ this.#queue.push(item)
35
+ }
36
+
37
+ count(): number {
38
+ return this.#queue.length
39
+ }
40
+
41
+ #getBatch(batchSize: number): T[] {
42
+ return this.#queue.splice(0, Math.min(batchSize, this.#queue.length))
43
+ }
44
+
45
+ async #loop(): Promise<void> {
46
+ let interval = this.#baseInterval
47
+ while (!this.#finished || this.#queue.length > 0) {
48
+ const startTime = performance.now()
49
+ if (this.#queue.length > 0) {
50
+ const batch = this.#getBatch(this.#batchSize)
51
+ //console.log('running with queue size of ' + this.#queue.length)
52
+ await this.#processFunction(batch)
53
+ }
54
+ if (this.#queue.length < this.#batchSize / 2) {
55
+ //refigure interval
56
+ const endTime = performance.now()
57
+ const duration = endTime - startTime
58
+ if (duration > interval) {
59
+ interval = Math.min(interval * 1.5, this.#maxInterval) // Increase if slow or empty
60
+ } else {
61
+ interval = Math.max(interval * 0.8, this.#minInterval) // Decrease if fast
62
+ }
63
+ /*console.log(
64
+ 'queue is waiting ' +
65
+ interval / 1000 +
66
+ ' with queue size of ' +
67
+ this.#queue.length
68
+ )*/
69
+ await this.#delay(interval)
70
+ }
71
+ }
72
+ }
73
+
74
+ #delay(ms: number): Promise<void> {
75
+ return new Promise((resolve) => setTimeout(resolve, ms))
76
+ }
77
+ }
@@ -0,0 +1,12 @@
1
+ import Queue from './queue.js'
2
+
3
+ export default class BufferQueue<T> implements Queue<T> {
4
+ #buffer: T[] = []
5
+ add(value: T): void {
6
+ this.#buffer.push(value)
7
+ }
8
+
9
+ values(): T[] {
10
+ return this.#buffer
11
+ }
12
+ }
@@ -0,0 +1,17 @@
1
+ import { Base } from '../types/types.js'
2
+
3
+ export class DeferredBase {
4
+ promise: Promise<Base>
5
+ resolve!: (value: Base) => void
6
+ reject!: (reason?: Error) => void
7
+
8
+ readonly id: string
9
+
10
+ constructor(id: string) {
11
+ this.id = id
12
+ this.promise = new Promise<Base>((resolve, reject) => {
13
+ this.resolve = resolve
14
+ this.reject = reject
15
+ })
16
+ }
17
+ }
@@ -0,0 +1,3 @@
1
+ export default interface Queue<T> {
2
+ add(value: T): void
3
+ }
package/src/index.ts ADDED
@@ -0,0 +1,3 @@
1
+ import ObjectLoader2 from './operations/objectLoader2.js'
2
+
3
+ export default ObjectLoader2
@@ -0,0 +1,69 @@
1
+ import { describe, expect, test } from 'vitest'
2
+ import IndexedDatabase from './indexedDatabase.js'
3
+ import { IDBFactory, IDBKeyRange } from 'fake-indexeddb'
4
+ import { Item } from '../types/types.js'
5
+ import BufferQueue from '../helpers/bufferQueue.js'
6
+
7
+ describe('database cache', () => {
8
+ test('write single item to queue use getItem', async () => {
9
+ const i: Item = { baseId: 'id', base: { id: 'id' } }
10
+ const database = new IndexedDatabase({
11
+ indexedDB: new IDBFactory(),
12
+ keyRange: IDBKeyRange,
13
+ maxCacheBatchWriteWait: 200
14
+ })
15
+ await database.add(i)
16
+ await database.disposeAsync()
17
+
18
+ const x = await database.getItem({ id: 'id' })
19
+ expect(x).toBeDefined()
20
+ expect(JSON.stringify(x)).toBe(JSON.stringify(i))
21
+ })
22
+
23
+ test('write two items to queue use getItem', async () => {
24
+ const i1: Item = { baseId: 'id1', base: { id: 'id' } }
25
+ const i2: Item = { baseId: 'id2', base: { id: 'id' } }
26
+ const database = new IndexedDatabase({
27
+ indexedDB: new IDBFactory(),
28
+ keyRange: IDBKeyRange
29
+ })
30
+ await database.add(i1)
31
+ await database.add(i2)
32
+ await database.disposeAsync()
33
+
34
+ const x1 = await database.getItem({ id: i1.baseId })
35
+ expect(x1).toBeDefined()
36
+ expect(JSON.stringify(x1)).toBe(JSON.stringify(i1))
37
+
38
+ const x2 = await database.getItem({ id: i2.baseId })
39
+ expect(x2).toBeDefined()
40
+ expect(JSON.stringify(x2)).toBe(JSON.stringify(i2))
41
+ })
42
+
43
+ test('write two items to queue use getItem', async () => {
44
+ const i1: Item = { baseId: 'id1', base: { id: 'id' } }
45
+ const i2: Item = { baseId: 'id2', base: { id: 'id' } }
46
+ const database = new IndexedDatabase({
47
+ indexedDB: new IDBFactory(),
48
+ keyRange: IDBKeyRange
49
+ })
50
+ await database.add(i1)
51
+ await database.add(i2)
52
+ await database.disposeAsync()
53
+
54
+ const foundItems = new BufferQueue<Item>()
55
+ const notFoundItems = new BufferQueue<string>()
56
+
57
+ await database.processItems({
58
+ ids: [i1.baseId, i2.baseId],
59
+ foundItems,
60
+ notFoundItems
61
+ })
62
+
63
+ expect(foundItems.values().length).toBe(2)
64
+ expect(JSON.stringify(foundItems.values()[0])).toBe(JSON.stringify(i1))
65
+ expect(JSON.stringify(foundItems.values()[1])).toBe(JSON.stringify(i2))
66
+
67
+ expect(notFoundItems.values().length).toBe(0)
68
+ })
69
+ })
@@ -0,0 +1,167 @@
1
+ import BatchingQueue from '../helpers/batchingQueue.js'
2
+ import Queue from '../helpers/queue.js'
3
+ import { CustomLogger, Item } from '../types/types.js'
4
+ import { isSafari } from '@speckle/shared'
5
+ import { BaseDatabaseOptions } from './options.js'
6
+ import { Cache } from './interfaces.js'
7
+ import { Dexie, DexieOptions, Table } from 'dexie'
8
+
9
// Dexie wrapper around the shared "speckle-cache" IndexedDB database.
class ObjectStore extends Dexie {
  static #databaseName: string = 'speckle-cache'
  objects!: Table<Item, string> // Table type: <entity, primaryKey>

  constructor(options: DexieOptions) {
    super(ObjectStore.#databaseName, options)

    // NOTE(review): in Dexie's schema string, 'baseId, item' makes baseId
    // the primary key and declares a secondary index on a property named
    // `item` — but Item declares no such property (it has `base`), so the
    // index looks vestigial. Changing it is a schema migration; confirm
    // before touching.
    this.version(1).stores({
      objects: 'baseId, item' // baseId is primary key
    })
  }
}
21
+
22
/**
 * IndexedDB-backed object cache (via Dexie). Writes are batched through a
 * BatchingQueue; reads can be done one-by-one (getItem) or in bulk
 * (processItems), which routes hits/misses into caller-supplied queues.
 */
export default class IndexedDatabase implements Cache {
  #options: BaseDatabaseOptions
  #logger: CustomLogger

  // Opened lazily on first use (add/getItem/processItems)
  #cacheDB?: ObjectStore

  // Created lazily on first add(); batches writes to cut transaction count
  #writeQueue: BatchingQueue<Item> | undefined

  // #count: number = 0

  constructor(options: BaseDatabaseOptions) {
    // Defaults first, caller-provided options win
    this.#options = {
      ...{
        maxCacheReadSize: 10000,
        maxCacheBatchWriteWait: 1000
      },
      ...options
    }
    this.#logger = options.logger || (() => {})
  }

  /**
   * Queues an item for a batched write to the cache. The first call opens
   * the database and spins up the write queue.
   */
  async add(item: Item): Promise<void> {
    if (!this.#writeQueue) {
      await this.#setupCacheDb()
      this.#writeQueue = new BatchingQueue<Item>({
        batchSize: this.#options.maxCacheWriteSize ?? 10000,
        maxWaitTime: this.#options.maxCacheBatchWriteWait,
        processFunction: (batch: Item[]) =>
          this.#cacheSaveBatch({ batch, cacheDB: this.#cacheDB! })
      })
    }
    this.#writeQueue.add(item)
  }

  /** Flushes and stops the write queue. Safe to call when nothing was written. */
  async disposeAsync(): Promise<void> {
    await this.#writeQueue?.disposeAsync()
  }

  async #openDatabase(): Promise<ObjectStore> {
    const db = new ObjectStore({
      // Injectable for tests (e.g. fake-indexeddb); falls back to globals
      indexedDB: this.#options.indexedDB ?? globalThis.indexedDB,
      IDBKeyRange: this.#options.keyRange ?? IDBKeyRange,
      chromeTransactionDurability: 'relaxed'
    })
    await db.open()
    return db
  }

  // Idempotent: opens the database once, applying the Safari workaround first
  async #setupCacheDb(): Promise<void> {
    if (this.#cacheDB !== undefined) {
      return
    }

    // Initialize
    await this.#safariFix()
    this.#cacheDB = await this.#openDatabase()
  }

  /**
   * Looks up the given ids in the cache in batches of maxCacheReadSize,
   * pushing hits into foundItems and misses into notFoundItems. Reading is
   * paused while the write queue is badly backed up, to bound memory use.
   */
  async processItems(params: {
    ids: string[]
    foundItems: Queue<Item>
    notFoundItems: Queue<string>
  }): Promise<void> {
    const { ids, foundItems, notFoundItems } = params
    await this.#setupCacheDb()
    const maxCacheReadSize = this.#options.maxCacheReadSize ?? 10000

    for (let i = 0; i < ids.length; ) {
      if ((this.#writeQueue?.count() ?? 0) > maxCacheReadSize * 2) {
        this.#logger(
          'pausing reads (# in write queue: ' + this.#writeQueue?.count() + ')'
        )
        await new Promise((resolve) => setTimeout(resolve, 1000)) // Pause for 1 second, protects against out of memory
        continue
      }
      const batch = ids.slice(i, i + maxCacheReadSize)
      // const x = this.#count
      // this.#count++
      // const startTime = performance.now()
      // this.#logger('Start read ' + x + ' ' + batch.length)

      //faster than BulkGet with dexie
      await this.#cacheDB!.transaction('r', this.#cacheDB!.objects, async () => {
        const gets = batch.map((key) => this.#cacheDB!.objects.get(key))
        const cachedData = await Promise.all(gets)
        for (let i = 0; i < cachedData.length; i++) {
          if (cachedData[i]) {
            foundItems.add(cachedData[i]!)
          } else {
            notFoundItems.add(batch[i])
          }
        }
      })
      // const endTime = performance.now()
      // const duration = endTime - startTime
      // this.#logger('Read batch ' + x + ' ' + batch.length + ' ' + duration / 1000)

      // iterate down here to help with pausing
      i += maxCacheReadSize
    }
  }

  /** Fetches a single cached item by id, or undefined on a cache miss. */
  async getItem(params: { id: string }): Promise<Item | undefined> {
    const { id } = params
    await this.#setupCacheDb()

    return this.#cacheDB!.transaction('r', this.#cacheDB!.objects, async () => {
      return await this.#cacheDB?.objects.get(id)
    })
  }

  /** Persists one batch of items in a single bulkPut. */
  async #cacheSaveBatch(params: {
    batch: Item[]
    cacheDB: ObjectStore
  }): Promise<void> {
    const { batch, cacheDB } = params
    //const x = this.#count
    //this.#count++

    // const startTime = performance.now()
    // this.#logger('Start save ' + x + ' ' + batch.length)
    await cacheDB.objects.bulkPut(batch)
    // const endTime = performance.now()
    // const duration = endTime - startTime
    //this.#logger('Saved batch ' + x + ' ' + batch.length + ' ' + duration / 1000)
  }

  /**
   * Fixes a Safari bug where IndexedDB requests get lost and never resolve - invoke before you use IndexedDB
   * @link Credits and more info: https://github.com/jakearchibald/safari-14-idb-fix
   */
  async #safariFix(): Promise<void> {
    // No point putting other browsers or older versions of Safari through this mess.
    if (!isSafari() || !this.#options.indexedDB?.databases) return Promise.resolve()

    let intervalId: ReturnType<typeof setInterval>

    return new Promise<void>((resolve: () => void) => {
      const tryIdb = () => this.#options.indexedDB?.databases().finally(resolve)
      intervalId = setInterval(() => {
        void tryIdb()
      }, 100)
      void tryIdb()
    }).finally(() => clearInterval(intervalId))
  }
}