@speckle/objectloader2 2.24.2 → 2.25.1
This diff shows the content of publicly available package versions as published to their public registries, and is provided for informational purposes only.
- package/dist/commonjs/index.js +6 -7
- package/dist/esm/index.js +3 -3
- package/eslint.config.mjs +3 -1
- package/package.json +2 -2
- package/src/helpers/__snapshots__/cachePump.spec.ts.snap +31 -0
- package/src/helpers/__snapshots__/cacheReader.spec.ts.snap +8 -0
- package/src/helpers/aggregateQueue.ts +20 -0
- package/src/helpers/batchedPool.ts +5 -9
- package/src/helpers/batchingQueue.ts +21 -13
- package/src/helpers/cachePump.disposal.spec.ts +49 -0
- package/src/helpers/cachePump.spec.ts +103 -0
- package/src/helpers/cachePump.ts +99 -0
- package/src/helpers/cacheReader.spec.ts +35 -0
- package/src/helpers/cacheReader.ts +64 -0
- package/src/helpers/defermentManager.disposal.spec.ts +28 -0
- package/src/helpers/defermentManager.spec.ts +25 -1
- package/src/helpers/defermentManager.ts +128 -12
- package/src/helpers/deferredBase.ts +44 -6
- package/src/helpers/keyedQueue.ts +45 -0
- package/src/helpers/memoryPump.ts +40 -0
- package/src/helpers/pump.ts +8 -0
- package/src/index.ts +3 -4
- package/src/operations/__snapshots__/objectLoader2.spec.ts.snap +16 -16
- package/src/operations/{__snapshots__ → databases/__snapshots__}/indexedDatabase.spec.ts.snap +0 -21
- package/src/operations/{indexedDatabase.spec.ts → databases/indexedDatabase.spec.ts} +2 -28
- package/src/operations/databases/indexedDatabase.ts +150 -0
- package/src/operations/databases/memoryDatabase.ts +43 -0
- package/src/operations/{__snapshots__ → downloaders/__snapshots__}/serverDownloader.spec.ts.snap +34 -0
- package/src/operations/{memoryDownloader.ts → downloaders/memoryDownloader.ts} +15 -14
- package/src/operations/{serverDownloader.spec.ts → downloaders/serverDownloader.spec.ts} +68 -43
- package/src/operations/{serverDownloader.ts → downloaders/serverDownloader.ts} +92 -38
- package/src/operations/interfaces.ts +11 -12
- package/src/operations/objectLoader2.spec.ts +76 -144
- package/src/operations/objectLoader2.ts +57 -79
- package/src/operations/objectLoader2Factory.ts +56 -0
- package/src/operations/options.ts +18 -37
- package/src/operations/traverser.spec.ts +1 -1
- package/src/operations/traverser.ts +1 -1
- package/src/test/e2e.spec.ts +4 -4
- package/src/types/types.ts +11 -0
- package/src/operations/indexedDatabase.ts +0 -167
- package/src/operations/memoryDatabase.ts +0 -42
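The most visible change for consumers is the construction API: `ObjectLoader2` options now take a `rootId` plus explicit `Downloader` and `Database` implementations (see the options.ts hunk below), while URL-based setup moves to the new `ObjectLoader2Factory`. A rough before/after sketch, assuming the factory is re-exported from the package root (src/index.ts also changed in this release, though its new export list is not shown here):

    // 2.24.2 — connection details went straight to the constructor:
    // new ObjectLoader2({ serverUrl, streamId, objectId, token, headers })

    // 2.25.1 — the factory assembles a ServerDownloader and an IndexedDatabase:
    import { ObjectLoader2Factory } from '@speckle/objectloader2'

    const loader = ObjectLoader2Factory.createFromUrl({
      serverUrl: 'https://app.speckle.systems',
      streamId: 'da9e320dad',
      objectId: '31d10c0cea569a1e26809658ed27e281'
    })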
package/src/operations/objectLoader2.spec.ts
CHANGED

@@ -1,56 +1,45 @@
 import { describe, expect, test } from 'vitest'
-import ObjectLoader2 from './objectLoader2.js'
+import { ObjectLoader2 } from './objectLoader2.js'
 import { Base, Item } from '../types/types.js'
-import { …
-import …
-import { MemoryDatabase } from './memoryDatabase.js'
-import …
-import AsyncGeneratorQueue from '../helpers/asyncGeneratorQueue.js'
+import { MemoryDownloader } from './downloaders/memoryDownloader.js'
+import { IDBFactory, IDBKeyRange } from 'fake-indexeddb'
+import { MemoryDatabase } from './databases/memoryDatabase.js'
+import IndexedDatabase from './databases/indexedDatabase.js'

 describe('objectloader2', () => {
   test('can get a root object from cache', async () => {
-    const …
-    const …
-    …
-    …
-    …
-    …
-    } as Cache
-    const downloader = {} as Downloader
+    const rootId = 'baseId'
+    const rootBase: Base = { id: 'baseId', speckle_type: 'type' }
+    const downloader = new MemoryDownloader(
+      rootId,
+      new Map<string, Base>([[rootId, rootBase]])
+    )
     const loader = new ObjectLoader2({
-      …
-      …
-      …
-      …
-      …
+      rootId,
+      downloader,
+      database: new IndexedDatabase({
+        indexedDB: new IDBFactory(),
+        keyRange: IDBKeyRange
+      })
     })
     const x = await loader.getRootObject()
     expect(x).toMatchSnapshot()
   })

   test('can get a root object from downloader', async () => {
-    const …
-    const …
-    …
-    …
-    …
-    …
-      add(item: Item): Promise<void> {
-        expect(item).toBe(root)
-        return Promise.resolve()
-      }
-    } as Cache
-    const downloader = {
-      downloadSingle(): Promise<Item> {
-        return Promise.resolve(root)
-      }
-    } as Downloader
+    const rootId = 'baseId'
+    const rootBase: Base = { id: 'baseId', speckle_type: 'type' }
+    const downloader = new MemoryDownloader(
+      rootId,
+      new Map<string, Base>([[rootId, rootBase]])
+    )
     const loader = new ObjectLoader2({
-      …
-      …
-      …
-      …
-      …
+      rootId,
+      downloader,
+      database: new IndexedDatabase({
+        indexedDB: new IDBFactory(),
+        keyRange: IDBKeyRange
+      })
     })
     const x = await loader.getRootObject()
     expect(x).toMatchSnapshot()

@@ -59,83 +48,24 @@ describe('objectloader2', () => {
   test('can get single object from cache using iterator', async () => {
     const rootId = 'baseId'
     const rootBase: Base = { id: 'baseId', speckle_type: 'type' }
-    const root = { baseId: rootId, base: rootBase } as unknown as Item
-    const cache = {
-      getItem(params: { id: string }): Promise<Item | undefined> {
-        expect(params.id).toBe(rootId)
-        return Promise.resolve(root)
-      }
-    } as Cache
-    const downloader = {} as Downloader
-    const loader = new ObjectLoader2({
-      serverUrl: 'a',
-      streamId: 'b',
-      objectId: rootId,
-      cache,
-      downloader
-    })
-    const r = []
-    for await (const x of loader.getObjectIterator()) {
-      r.push(x)
-    }
-
-    expect(r).toMatchSnapshot()
-  })
-
-  test('can get root/child object from cache using iterator', async () => {
-    const child1Base = { id: 'child1Id' }
-    const child1 = { baseId: 'child1Id', base: child1Base } as unknown as Item
-
-    const rootId = 'rootId'
-    const rootBase: Base = {
-      id: 'rootId',
-      speckle_type: 'type',
-      __closure: { child1Id: 100 }
-    }
-    const root = {
-      baseId: rootId,
-      base: rootBase
-    } as unknown as Item
-
-    const cache = {
-      getItem(params: { id: string }): Promise<Item | undefined> {
-        expect(params.id).toBe(root.baseId)
-        return Promise.resolve(root)
-      },
-      processItems(params: {
-        ids: string[]
-        foundItems: Queue<Item>

-        …
-        …
-        …
-        …
-        params.foundItems.add(child1)
-        return Promise.resolve()
-      },
-      disposeAsync(): Promise<void> {
-        return Promise.resolve()
-      }
-    } as Cache
-    const downloader = {
-      initializePool(params: { total: number }): void {
-        expect(params.total).toBe(1)
-      },
-      disposeAsync(): Promise<void> {
-        return Promise.resolve()
-      }
-    } as Downloader
+    const downloader = new MemoryDownloader(
+      rootId,
+      new Map<string, Base>([[rootId, rootBase]])
+    )
     const loader = new ObjectLoader2({
-      …
-      …
-      …
-      …
-      …
+      rootId,
+      downloader,
+      database: new IndexedDatabase({
+        indexedDB: new IDBFactory(),
+        keyRange: IDBKeyRange
+      })
     })
     const r = []
     for await (const x of loader.getObjectIterator()) {
       r.push(x)
     }
+
     expect(r).toMatchSnapshot()
   })

@@ -152,19 +82,18 @@ describe('objectloader2', () => {
     const root = {
       baseId: rootId,
       base: rootBase
-    } as …
+    } as Item

-    const records: …
-    records…
-    records…
+    const records: Map<string, Base> = new Map<string, Base>()
+    records.set(root.baseId, rootBase)
+    records.set(child1.baseId, child1Base)

     const loader = new ObjectLoader2({
-      …
-      …
-      …
-      cache: new MemoryDatabase({ items: records }),
-      downloader: new MemoryDownloader(rootId, records)
+      rootId: root.baseId,
+      downloader: new MemoryDownloader(rootId, records),
+      database: new MemoryDatabase({ items: records })
     })
+
     const r = []
     const obj = loader.getObject({ id: child1.baseId })
     for await (const x of loader.getObjectIterator()) {

@@ -193,18 +122,17 @@ describe('objectloader2', () => {
       base: rootBase
     } as unknown as Item

-    const records: …
-    records…
-    records…
+    const records: Map<string, Base> = new Map<string, Base>()
+    records.set(root.baseId, rootBase)
+    records.set(child1.baseId, child1Base)

-    const results: AsyncGeneratorQueue<Item> = new AsyncGeneratorQueue<Item>()
     const loader = new ObjectLoader2({
-      …
-      …
-      …
-      …
-      …
-      …
+      rootId: root.baseId,
+      downloader: new MemoryDownloader(rootId, records),
+      database: new IndexedDatabase({
+        indexedDB: new IDBFactory(),
+        keyRange: IDBKeyRange
+      })
     })
     const r = []
     const obj = loader.getObject({ id: child1.baseId })

@@ -220,26 +148,30 @@ describe('objectloader2', () => {
   })

   test('add extra header', async () => {
-    const …
-    const …
-    …
-    …
-    …
-    …
-    …
-    …
+    const rootId = 'rootId'
+    const rootBase: Base = {
+      id: 'rootId',
+      speckle_type: 'type',
+      __closure: { child1Id: 100 }
+    }
+    const root = {
+      baseId: rootId,
+      base: rootBase
+    } as Item
+
+    const records: Map<string, Base> = new Map<string, Base>()
+    records.set(root.baseId, rootBase)
     const headers = new Headers()
     headers.set('x-test', 'asdf')
     const loader = new ObjectLoader2({
-      …
-      …
-      …
-      …
-      …
-      …
+      rootId: root.baseId,
+      downloader: new MemoryDownloader(rootId, records),
+      database: new IndexedDatabase({
+        indexedDB: new IDBFactory(),
+        keyRange: IDBKeyRange
+      })
     })
     const x = await loader.getRootObject()
-    expect(x).toBe(root)
     expect(x).toMatchSnapshot()
   })

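The spec rewrite above drops the hand-rolled `Cache`/`Downloader` mocks in favor of the real in-memory and IndexedDB implementations. Condensed from the new tests, the setup pattern is (fake-indexeddb stands in for the browser globals; a fresh `IDBFactory` per test keeps state isolated):

    import { IDBFactory, IDBKeyRange } from 'fake-indexeddb'
    import { ObjectLoader2 } from './objectLoader2.js'
    import IndexedDatabase from './databases/indexedDatabase.js'
    import { MemoryDownloader } from './downloaders/memoryDownloader.js'
    import { Base } from '../types/types.js'

    const rootId = 'baseId'
    const rootBase: Base = { id: rootId, speckle_type: 'type' }
    const loader = new ObjectLoader2({
      rootId,
      downloader: new MemoryDownloader(rootId, new Map([[rootId, rootBase]])),
      database: new IndexedDatabase({
        indexedDB: new IDBFactory(),
        keyRange: IDBKeyRange
      })
    })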
package/src/operations/objectLoader2.ts
CHANGED

@@ -1,81 +1,79 @@
 import AsyncGeneratorQueue from '../helpers/asyncGeneratorQueue.js'
-import { …
-import IndexedDatabase from './indexedDatabase.js'
-import ServerDownloader from './serverDownloader.js'
+import { Downloader, Database } from './interfaces.js'
 import { CustomLogger, Base, Item } from '../types/types.js'
-import { ObjectLoader2Options } from './options.js'
-import { MemoryDownloader } from './memoryDownloader.js'
-import { MemoryDatabase } from './memoryDatabase.js'
+import { CacheOptions, ObjectLoader2Options } from './options.js'
 import { DefermentManager } from '../helpers/defermentManager.js'
+import { CacheReader } from '../helpers/cacheReader.js'
+import { CachePump } from '../helpers/cachePump.js'
+import AggregateQueue from '../helpers/aggregateQueue.js'
+import { ObjectLoader2Factory } from './objectLoader2Factory.js'

-export …
-#…
+export class ObjectLoader2 {
+  #rootId: string

   #logger: CustomLogger

-  #database: …
+  #database: Database
   #downloader: Downloader
+  #pump: CachePump
+  #cache: CacheReader

   #deferments: DefermentManager

   #gathered: AsyncGeneratorQueue<Item>

-  …
-    this.#objectId = options.objectId
+  #root?: Item = undefined

+  constructor(options: ObjectLoader2Options) {
+    this.#rootId = options.rootId
     this.#logger = options.logger || console.log
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    this.#…
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
+
+    const cacheOptions: CacheOptions = {
+      logger: this.#logger,
+      maxCacheReadSize: 10_000,
+      maxCacheWriteSize: 10_000,
+      maxWriteQueueSize: 40_000,
+      maxCacheBatchWriteWait: 3_000,
+      maxCacheBatchReadWait: 3_000
+    }
+
+    this.#gathered = new AsyncGeneratorQueue()
+    this.#database = options.database
+    this.#deferments = new DefermentManager({
+      maxSizeInMb: 2_000, // 2 GBs
+      ttlms: 5_000, // 5 seconds
+      logger: this.#logger
+    })
+    this.#cache = new CacheReader(this.#database, this.#deferments, cacheOptions)
+    this.#pump = new CachePump(
+      this.#database,
+      this.#gathered,
+      this.#deferments,
+      cacheOptions
+    )
+    this.#downloader = options.downloader
   }

   async disposeAsync(): Promise<void> {
-    await Promise.all([
-      …
-      this.#downloader.disposeAsync(),
-      this.#gathered.dispose()
-    ])
+    await Promise.all([this.#downloader.disposeAsync(), this.#cache.disposeAsync()])
+    this.#deferments.dispose()
   }

   async getRootObject(): Promise<Item | undefined> {
-    …
-    …
-    …
+    if (!this.#root) {
+      this.#root = await this.#database.getItem({ id: this.#rootId })
+      if (!this.#root) {
+        this.#root = await this.#downloader.downloadSingle()
+      }
     }
-    …
-    …
-    await this.#database.add(rootItem)
-    return rootItem
+    return this.#root
   }

   async getObject(params: { id: string }): Promise<Base> {
-    …
-    if (item) {
-      return item.base
-    }
-    return await this.#deferments.defer({ id: params.id })
+    return await this.#cache.getObject({ id: params.id })
   }

-  async getTotalObjectCount() {
+  async getTotalObjectCount(): Promise<number> {
     const rootObj = await this.getRootObject()
     const totalChildrenCount = Object.keys(rootObj?.base.__closure || {}).length
     return totalChildrenCount + 1 //count the root

@@ -87,47 +85,27 @@ export default class ObjectLoader2 {
       this.#logger('No root object found!')
       return
     }
+    //only for root
+    this.#pump.add(rootItem)
     yield rootItem.base
     if (!rootItem.base.__closure) return

     const children = Object.keys(rootItem.base.__closure)
     const total = children.length
-    this.#downloader.initializePool({
-      …
-      …
-      foundItems: this.#gathered,
-      notFoundItems: this.#downloader
+    this.#downloader.initializePool({
+      results: new AggregateQueue(this.#gathered, this.#pump),
+      total
     })
-    …
-    for await (const item of this.#gathered.consume()) {
-      this.#deferments.undefer(item)
+    for await (const item of this.#pump.gather(children, this.#downloader)) {
       yield item.base
-      count++
-      if (count >= total) {
-        await this.disposeAsync()
-      }
     }
-    await processPromise
   }

   static createFromObjects(objects: Base[]): ObjectLoader2 {
-    …
-    const records: Record<string, Base> = {}
-    objects.forEach((element) => {
-      records[element.id] = element
-    })
-    const loader = new ObjectLoader2({
-      serverUrl: 'dummy',
-      streamId: 'dummy',
-      objectId: root.id,
-      cache: new MemoryDatabase({ items: records }),
-      downloader: new MemoryDownloader(root.id, records)
-    })
-    return loader
+    return ObjectLoader2Factory.createFromObjects(objects)
   }

   static createFromJSON(json: string): ObjectLoader2 {
-    …
-    return this.createFromObjects(jsonObj)
+    return ObjectLoader2Factory.createFromJSON(json)
   }
 }
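From the caller's side consumption is unchanged, even though items now flow through the new `CachePump`/`CacheReader` pair; note, however, that the iterator no longer calls `disposeAsync()` on itself once the child count is reached, so explicit disposal appears to be the caller's responsibility now. A minimal consumption sketch (object shapes are illustrative):

    import { ObjectLoader2 } from '@speckle/objectloader2'

    // Root comes first; its __closure maps child ids to (illustrative) depths.
    const loader = ObjectLoader2.createFromObjects([
      { id: 'root', speckle_type: 'Base', __closure: { child: 100 } },
      { id: 'child', speckle_type: 'Base' }
    ])
    const total = await loader.getTotalObjectCount() // __closure children + 1 for the root
    for await (const base of loader.getObjectIterator()) {
      // each yielded value is a Base; compare a counter against total for progress
    }
    await loader.disposeAsync()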
package/src/operations/objectLoader2Factory.ts
ADDED

@@ -0,0 +1,56 @@
+import { Base, CustomLogger } from '../types/types.js'
+import IndexedDatabase from './databases/indexedDatabase.js'
+import { MemoryDatabase } from './databases/memoryDatabase.js'
+import { MemoryDownloader } from './downloaders/memoryDownloader.js'
+import ServerDownloader from './downloaders/serverDownloader.js'
+import { ObjectLoader2 } from './objectLoader2.js'
+
+export class ObjectLoader2Factory {
+  static createFromObjects(objects: Base[]): ObjectLoader2 {
+    const root = objects[0]
+    const records: Map<string, Base> = new Map<string, Base>()
+    objects.forEach((element) => {
+      records.set(element.id, element)
+    })
+    const loader = new ObjectLoader2({
+      rootId: root.id,
+      database: new MemoryDatabase({ items: records }),
+      downloader: new MemoryDownloader(root.id, records)
+    })
+    return loader
+  }
+
+  static createFromJSON(json: string): ObjectLoader2 {
+    const jsonObj = JSON.parse(json) as Base[]
+    return this.createFromObjects(jsonObj)
+  }
+
+  static createFromUrl(params: {
+    serverUrl: string
+    streamId: string
+    objectId: string
+    token?: string
+    headers?: Headers
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
+    keyRange?: { bound: Function; lowerBound: Function; upperBound: Function }
+    indexedDB?: IDBFactory
+    logger?: CustomLogger
+  }): ObjectLoader2 {
+    const loader = new ObjectLoader2({
+      rootId: params.objectId,
+      downloader: new ServerDownloader({
+        serverUrl: params.serverUrl,
+        streamId: params.streamId,
+        objectId: params.objectId,
+        token: params.token,
+        headers: params.headers
+      }),
+      database: new IndexedDatabase({
+        logger: params.logger,
+        indexedDB: params.indexedDB,
+        keyRange: params.keyRange
+      })
+    })
+    return loader
+  }
+}
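Because `createFromUrl` accepts optional `indexedDB` and `keyRange` handles, the factory also works where no global IndexedDB exists, mirroring the package's own e2e test (again assuming the root re-export):

    import { IDBFactory, IDBKeyRange } from 'fake-indexeddb'
    import { ObjectLoader2Factory } from '@speckle/objectloader2'

    const loader = ObjectLoader2Factory.createFromUrl({
      serverUrl: 'https://app.speckle.systems',
      streamId: 'da9e320dad',
      objectId: '31d10c0cea569a1e26809658ed27e281',
      indexedDB: new IDBFactory(), // injected stand-ins for the browser globals
      keyRange: IDBKeyRange
    })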
package/src/operations/options.ts
CHANGED

@@ -1,48 +1,29 @@
-…
-import …
-import Queue from '../helpers/queue.js'
-import { Base, CustomLogger, Fetcher, Item } from '../types/types.js'
-import { Cache, Downloader } from './interfaces.js'
+import { Base, CustomLogger } from '../types/types.js'
+import { Downloader, Database } from './interfaces.js'

 export interface ObjectLoader2Options {
-  …
-  …
-  …
-  streamId: string
-  objectId: string
-  token?: string
+  rootId: string
+  downloader: Downloader
+  database: Database
   logger?: CustomLogger
-  headers?: Headers
-  results?: AsyncGeneratorQueue<Item>
-  cache?: Cache
-  downloader?: Downloader
 }
-
+
+export interface CacheOptions {
   logger?: CustomLogger
-  …
-  …
-  …
-  …
-  …
-  }
-  maxCacheReadSize?: number
-  maxCacheWriteSize?: number
-  maxCacheBatchWriteWait?: number
+  maxCacheReadSize: number
+  maxCacheWriteSize: number
+  maxCacheBatchWriteWait: number
+  maxCacheBatchReadWait: number
+  maxWriteQueueSize: number
 }

-export interface …
-  …
-  …
-  objectId: string
-  token?: string
-  headers?: Headers
-  …
-  fetch?: Fetcher
-  database: Cache
-  results: Queue<Item>
+export interface MemoryDatabaseOptions {
+  logger?: CustomLogger
+  items?: Map<string, Base>
 }

-export interface …
+export interface DefermentManagerOptions {
   logger?: CustomLogger
-  …
+  maxSizeInMb: number
+  ttlms: number
 }
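The `CacheOptions` fields lost their `?`: they are now required, and in this release they are filled with hard-coded values by the `ObjectLoader2` constructor (see the objectLoader2.ts hunk above) rather than being user-tunable. For reference, a literal matching those constructor defaults — the comments are a reading of the field names, not documented semantics:

    const cacheOptions: CacheOptions = {
      logger: console.log,
      maxCacheReadSize: 10_000, // max items per batched cache read
      maxCacheWriteSize: 10_000, // max items per batched cache write
      maxWriteQueueSize: 40_000, // pending-write ceiling before applying back-pressure
      maxCacheBatchWriteWait: 3_000, // ms before flushing a partial write batch
      maxCacheBatchReadWait: 3_000 // ms before flushing a partial read batch
    }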
package/src/operations/traverser.ts
CHANGED

@@ -1,5 +1,5 @@
 import { Base, DataChunk, isBase, isReference, isScalar } from '../types/types.js'
-import ObjectLoader2 from './objectLoader2.js'
+import { ObjectLoader2 } from './objectLoader2.js'

 export type ProgressStage = 'download' | 'construction'
 export type OnProgress = (e: {
package/src/test/e2e.spec.ts
CHANGED

@@ -1,8 +1,8 @@
 import { describe, test, expect } from 'vitest'
 import { IDBFactory, IDBKeyRange } from 'fake-indexeddb'
-import ObjectLoader2 from '../operations/objectLoader2.js'
 import { Base } from '../types/types.js'
-import { …
+import { TIME_MS } from '@speckle/shared'
+import { ObjectLoader2Factory } from '../operations/objectLoader2Factory.js'

 describe('e2e', () => {
   test(

@@ -10,7 +10,7 @@ describe('e2e', () => {
     async () => {
       // Revit sample house (good for bim-like stuff with many display meshes)
       //const resource = 'https://app.speckle.systems/streams/da9e320dad/commits/5388ef24b8'
-      const loader = …
+      const loader = ObjectLoader2Factory.createFromUrl({
         serverUrl: 'https://app.speckle.systems',
         streamId: 'da9e320dad',
         objectId: '31d10c0cea569a1e26809658ed27e281',

@@ -36,6 +36,6 @@ describe('e2e', () => {
       expect(base2).toBeDefined()
       expect(base2.id).toBe('3841e3cbc45d52c47bc2f1b7b0ad4eb9')
     },
-    10 * …
+    10 * TIME_MS.second
   )
 })
package/src/types/types.ts
CHANGED

@@ -8,6 +8,7 @@ export type Fetcher = (
 export interface Item {
   baseId: string
   base: Base
+  size?: number
 }

 export interface Base {

@@ -58,3 +59,13 @@ export function isScalar(
     type === 'undefined'
   )
 }
+
+export function take<T>(it: Iterator<T>, count: number): T[] {
+  const result: T[] = []
+  for (let i = 0; i < count; i++) {
+    const itr = it.next()
+    if (itr.done) break
+    result.push(itr.value)
+  }
+  return result
+}