@speckle/objectloader2 2.23.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.tshy/build.json +8 -0
- package/.tshy/commonjs.json +17 -0
- package/.tshy/esm.json +16 -0
- package/dist/commonjs/index.js +8 -0
- package/dist/esm/index.js +3 -0
- package/eslint.config.mjs +57 -0
- package/package.json +76 -0
- package/readme.md +42 -0
- package/src/helpers/asyncGeneratorQueue.ts +35 -0
- package/src/helpers/batchedPool.ts +60 -0
- package/src/helpers/batchingQueue.ts +77 -0
- package/src/helpers/bufferQueue.ts +12 -0
- package/src/helpers/deferredBase.ts +17 -0
- package/src/helpers/queue.ts +3 -0
- package/src/index.ts +3 -0
- package/src/operations/indexedDatabase.spec.ts +69 -0
- package/src/operations/indexedDatabase.ts +167 -0
- package/src/operations/interfaces.ts +20 -0
- package/src/operations/objectLoader2.spec.ts +161 -0
- package/src/operations/objectLoader2.ts +120 -0
- package/src/operations/options.ts +41 -0
- package/src/operations/serverDownloader.spec.ts +133 -0
- package/src/operations/serverDownloader.ts +163 -0
- package/src/test/e2e.spec.ts +36 -0
- package/src/types/errors.ts +25 -0
- package/src/types/types.ts +25 -0
- package/tsconfig.json +103 -0
- package/vitest.config.ts +3 -0
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import Queue from '../helpers/queue.js'
|
|
2
|
+
import { Item } from '../types/types.js'
|
|
3
|
+
|
|
4
|
+
/**
 * Read/write store for decoded Speckle items (e.g. an IndexedDB-backed cache).
 */
export interface Cache {
  /** Looks up a single item by its base id; resolves `undefined` on a miss. */
  getItem(params: { id: string }): Promise<Item | undefined>

  /**
   * Partitions `ids` into hits and misses: cached items are pushed onto
   * `foundItems`, ids without a cached item onto `notFoundItems`
   * (in practice a downloader queue — see ObjectLoader2.getObjectIterator).
   */
  processItems(params: {
    ids: string[]
    foundItems: Queue<Item>
    notFoundItems: Queue<string>
  }): Promise<void>

  /** Persists a single item. */
  add(item: Item): Promise<void>
  /** Flushes pending writes and releases underlying resources. */
  disposeAsync(): Promise<void>
}
|
|
15
|
+
|
|
16
|
+
/**
 * Fetches items from a server. Extends `Queue<string>` so ids missing from
 * the cache can be enqueued for download directly (see Cache.processItems).
 */
export interface Downloader extends Queue<string> {
  /** Sizes the download pool for an expected `total` number of objects. */
  initializePool(params: { total: number }): void
  /** Downloads a single (root) object outside the batched pool. */
  downloadSingle(): Promise<Item>
  /** Drains outstanding downloads and releases resources. */
  disposeAsync(): Promise<void>
}
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import { describe, expect, test } from 'vitest'
|
|
2
|
+
import ObjectLoader2 from './objectLoader2.js'
|
|
3
|
+
import { Base, Item } from '../types/types.js'
|
|
4
|
+
import { Cache, Downloader } from './interfaces.js'
|
|
5
|
+
import Queue from '../helpers/queue.js'
|
|
6
|
+
|
|
7
|
+
describe('objectloader2', () => {
  // Root lookup is served straight from the cache when present; the
  // downloader stub is never touched.
  test('can get a root object from cache', async () => {
    const root = { baseId: 'baseId' } as unknown as Item
    const cache = {
      getItem(params: { id: string }): Promise<Item> {
        expect(params.id).toBe(root.baseId)
        return Promise.resolve(root)
      }
    } as Cache
    const downloader = {} as Downloader // unused: the cache hit wins
    const loader = new ObjectLoader2({
      serverUrl: 'a',
      streamId: 'b',
      objectId: root.baseId,
      cache,
      downloader
    })
    const x = await loader.getRootObject()
    expect(x).toBe(root)
  })

  // On a cache miss the root is fetched via downloadSingle() and the
  // downloaded item must be written back to the cache.
  test('can get a root object from downloader', async () => {
    const root = { baseId: 'baseId' } as unknown as Item
    const cache = {
      getItem(params: { id: string }): Promise<Item | undefined> {
        expect(params.id).toBe(root.baseId)
        return Promise.resolve<Item | undefined>(undefined) // simulate miss
      },
      add(item: Item): Promise<void> {
        expect(item).toBe(root) // downloaded root must be cached
        return Promise.resolve()
      }
    } as Cache
    const downloader = {
      downloadSingle(): Promise<Item> {
        return Promise.resolve(root)
      }
    } as Downloader
    const loader = new ObjectLoader2({
      serverUrl: 'a',
      streamId: 'b',
      objectId: root.baseId,
      cache,
      downloader
    })
    const x = await loader.getRootObject()
    expect(x).toBe(root)
  })

  // A root without __closure yields exactly one base from the iterator.
  test('can get single object from cache using iterator', async () => {
    const rootId = 'baseId'
    const rootBase: Base = { id: 'baseId' }
    const root = { baseId: rootId, base: rootBase } as unknown as Item
    const cache = {
      getItem(params: { id: string }): Promise<Item | undefined> {
        expect(params.id).toBe(rootId)
        return Promise.resolve(root)
      }
    } as Cache
    const downloader = {} as Downloader
    const loader = new ObjectLoader2({
      serverUrl: 'a',
      streamId: 'b',
      objectId: rootId,
      cache,
      downloader
    })
    const r = []
    for await (const x of loader.getObjectIterator()) {
      r.push(x)
    }

    expect(r.length).toBe(1)
    expect(r[0]).toBe(rootBase)
  })

  // A root whose __closure lists one child yields root then child. The child
  // arrives through Cache.processItems (a cache hit), and the downloader
  // pool must be sized to the closure size (1).
  test('can get root/child object from cache using iterator', async () => {
    const child1Base = { id: 'child1Id' }
    const child1 = { baseId: 'child1Id', base: child1Base } as unknown as Item

    const rootId = 'rootId'
    const rootBase: Base = { id: 'rootId', __closure: { child1Id: 100 } }
    const root = {
      baseId: rootId,
      base: rootBase
    } as unknown as Item

    const cache = {
      getItem(params: { id: string }): Promise<Item | undefined> {
        expect(params.id).toBe(root.baseId)
        return Promise.resolve(root)
      },
      processItems(params: {
        ids: string[]
        foundItems: Queue<Item>

        notFoundItems: Queue<string>
      }): Promise<void> {
        expect(params.ids.length).toBe(1)
        expect(params.ids[0]).toBe(child1.baseId)
        params.foundItems.add(child1) // cache hit: push child to results
        return Promise.resolve()
      },
      disposeAsync(): Promise<void> {
        return Promise.resolve()
      }
    } as Cache
    const downloader = {
      initializePool(params: { total: number }): void {
        expect(params.total).toBe(1) // exactly one child expected
      },
      disposeAsync(): Promise<void> {
        return Promise.resolve()
      }
    } as Downloader
    const loader = new ObjectLoader2({
      serverUrl: 'a',
      streamId: 'b',
      objectId: root.baseId,
      cache,
      downloader
    })
    const r = []
    for await (const x of loader.getObjectIterator()) {
      r.push(x)
    }

    expect(r.length).toBe(2)
    expect(r[0]).toBe(rootBase)
    expect(r[1]).toBe(child1Base)
  })

  // Passing extra request headers through options must not break root lookup.
  test('add extra header', async () => {
    const root = { baseId: 'baseId' } as unknown as Item
    const cache = {
      getItem(params: { id: string }): Promise<Item> {
        expect(params.id).toBe(root.baseId)
        return Promise.resolve(root)
      }
    } as Cache
    const downloader = {} as Downloader
    const headers = new Headers()
    headers.set('x-test', 'asdf')
    const loader = new ObjectLoader2({
      serverUrl: 'a',
      streamId: 'b',
      objectId: root.baseId,
      headers,
      cache,
      downloader
    })
    const x = await loader.getRootObject()
    expect(x).toBe(root)
  })
})
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import AsyncGeneratorQueue from '../helpers/asyncGeneratorQueue.js'
|
|
2
|
+
import { Cache, Downloader } from './interfaces.js'
|
|
3
|
+
import IndexedDatabase from './indexedDatabase.js'
|
|
4
|
+
import ServerDownloader from './serverDownloader.js'
|
|
5
|
+
import { CustomLogger, Base, Item } from '../types/types.js'
|
|
6
|
+
import { ObjectLoader2Options } from './options.js'
|
|
7
|
+
import { DeferredBase } from '../helpers/deferredBase.js'
|
|
8
|
+
|
|
9
|
+
export default class ObjectLoader2 {
|
|
10
|
+
#objectId: string
|
|
11
|
+
|
|
12
|
+
#logger: CustomLogger
|
|
13
|
+
|
|
14
|
+
#database: Cache
|
|
15
|
+
#downloader: Downloader
|
|
16
|
+
|
|
17
|
+
#gathered: AsyncGeneratorQueue<Item>
|
|
18
|
+
|
|
19
|
+
#buffer: DeferredBase[] = []
|
|
20
|
+
|
|
21
|
+
constructor(options: ObjectLoader2Options) {
|
|
22
|
+
this.#objectId = options.objectId
|
|
23
|
+
|
|
24
|
+
this.#logger = options.logger || console.log
|
|
25
|
+
this.#gathered = new AsyncGeneratorQueue()
|
|
26
|
+
this.#database =
|
|
27
|
+
options.cache ||
|
|
28
|
+
new IndexedDatabase({
|
|
29
|
+
logger: this.#logger,
|
|
30
|
+
maxCacheReadSize: 10000,
|
|
31
|
+
maxCacheWriteSize: 5000,
|
|
32
|
+
indexedDB: options.indexedDB,
|
|
33
|
+
keyRange: options.keyRange
|
|
34
|
+
})
|
|
35
|
+
this.#downloader =
|
|
36
|
+
options.downloader ||
|
|
37
|
+
new ServerDownloader({
|
|
38
|
+
database: this.#database,
|
|
39
|
+
results: this.#gathered,
|
|
40
|
+
serverUrl: options.serverUrl,
|
|
41
|
+
streamId: options.streamId,
|
|
42
|
+
objectId: this.#objectId,
|
|
43
|
+
token: options.token,
|
|
44
|
+
headers: options.headers
|
|
45
|
+
})
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
async disposeAsync(): Promise<void> {
|
|
49
|
+
await Promise.all([
|
|
50
|
+
this.#database.disposeAsync(),
|
|
51
|
+
this.#downloader.disposeAsync(),
|
|
52
|
+
this.#gathered.dispose()
|
|
53
|
+
])
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
async getRootObject(): Promise<Item | undefined> {
|
|
57
|
+
const cachedRootObject = await this.#database.getItem({ id: this.#objectId })
|
|
58
|
+
if (cachedRootObject) {
|
|
59
|
+
return cachedRootObject
|
|
60
|
+
}
|
|
61
|
+
const rootItem = await this.#downloader.downloadSingle()
|
|
62
|
+
|
|
63
|
+
await this.#database.add(rootItem)
|
|
64
|
+
return rootItem
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
async getObject(params: { id: string }): Promise<Base> {
|
|
68
|
+
const item = await this.#database.getItem({ id: params.id })
|
|
69
|
+
if (item) {
|
|
70
|
+
return item.base
|
|
71
|
+
}
|
|
72
|
+
const deferredBase = this.#buffer.find((x) => x.id === params.id)
|
|
73
|
+
if (deferredBase) {
|
|
74
|
+
return await deferredBase.promise
|
|
75
|
+
}
|
|
76
|
+
const d = new DeferredBase(params.id)
|
|
77
|
+
this.#buffer.push(d)
|
|
78
|
+
return d
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
async getTotalObjectCount() {
|
|
82
|
+
const rootObj = await this.getRootObject()
|
|
83
|
+
const totalChildrenCount = Object.keys(rootObj?.base.__closure || {}).length
|
|
84
|
+
return totalChildrenCount + 1 //count the root
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
async *getObjectIterator(): AsyncGenerator<Base> {
|
|
88
|
+
const rootItem = await this.getRootObject()
|
|
89
|
+
if (rootItem === undefined) {
|
|
90
|
+
this.#logger('No root object found!')
|
|
91
|
+
return
|
|
92
|
+
}
|
|
93
|
+
yield rootItem.base
|
|
94
|
+
if (!rootItem.base.__closure) return
|
|
95
|
+
|
|
96
|
+
const children = Object.keys(rootItem.base.__closure)
|
|
97
|
+
const total = children.length
|
|
98
|
+
this.#downloader.initializePool({ total })
|
|
99
|
+
const processPromise = this.#database.processItems({
|
|
100
|
+
ids: children,
|
|
101
|
+
foundItems: this.#gathered,
|
|
102
|
+
notFoundItems: this.#downloader
|
|
103
|
+
})
|
|
104
|
+
let count = 0
|
|
105
|
+
for await (const item of this.#gathered.consume()) {
|
|
106
|
+
const deferredIndex = this.#buffer.findIndex((x) => x.id === item.baseId)
|
|
107
|
+
if (deferredIndex !== -1) {
|
|
108
|
+
const deferredBase = this.#buffer[deferredIndex]
|
|
109
|
+
deferredBase.resolve(item.base)
|
|
110
|
+
this.#buffer.splice(deferredIndex, 1)
|
|
111
|
+
}
|
|
112
|
+
yield item.base
|
|
113
|
+
count++
|
|
114
|
+
if (count >= total) {
|
|
115
|
+
await this.disposeAsync()
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
await processPromise
|
|
119
|
+
}
|
|
120
|
+
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-unsafe-function-type */
|
|
2
|
+
import Queue from '../helpers/queue.js'
|
|
3
|
+
import { CustomLogger, Fetcher, Item } from '../types/types.js'
|
|
4
|
+
import { Cache, Downloader } from './interfaces.js'
|
|
5
|
+
|
|
6
|
+
/** Construction options for ObjectLoader2. */
export interface ObjectLoader2Options {
  /** IDBKeyRange-like factory; injectable for tests (e.g. fake-indexeddb). */
  keyRange?: { bound: Function; lowerBound: Function; upperBound: Function }
  /** IndexedDB factory; injectable for tests. */
  indexedDB?: IDBFactory
  serverUrl: string
  streamId: string
  objectId: string
  /** Bearer token added to download requests. */
  token?: string
  logger?: CustomLogger
  /** Extra headers forwarded on download requests. */
  headers?: Headers
  /** Replaces the default IndexedDatabase cache. */
  cache?: Cache
  /** Replaces the default ServerDownloader. */
  downloader?: Downloader
}
|
|
18
|
+
/** Options for the IndexedDB-backed cache. */
export interface BaseDatabaseOptions {
  logger?: CustomLogger
  /** IndexedDB factory; injectable for tests. */
  indexedDB?: IDBFactory
  /** IDBKeyRange-like factory; injectable for tests. */
  keyRange?: {
    bound: Function
    lowerBound: Function
    upperBound: Function
  }
  /** Max items fetched per cache read batch. */
  maxCacheReadSize?: number
  /** Max items persisted per cache write batch. */
  maxCacheWriteSize?: number
  /** Max time (ms, presumably) to wait before flushing a partial write batch — TODO confirm units. */
  maxCacheBatchWriteWait?: number
}
|
|
30
|
+
|
|
31
|
+
/** Options for the HTTP server downloader. */
export interface BaseDownloadOptions {
  serverUrl: string
  streamId: string
  objectId: string
  /** Bearer token sent as an Authorization header. */
  token?: string
  /** Extra headers copied onto every request. */
  headers?: Headers

  /** Fetch implementation override; defaults to globalThis.fetch. */
  fetch?: Fetcher
  /** Cache that every downloaded item is written to. */
  database: Cache
  /** Queue that receives every downloaded item. */
  results: Queue<Item>
}
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { describe, expect, test } from 'vitest'
|
|
2
|
+
import createFetchMock from 'vitest-fetch-mock'
|
|
3
|
+
import { vi } from 'vitest'
|
|
4
|
+
import AsyncGeneratorQueue from '../helpers/asyncGeneratorQueue.js'
|
|
5
|
+
import { Item } from '../types/types.js'
|
|
6
|
+
import { Cache } from './interfaces.js'
|
|
7
|
+
import ServerDownloader from './serverDownloader.js'
|
|
8
|
+
|
|
9
|
+
describe('downloader', () => {
  // One id queued, one tab-separated "<id>\t<json>" record in the response.
  test('download batch of one', async () => {
    const fetchMocker = createFetchMock(vi)
    const i: Item = { baseId: 'id', base: { id: 'id' } }
    fetchMocker.mockResponseOnce('id\t' + JSON.stringify(i.base) + '\n')
    const results = new AsyncGeneratorQueue<Item>()
    const db = {
      async add(): Promise<void> {
        return Promise.resolve()
      }
    } as unknown as Cache
    const downloader = new ServerDownloader({
      database: db,
      results,
      serverUrl: 'http://speckle.test',
      streamId: 'streamId',
      objectId: 'objectId',
      token: 'token',

      fetch: fetchMocker
    })
    downloader.initializePool({ total: 1, maxDownloadBatchWait: 200 })
    downloader.add('id')
    await downloader.disposeAsync() // flushes the pending batch
    results.dispose()
    const r = []
    for await (const x of results.consume()) {
      r.push(x)
    }

    expect(r.length).toBe(1)
    expect(JSON.stringify(r[0])).toBe(JSON.stringify(i))
  })

  // A single request whose mocked body carries two newline-delimited records
  // must yield two items (only one id is queued; the mock answers with both).
  test('download batch of two', async () => {
    const fetchMocker = createFetchMock(vi)
    const i1: Item = { baseId: 'id1', base: { id: 'id1' } }
    const i2: Item = { baseId: 'id2', base: { id: 'id2' } }
    fetchMocker.mockResponseOnce(
      'id1\t' + JSON.stringify(i1.base) + '\nid2\t' + JSON.stringify(i2.base) + '\n'
    )
    const results = new AsyncGeneratorQueue<Item>()
    const db = {
      async add(): Promise<void> {
        return Promise.resolve()
      }
    } as unknown as Cache
    const downloader = new ServerDownloader({
      database: db,
      results,
      serverUrl: 'http://speckle.test',
      streamId: 'streamId',
      objectId: 'objectId',
      token: 'token',

      fetch: fetchMocker
    })
    downloader.initializePool({ total: 2, maxDownloadBatchWait: 200 })
    downloader.add('id')
    await downloader.disposeAsync()
    results.dispose()
    const r = []
    for await (const x of results.consume()) {
      r.push(x)
    }

    expect(r.length).toBe(2)
    expect(JSON.stringify(r[0])).toBe(JSON.stringify(i1))
    expect(JSON.stringify(r[1])).toBe(JSON.stringify(i2))
  })

  // downloadSingle() parses the single-object endpoint's body as one item.
  test('download single exists', async () => {
    const fetchMocker = createFetchMock(vi)
    const i: Item = { baseId: 'id', base: { id: 'id', __closure: { childIds: 1 } } }
    fetchMocker.mockResponseOnce(JSON.stringify(i.base))
    const results = new AsyncGeneratorQueue<Item>()
    const db = {
      async add(): Promise<void> {
        return Promise.resolve()
      }
    } as unknown as Cache
    const downloader = new ServerDownloader({
      database: db,
      results,
      serverUrl: 'http://speckle.test',
      streamId: 'streamId',
      objectId: i.baseId,
      token: 'token',

      fetch: fetchMocker
    })
    const x = await downloader.downloadSingle()
    expect(JSON.stringify(x)).toBe(JSON.stringify(i))
  })

  // Custom headers must be forwarded: the mock only responds when the
  // request actually carries x-test: asdf.
  test('add extra header', async () => {
    const fetchMocker = createFetchMock(vi)
    const i: Item = { baseId: 'id', base: { id: 'id', __closure: { childIds: 1 } } }
    fetchMocker.mockResponseIf(
      (req) => req.headers.get('x-test') === 'asdf',
      JSON.stringify(i.base)
    )
    const results = new AsyncGeneratorQueue<Item>()
    const db = {
      async add(): Promise<void> {
        return Promise.resolve()
      }
    } as unknown as Cache
    const headers = new Headers()
    headers.set('x-test', 'asdf')
    const downloader = new ServerDownloader({
      database: db,
      results,
      serverUrl: 'http://speckle.test',
      headers,
      streamId: 'streamId',
      objectId: i.baseId,
      token: 'token',

      fetch: fetchMocker
    })
    const x = await downloader.downloadSingle()
    expect(JSON.stringify(x)).toBe(JSON.stringify(i))
  })
})
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import BatchedPool from '../helpers/batchedPool.js'
|
|
2
|
+
import Queue from '../helpers/queue.js'
|
|
3
|
+
import { ObjectLoaderRuntimeError } from '../types/errors.js'
|
|
4
|
+
import { Fetcher, isBase, Item } from '../types/types.js'
|
|
5
|
+
import { Downloader } from './interfaces.js'
|
|
6
|
+
import { BaseDownloadOptions } from './options.js'
|
|
7
|
+
|
|
8
|
+
export default class ServerDownloader implements Downloader {
|
|
9
|
+
#requestUrlRootObj: string
|
|
10
|
+
#requestUrlChildren: string
|
|
11
|
+
#headers: HeadersInit
|
|
12
|
+
#options: BaseDownloadOptions
|
|
13
|
+
#fetch: Fetcher
|
|
14
|
+
|
|
15
|
+
#downloadQueue?: BatchedPool<string>
|
|
16
|
+
|
|
17
|
+
constructor(options: BaseDownloadOptions) {
|
|
18
|
+
this.#options = options
|
|
19
|
+
this.#fetch = options.fetch ?? ((...args) => globalThis.fetch(...args))
|
|
20
|
+
|
|
21
|
+
this.#headers = {}
|
|
22
|
+
if (options.headers) {
|
|
23
|
+
for (const header of options.headers.entries()) {
|
|
24
|
+
this.#headers[header[0]] = header[1]
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
this.#headers['Accept'] = `text/plain`
|
|
28
|
+
|
|
29
|
+
if (this.#options.token) {
|
|
30
|
+
this.#headers['Authorization'] = `Bearer ${this.#options.token}`
|
|
31
|
+
}
|
|
32
|
+
this.#requestUrlChildren = `${this.#options.serverUrl}/api/getobjects/${
|
|
33
|
+
this.#options.streamId
|
|
34
|
+
}`
|
|
35
|
+
this.#requestUrlRootObj = `${this.#options.serverUrl}/objects/${
|
|
36
|
+
this.#options.streamId
|
|
37
|
+
}/${this.#options.objectId}/single`
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
#getDownloadCountAndSizes(total: number): number[] {
|
|
41
|
+
if (total <= 50) {
|
|
42
|
+
return [total]
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
return [10000, 30000, 10000, 1000]
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
initializePool(params: { total: number; maxDownloadBatchWait?: number }) {
|
|
49
|
+
const { total } = params
|
|
50
|
+
this.#downloadQueue = new BatchedPool<string>({
|
|
51
|
+
concurrencyAndSizes: this.#getDownloadCountAndSizes(total),
|
|
52
|
+
maxWaitTime: params.maxDownloadBatchWait,
|
|
53
|
+
processFunction: (batch: string[]) =>
|
|
54
|
+
this.downloadBatch({
|
|
55
|
+
batch,
|
|
56
|
+
url: this.#requestUrlChildren,
|
|
57
|
+
headers: this.#headers,
|
|
58
|
+
results: this.#options.results
|
|
59
|
+
})
|
|
60
|
+
})
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
#getPool(): BatchedPool<string> {
|
|
64
|
+
if (this.#downloadQueue) {
|
|
65
|
+
return this.#downloadQueue
|
|
66
|
+
}
|
|
67
|
+
throw new Error('Download pool is not initialized')
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
add(id: string): void {
|
|
71
|
+
this.#getPool().add(id)
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async disposeAsync(): Promise<void> {
|
|
75
|
+
await this.#downloadQueue?.disposeAsync()
|
|
76
|
+
await this.#getPool().disposeAsync()
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
#processJson(baseId: string, unparsedBase: string): Item {
|
|
80
|
+
let base: unknown
|
|
81
|
+
try {
|
|
82
|
+
base = JSON.parse(unparsedBase)
|
|
83
|
+
} catch (e: unknown) {
|
|
84
|
+
throw new Error(`Error parsing object ${baseId}: ${(e as Error).message}`)
|
|
85
|
+
}
|
|
86
|
+
if (isBase(base)) {
|
|
87
|
+
return { baseId, base }
|
|
88
|
+
} else {
|
|
89
|
+
throw new ObjectLoaderRuntimeError(`${baseId} is not a base`)
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
async downloadBatch(params: {
|
|
94
|
+
batch: string[]
|
|
95
|
+
url: string
|
|
96
|
+
headers: HeadersInit
|
|
97
|
+
results: Queue<Item>
|
|
98
|
+
}): Promise<void> {
|
|
99
|
+
const { batch, url, headers, results } = params
|
|
100
|
+
const response = await this.#fetch(url, {
|
|
101
|
+
method: 'POST',
|
|
102
|
+
headers: { ...headers, 'Content-Type': 'application/json' },
|
|
103
|
+
body: JSON.stringify({ objects: JSON.stringify(batch) })
|
|
104
|
+
})
|
|
105
|
+
|
|
106
|
+
this.#validateResponse(response)
|
|
107
|
+
if (!response.body) {
|
|
108
|
+
throw new Error('ReadableStream not supported or response has no body.')
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
const reader = response.body.getReader()
|
|
112
|
+
const decoder = new TextDecoder()
|
|
113
|
+
let buffer = '' // Temporary buffer to store incoming chunks
|
|
114
|
+
|
|
115
|
+
let count = 0
|
|
116
|
+
while (true) {
|
|
117
|
+
const { done, value } = await reader.read()
|
|
118
|
+
if (done) break
|
|
119
|
+
// Decode the chunk and add to buffer
|
|
120
|
+
buffer += decoder.decode(value, { stream: true })
|
|
121
|
+
|
|
122
|
+
// Try to process JSON objects from the buffer
|
|
123
|
+
let boundary = buffer.indexOf('\n')
|
|
124
|
+
while (boundary !== -1) {
|
|
125
|
+
const jsonString = buffer.slice(0, boundary)
|
|
126
|
+
buffer = buffer.slice(boundary + 1)
|
|
127
|
+
boundary = buffer.indexOf('\n')
|
|
128
|
+
if (jsonString) {
|
|
129
|
+
const pieces = jsonString.split('\t')
|
|
130
|
+
const [id, unparsedObj] = pieces
|
|
131
|
+
const item = this.#processJson(id, unparsedObj)
|
|
132
|
+
await this.#options.database.add(item)
|
|
133
|
+
results.add(item)
|
|
134
|
+
count++
|
|
135
|
+
if (count % 1000 === 0) {
|
|
136
|
+
await new Promise((resolve) => setTimeout(resolve, 100)) //allow other stuff to happen
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
async downloadSingle(): Promise<Item> {
|
|
144
|
+
const response = await this.#fetch(this.#requestUrlRootObj, {
|
|
145
|
+
headers: this.#headers
|
|
146
|
+
})
|
|
147
|
+
this.#validateResponse(response)
|
|
148
|
+
const responseText = await response.text()
|
|
149
|
+
const item = this.#processJson(this.#options.objectId, responseText)
|
|
150
|
+
return item
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
#validateResponse(response: Response): void {
|
|
154
|
+
if (!response.ok) {
|
|
155
|
+
if ([401, 403].includes(response.status)) {
|
|
156
|
+
throw new ObjectLoaderRuntimeError('You do not have access!')
|
|
157
|
+
}
|
|
158
|
+
throw new ObjectLoaderRuntimeError(
|
|
159
|
+
`Failed to fetch objects: ${response.status} ${response.statusText})`
|
|
160
|
+
)
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { describe, test, expect } from 'vitest'
|
|
2
|
+
import { IDBFactory, IDBKeyRange } from 'fake-indexeddb'
|
|
3
|
+
import ObjectLoader2 from '../operations/objectLoader2.js'
|
|
4
|
+
import { Base } from '../types/types.js'
|
|
5
|
+
|
|
6
|
+
describe('e2e', () => {
  // Downloads a small public model end-to-end (network required) against a
  // fresh in-memory fake IndexedDB, then checks counts and that out-of-band
  // getObject() calls resolve both during and after iteration.
  test('download small model', async () => {
    // Revit sample house (good for bim-like stuff with many display meshes)
    //const resource = 'https://app.speckle.systems/streams/da9e320dad/commits/5388ef24b8'
    const loader = new ObjectLoader2({
      serverUrl: 'https://app.speckle.systems',
      streamId: 'da9e320dad',
      objectId: '31d10c0cea569a1e26809658ed27e281',
      indexedDB: new IDBFactory(), // fake-indexeddb: isolated, empty cache
      keyRange: IDBKeyRange
    })

    // Requested before iterating: must resolve once the item streams in.
    const getObjectPromise = loader.getObject({
      id: '1708a78e057e8115f924c620ba686db6'
    })

    const bases: Base[] = []
    for await (const obj of loader.getObjectIterator()) {
      bases.push(obj)
    }

    expect(await loader.getTotalObjectCount()).toBe(1328)
    expect(bases.length).toBe(1328)
    const base = await getObjectPromise
    expect(base).toBeDefined()
    expect(base.id).toBe('1708a78e057e8115f924c620ba686db6')
    // After iteration everything is cached, so this resolves from the cache.
    const base2 = await loader.getObject({ id: '3841e3cbc45d52c47bc2f1b7b0ad4eb9' })
    expect(base2).toBeDefined()
    expect(base2.id).toBe('3841e3cbc45d52c47bc2f1b7b0ad4eb9')
  }, 10000)
})
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Base ObjectLoader error
|
|
3
|
+
*/
|
|
4
|
+
class BaseError extends Error {
|
|
5
|
+
/**
|
|
6
|
+
* Default message if none is passed
|
|
7
|
+
*/
|
|
8
|
+
static defaultMessage = 'Unexpected error occurred'
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* @param {string} [message]
|
|
12
|
+
*/
|
|
13
|
+
constructor(message: string) {
|
|
14
|
+
message ||= new.target.defaultMessage
|
|
15
|
+
super(message)
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
/** Error for invalid ObjectLoader configuration/options. */
export class ObjectLoaderConfigurationError extends BaseError {
  static defaultMessage = 'Object loader configured incorrectly!'
}
|
|
22
|
+
|
|
23
|
+
/** Error for runtime failures while loading objects (e.g. failed fetches or non-Base payloads). */
export class ObjectLoaderRuntimeError extends BaseError {
  static defaultMessage = 'Object loader encountered a runtime problem!'
}
|