@speckle/objectloader 2.0.3 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/examples/browser/index.html +19 -0
- package/examples/browser/script.js +33 -0
- package/examples/node/script.js +1 -0
- package/index.js +382 -23
- package/package.json +2 -1
- package/readme.md +19 -1
package/examples/browser/index.html ADDED
@@ -0,0 +1,19 @@
+<!DOCTYPE html>
+<html lang="en" class="no-js">
+
+<head>
+  <meta charset="UTF-8">
+  <meta name="viewport" content="width=device-width">
+
+  <title>Object Loader Test</title>
+</head>
+
+<body>
+  <h1>This is a test.</h1>
+  <p>All the magic is in the console.</p>
+  <p>To ensure this example runs correctly, please serve this file from a local http server - if you manually open the file in a browser, it might not work.</p>
+  <button onclick="loadData()">PRESS ME</button>
+</body>
+
+<script src="script.js" type="module"></script>
+</html>
package/examples/browser/script.js ADDED
@@ -0,0 +1,33 @@
+import ObjectLoader from '../../index.js'
+window.ObjectLoader = ObjectLoader
+
+// let loader = new ObjectLoader({serverUrl:"https://latest.speckle.dev", streamId:"16d73b756a", objectId:"99b20746460c4369f25e08e92c988a9d"})
+// let loader = new ObjectLoader({serverUrl:"https://latest.speckle.dev", streamId:"16d73b756a", objectId:"b8f41c190591c196c42905b75616fdb1"})
+// let loader = new ObjectLoader({serverUrl:"https://latest.speckle.dev", streamId:"16d73b756a", objectId:"99b20746460c4369f25e08e92c988a9d"})
+// let loader = new ObjectLoader({serverUrl:"https://latest.speckle.dev", streamId:"92b620fb17", objectId:"5f466b7bce58fda5036489e486ce1694"})
+// let loader = new ObjectLoader({serverUrl:"https://latest.speckle.dev", streamId:"92b620fb17", objectId:"5f466b7bce58fda5036489e486ce1694"})
+
+// https://latest.speckle.dev/streams/92b620fb17/objects/7cd9d41b5b5f3c8908536aec2a05f1a1
+// let loader = new ObjectLoader({
+//   serverUrl:"https://latest.speckle.dev",
+//   streamId:"92b620fb17",
+//   objectId:"878c426bb213ddb4d580da74922a2b16"
+// })
+
+// https://latest.speckle.dev/streams/3ed8357f29/objects/0408ab9caaa2ebefb2dd7f1f671e7555
+let loader = new ObjectLoader({
+  serverUrl: "https://latest.speckle.dev",
+  streamId: "3ed8357f29",
+  objectId: "0408ab9caaa2ebefb2dd7f1f671e7555"
+})
+
+
+window.loadData = async function loadData() {
+
+  let obj = await loader.getAndConstructObject((e) => {
+    console.log(e) // log progress!
+  })
+
+  console.log('Done!')
+  console.log( obj )
+}
package/examples/node/script.js ADDED
@@ -0,0 +1 @@
+// NOTE: This lib is not working in node, because node-fetch returns node-native readable streams - we need a workaround first.
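The note above refers to a stream-shape mismatch: the loader reads `response.body.getReader()`, which exists on WHATWG web streams but not on the Node-native readable streams that node-fetch returns. A minimal sketch of one possible workaround is below; it assumes Node >= 18 (whose built-in global fetch already returns web-stream bodies) or Node >= 17 for `stream.Readable.toWeb()`, and it is illustrative only, not part of the published package.

```js
// Illustrative workaround sketch - NOT part of @speckle/objectloader.
import { Readable } from 'node:stream'
import nodeFetch from 'node-fetch' // hypothetical fallback, only if global fetch is unavailable

async function fetchWebStream( url, options ) {
  if ( typeof globalThis.fetch === 'function' ) {
    // Node 18+: the response body is already a web ReadableStream with getReader()
    const res = await globalThis.fetch( url, options )
    return res.body
  }
  // Older Node: convert the Node-native stream returned by node-fetch into a web stream
  const res = await nodeFetch( url, options )
  return Readable.toWeb( res.body )
}
```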
package/index.js CHANGED
@@ -1,31 +1,68 @@
 /**
  * Simple client that streams object info from a Speckle Server.
- * TODO:
+ * TODO: Object construction progress reporting is weird.
  */

+
 export default class ObjectLoader {

-
+  /**
+   * Creates a new object loader instance.
+   * @param {*} param0
+   */
+  constructor( { serverUrl, streamId, token, objectId, options = { enableCaching: true, fullyTraverseArrays: false, excludeProps: [ ] } } ) {
     this.INTERVAL_MS = 20
     this.TIMEOUT_MS = 180000 // three mins

     this.serverUrl = serverUrl || window.location.origin
     this.streamId = streamId
     this.objectId = objectId
-
+    console.log('Object loader constructor called!')
+    try {
+      this.token = token || localStorage.getItem( 'AuthToken' )
+    } catch (error) {
+      // Accessing localStorage may throw when executing on sandboxed document, ignore.
+    }

     this.headers = {
       'Accept': 'text/plain'
     }

-    if( token ) {
+    if( this.token ) {
       this.headers['Authorization'] = `Bearer ${this.token}`
     }

-    this.
+    this.requestUrlRootObj = `${this.serverUrl}/objects/${this.streamId}/${this.objectId}/single`
+    this.requestUrlChildren = `${this.serverUrl}/api/getobjects/${this.streamId}`
     this.promises = []
     this.intervals = {}
     this.buffer = []
+    this.isLoading = false
+    this.totalChildrenCount = 0
+    this.traversedReferencesCount = 0
+    this.options = options
+    this.options.numConnections = this.options.numConnections || 4
+
+    this.cacheDB = null
+
+    this.lastAsyncPause = Date.now()
+    this.existingAsyncPause = null
+
+  }
+
+  async asyncPause() {
+    // Don't freeze the UI
+    // while ( this.existingAsyncPause ) {
+    //   await this.existingAsyncPause
+    // }
+    if ( Date.now() - this.lastAsyncPause >= 100 ) {
+      this.lastAsyncPause = Date.now()
+      this.existingAsyncPause = new Promise( resolve => setTimeout( resolve, 0 ) )
+      await this.existingAsyncPause
+      this.existingAsyncPause = null
+      if (Date.now() - this.lastAsyncPause > 500) console.log("Loader Event loop lag: ", Date.now() - this.lastAsyncPause)
+    }
+
   }

   dispose() {
@@ -33,6 +70,97 @@ export default class ObjectLoader {
     this.intervals.forEach( i => clearInterval( i.interval ) )
   }

+  /**
+   * Use this method to receive and construct the object. It will return the full, de-referenced and de-chunked original object.
+   * @param {*} onProgress
+   * @returns
+   */
+  async getAndConstructObject( onProgress ) {
+
+    ;( await this.downloadObjectsInBuffer( onProgress ) ) // Fire and forget; PS: semicolon of doom
+
+    let rootObject = await this.getObject( this.objectId )
+    return this.traverseAndConstruct( rootObject, onProgress )
+  }
+
+  /**
+   * Internal function used to download all the objects in a local buffer.
+   * @param {*} onProgress
+   */
+  async downloadObjectsInBuffer( onProgress ) {
+    let first = true
+    let downloadNum = 0
+
+    for await ( let obj of this.getObjectIterator() ) {
+      if( first ) {
+        this.totalChildrenCount = obj.totalChildrenCount
+        first = false
+        this.isLoading = true
+      }
+      downloadNum++
+      if( onProgress ) onProgress( { stage: 'download', current: downloadNum, total: this.totalChildrenCount } )
+    }
+    this.isLoading = false
+  }
+
+  /**
+   * Internal function used to recursively traverse an object and populate its references and dechunk any arrays.
+   * @param {*} obj
+   * @param {*} onProgress
+   * @returns
+   */
+  async traverseAndConstruct( obj, onProgress ) {
+    if( !obj ) return
+    if ( typeof obj !== 'object' ) return obj
+
+    // Handle arrays
+    if ( Array.isArray( obj ) && obj.length !== 0 ) {
+      let arr = []
+      for ( let element of obj ) {
+        if ( typeof element !== 'object' && ! this.options.fullyTraverseArrays ) return obj
+
+        // Dereference element if needed
+        let deRef = element.referencedId ? await this.getObject( element.referencedId ) : element
+        if( element.referencedId && onProgress ) onProgress( { stage: 'construction', current: ++this.traversedReferencesCount > this.totalChildrenCount ? this.totalChildrenCount : this.traversedReferencesCount, total: this.totalChildrenCount } )
+
+        // Push the traversed object in the array
+        arr.push( await this.traverseAndConstruct( deRef, onProgress ) )
+      }
+
+      // De-chunk
+      if( arr[0]?.speckle_type?.toLowerCase().includes('datachunk') ) {
+        return arr.reduce( ( prev, curr ) => prev.concat( curr.data ), [] )
+      }
+
+      return arr
+    }
+
+    // Handle objects
+    // 1) Purge ignored props
+    for( let ignoredProp of this.options.excludeProps ) {
+      delete obj[ ignoredProp ]
+    }
+
+    // 2) Iterate through obj
+    for( let prop in obj ) {
+      if( typeof obj[prop] !== 'object' || obj[prop] === null ) continue // leave alone primitive props
+
+      if( obj[prop].referencedId ) {
+        obj[prop] = await this.getObject( obj[prop].referencedId )
+        if( onProgress ) onProgress( { stage: 'construction', current: ++this.traversedReferencesCount > this.totalChildrenCount ? this.totalChildrenCount : this.traversedReferencesCount, total: this.totalChildrenCount } )
+      }
+
+      obj[prop] = await this.traverseAndConstruct( obj[prop], onProgress )
+    }
+
+    return obj
+  }
+
+  /**
+   * Internal function. Returns a promise that is resolved when the object id is loaded into the internal buffer.
+   * @param {*} id
+   * @returns
+   */
   async getObject( id ){
     if ( this.buffer[id] ) return this.buffer[id]

@@ -71,11 +199,15 @@ export default class ObjectLoader {
   }

   async * getObjectIterator( ) {
+    let t0 = Date.now()
+    let count = 0
     for await ( let line of this.getRawObjectIterator() ) {
       let { id, obj } = this.processLine( line )
       this.buffer[ id ] = obj
+      count += 1
       yield obj
     }
+    console.log(`Loaded ${count} objects in: ${(Date.now() - t0) / 1000}`)
   }

   processLine( chunk ) {
@@ -84,31 +216,258 @@
   }

   async * getRawObjectIterator() {
-
-
-
-
-
+    let tSTART = Date.now()
+
+    if ( this.options.enableCaching && window.indexedDB && this.cacheDB === null) {
+      await safariFix()
+      let idbOpenRequest = indexedDB.open('speckle-object-cache', 1)
+      idbOpenRequest.onupgradeneeded = () => idbOpenRequest.result.createObjectStore('objects');
+      this.cacheDB = await this.promisifyIdbRequest( idbOpenRequest )
+    }
+
+    const rootObjJson = await this.getRawRootObject()
+    // console.log("Root in: ", Date.now() - tSTART)
+
+    yield `${this.objectId}\t${rootObjJson}`
+
+    const rootObj = JSON.parse(rootObjJson)
+    if ( !rootObj.__closure ) return
+
+    let childrenIds = Object.keys(rootObj.__closure).sort( (a, b) => rootObj.__closure[a] - rootObj.__closure[b] )
+    if ( childrenIds.length === 0 ) return
+
+    let splitHttpRequests = []
+
+    if ( childrenIds.length > 50 ) {
+      // split into 5%, 15%, 40%, 40% (5% for the high priority children: the ones with lower minDepth)
+      let splitBeforeCacheCheck = [ [], [], [], [] ]
+      let crtChildIndex = 0
+
+      for ( ; crtChildIndex < 0.05 * childrenIds.length; crtChildIndex++ ) {
+        splitBeforeCacheCheck[0].push( childrenIds[ crtChildIndex ] )
+      }
+      for ( ; crtChildIndex < 0.2 * childrenIds.length; crtChildIndex++ ) {
+        splitBeforeCacheCheck[1].push( childrenIds[ crtChildIndex ] )
+      }
+      for ( ; crtChildIndex < 0.6 * childrenIds.length; crtChildIndex++ ) {
+        splitBeforeCacheCheck[2].push( childrenIds[ crtChildIndex ] )
+      }
+      for ( ; crtChildIndex < childrenIds.length; crtChildIndex++ ) {
+        splitBeforeCacheCheck[3].push( childrenIds[ crtChildIndex ] )
+      }
+
+
+      console.log("Cache check for: ", splitBeforeCacheCheck)
+
+      let newChildren = []
+      let nextCachePromise = this.cacheGetObjects( splitBeforeCacheCheck[ 0 ] )

-
-
+      for ( let i = 0; i < 4; i++ ) {
+        let cachedObjects = await nextCachePromise
+        if ( i < 3 ) nextCachePromise = this.cacheGetObjects( splitBeforeCacheCheck[ i + 1 ] )
+
+        let sortedCachedKeys = Object.keys(cachedObjects).sort( (a, b) => rootObj.__closure[a] - rootObj.__closure[b] )
+        for ( let id of sortedCachedKeys ) {
+          yield `${id}\t${cachedObjects[ id ]}`
+        }
+        let newChildrenForBatch = splitBeforeCacheCheck[i].filter( id => !( id in cachedObjects ) )
+        newChildren.push( ...newChildrenForBatch )
+      }
+
+      if ( newChildren.length === 0 ) return
+
+      if ( newChildren.length <= 50 ) {
+        // we have almost all of children in the cache. do only 1 requests for the remaining new children
+        splitHttpRequests.push( newChildren )
+      } else {
+        // we now set up the batches for 4 http requests, starting from `newChildren` (already sorted by priority)
+        splitHttpRequests = [ [], [], [], [] ]
+        crtChildIndex = 0
+
+        for ( ; crtChildIndex < 0.05 * newChildren.length; crtChildIndex++ ) {
+          splitHttpRequests[0].push( newChildren[ crtChildIndex ] )
+        }
+        for ( ; crtChildIndex < 0.2 * newChildren.length; crtChildIndex++ ) {
+          splitHttpRequests[1].push( newChildren[ crtChildIndex ] )
+        }
+        for ( ; crtChildIndex < 0.6 * newChildren.length; crtChildIndex++ ) {
+          splitHttpRequests[2].push( newChildren[ crtChildIndex ] )
+        }
+        for ( ; crtChildIndex < newChildren.length; crtChildIndex++ ) {
+          splitHttpRequests[3].push( newChildren[ crtChildIndex ] )
+        }
+      }
+
+    } else {
+      // small object with <= 50 children. check cache and make only 1 request
+      const cachedObjects = await this.cacheGetObjects( childrenIds )
+      let sortedCachedKeys = Object.keys(cachedObjects).sort( (a, b) => rootObj.__closure[a] - rootObj.__closure[b] )
+      for ( let id of sortedCachedKeys ) {
+        yield `${id}\t${cachedObjects[ id ]}`
+      }
+      childrenIds = childrenIds.filter(id => !( id in cachedObjects ) )
+      if ( childrenIds.length === 0 ) return
+
+      // only 1 http request with the remaining children ( <= 50 )
+      splitHttpRequests.push( childrenIds )
+    }
+
+    // Starting http requests for batches in `splitHttpRequests`
+
+    const decoders = []
+    const readers = []
+    const readPromisses = []
+    const startIndexes = []
+    const readBuffers = []
+    const finishedRequests = []
+
+    for (let i = 0; i < splitHttpRequests.length; i++) {
+      decoders.push(new TextDecoder())
+      readers.push( null )
+      readPromisses.push( null )
+      startIndexes.push( 0 )
+      readBuffers.push( '' )
+      finishedRequests.push( false )
+
+      fetch(
+        this.requestUrlChildren,
+        {
+          method: 'POST',
+          headers: { ...this.headers, 'Content-Type': 'application/json' },
+          body: JSON.stringify( { objects: JSON.stringify( splitHttpRequests[i] ) } )
+        }
+      ).then( crtResponse => {
+        let crtReader = crtResponse.body.getReader()
+        readers[i] = crtReader
+        let crtReadPromise = crtReader.read().then(x => { x.reqId = i; return x })
+        readPromisses[i] = crtReadPromise
+      })
+    }

     while ( true ) {
-      let
-      if (
-      if
-
-
-
-
+      let validReadPromises = readPromisses.filter(x => x != null)
+      if ( validReadPromises.length === 0 ) {
+        // Check if all requests finished
+        if ( finishedRequests.every(x => x) ) {
+          break
+        }
+        // Sleep 10 ms
+        await new Promise( ( resolve ) => {
+          setTimeout( resolve, 10 )
+        } )
+        continue
+      }
+
+      // Wait for data on any running request
+      let data = await Promise.any( validReadPromises )
+      let { value: crtDataChunk, done: readerDone, reqId } = data
+      finishedRequests[ reqId ] = readerDone
+
+      // Replace read promise on this request with a new `read` call
+      if ( !readerDone ) {
+        let crtReadPromise = readers[ reqId ].read().then(x => { x.reqId = reqId; return x })
+        readPromisses[ reqId ] = crtReadPromise
+      } else {
+        // This request finished. Flush any non-newline-terminated text
+        if ( readBuffers[ reqId ].length > 0 ) {
+          yield readBuffers[ reqId ]
+          readBuffers[ reqId ] = ''
+        }
+        // no other read calls for this request
+        readPromisses[ reqId ] = null
+      }
+
+      if ( !crtDataChunk )
         continue
+
+      crtDataChunk = decoders[ reqId ].decode( crtDataChunk )
+      let unprocessedText = readBuffers[ reqId ] + crtDataChunk
+      let unprocessedLines = unprocessedText.split(/\r\n|\n|\r/)
+      let remainderText = unprocessedLines.pop()
+      readBuffers[ reqId ] = remainderText
+
+      for ( let line of unprocessedLines ) {
+        yield line
       }
-
-      startIndex = re.lastIndex
+      this.cacheStoreObjects(unprocessedLines)
     }
+  }
+
+  async getRawRootObject() {
+    const cachedRootObject = await this.cacheGetObjects( [ this.objectId ] )
+    if ( cachedRootObject[ this.objectId ] )
+      return cachedRootObject[ this.objectId ]
+    const response = await fetch( this.requestUrlRootObj, { headers: this.headers } )
+    const responseText = await response.text()
+    this.cacheStoreObjects( [ `${this.objectId}\t${responseText}` ] )
+    return responseText
+  }

-
-
+  promisifyIdbRequest(request) {
+    return new Promise((resolve, reject) => {
+      request.oncomplete = request.onsuccess = () => resolve(request.result);
+      request.onabort = request.onerror = () => reject(request.error);
+    })
+  }
+
+  async cacheGetObjects(ids) {
+    if ( !this.options.enableCaching || !window.indexedDB ) {
+      return {}
     }
+
+    let ret = {}
+
+    for (let i = 0; i < ids.length; i += 500) {
+      let idsChunk = ids.slice(i, i + 500)
+      let t0 = Date.now()
+
+      let store = this.cacheDB.transaction('objects', 'readonly').objectStore('objects')
+      let idbChildrenPromises = idsChunk.map( id => this.promisifyIdbRequest( store.get( id ) ).then( data => ( { id, data } ) ) )
+      let cachedData = await Promise.all(idbChildrenPromises)
+
+      // console.log("Cache check for : ", idsChunk.length, Date.now() - t0)
+
+      for ( let cachedObj of cachedData ) {
+        if ( !cachedObj.data ) // non-existent objects are retrieved with `undefined` data
+          continue
+        ret[ cachedObj.id ] = cachedObj.data
+      }
+    }
+
+    return ret
   }
+
+  cacheStoreObjects(objects) {
+    if ( !this.options.enableCaching || !window.indexedDB ) {
+      return {}
+    }
+
+    let store = this.cacheDB.transaction('objects', 'readwrite').objectStore('objects')
+    for ( let obj of objects ) {
+      let idAndData = obj.split( '\t' )
+      store.put(idAndData[1], idAndData[0])
+    }
+
+    return this.promisifyIdbRequest( store.transaction )
+  }
+}
+
+
+// Credits and more info: https://github.com/jakearchibald/safari-14-idb-fix
+function safariFix() {
+  const isSafari =
+    !navigator.userAgentData &&
+    /Safari\//.test(navigator.userAgent) &&
+    !/Chrom(e|ium)\//.test(navigator.userAgent)
+
+  // No point putting other browsers or older versions of Safari through this mess.
+  if (!isSafari || !indexedDB.databases) return Promise.resolve()
+
+  let intervalId
+
+  return new Promise( ( resolve ) => {
+    const tryIdb = () => indexedDB.databases().finally(resolve)
+    intervalId = setInterval(tryIdb, 100)
+    tryIdb()
+  }).finally( () => clearInterval(intervalId) )
 }
package/package.json CHANGED
@@ -1,9 +1,10 @@
 {
   "name": "@speckle/objectloader",
-  "version": "2.0.3",
+  "version": "2.3.0",
   "description": "Simple API helper to stream in objects from the Speckle Server.",
   "main": "index.js",
   "homepage": "https://speckle.systems",
+  "type": "module",
   "repository": {
     "type": "git",
     "url": "https://github.com/specklesystems/speckle-server.git",
package/readme.md CHANGED
@@ -10,7 +10,7 @@ Comprehensive developer and user documentation can be found in our:

 ## Getting started

-This is a small utility class that helps you stream an object and all its sub-components from the Speckle Server API. It is
+This is a small utility class that helps you stream an object and all its sub-components from the Speckle Server API. It is intended to be used in contexts where you want to "download" the whole object, or iteratively traverse its whole tree.

 Here's a sample way on how to use it, pfilfered from the [3d viewer package](../viewer):

@@ -36,6 +36,24 @@ async load( { serverUrl, token, streamId, objectId } ) {

 ```

+If you do not want to process the objects one by one as they are streamed to you, you can use the `getAndConstructObject()` method. Here's an example:
+
+```js
+
+let loader = new ObjectLoader( {
+  serverUrl: "https://latest.speckle.dev",
+  streamId: "3ed8357f29",
+  objectId: "0408ab9caaa2ebefb2dd7f1f671e7555",
+  options: {
+    fullyTraverseArrays: false, // Default: false. By default, if an array starts with a primitive type, it will not be traversed. Set it to true if you want to capture scenarios in which lists can have intersped objects and primitives, e.g. [ 1, 2, "a", { important object } ]
+    excludeProps: [ 'displayValue', 'displayMesh', '__closure' ] // Default: []. Any prop names that you pass in here will be ignored from object construction traversal.
+  }
+} )
+
+let obj = await loader.getAndConstructObject( ( e ) => console.log( 'Progress', e ) )
+
+```
+
 ## Community

 If in trouble, the Speckle Community hangs out on [the forum](https://speckle.community). Do join and introduce yourself! We're happy to help.