@speckle/objectloader 2.3.0 → 2.4.2-alpha.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.cjs +16 -0
- package/LICENSE +208 -0
- package/examples/browser/index.html +15 -13
- package/examples/browser/script.js +9 -11
- package/examples/node/script.js +22 -1
- package/index.js +242 -188
- package/jsconfig.json +7 -0
- package/package.json +16 -4
- package/readme.md +22 -7
package/.eslintrc.cjs
ADDED
@@ -0,0 +1,16 @@
+/**
+ * Extends repo root config, only put changes here that are scoped to this specific package
+ * (if you already are - evaluate whether you really need package scoped linting rules)
+ */
+
+/** @type {import("eslint").Linter.Config} */
+const config = {
+  env: {
+    browser: true
+  },
+  parserOptions: {
+    sourceType: 'module'
+  }
+}
+
+module.exports = config
package/LICENSE
ADDED
@@ -0,0 +1,208 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by
+the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all
+other entities that control, are controlled by, or are under common
+control with that entity. For the purposes of this definition,
+"control" means (i) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (ii) ownership of fifty percent (50%) or more of the
+outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity
+exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation
+source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but
+not limited to compiled object code, generated documentation,
+and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or
+Object form, made available under the License, as indicated by a
+copyright notice that is included in or attached to the work
+(an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the purposes
+of this License, Derivative Works shall not include works that remain
+separable from, or merely link (or bind by name) to the interfaces of,
+the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including
+the original version of the Work and any modifications or additions
+to that Work or Derivative Works thereof, that is intentionally
+submitted to Licensor for inclusion in the Work by the copyright owner
+or by an individual or Legal Entity authorized to submit on behalf of
+the copyright owner. For the purposes of this definition, "submitted"
+means any form of electronic, verbal, or written communication sent
+to the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control systems,
+and issue tracking systems that are managed by, or on behalf of, the
+Licensor for the purpose of discussing and improving the Work, but
+excluding communication that is conspicuously marked or otherwise
+designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity
+on behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+copyright license to reproduce, prepare Derivative Works of,
+publicly display, publicly perform, sublicense, and distribute the
+Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+(except as stated in this section) patent license to make, have made,
+use, offer to sell, sell, import, and otherwise transfer the Work,
+where such license applies only to those patent claims licensable
+by such Contributor that are necessarily infringed by their
+Contribution(s) alone or by combination of their Contribution(s)
+with the Work to which such Contribution(s) was submitted. If You
+institute patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Work
+or a Contribution incorporated within the Work constitutes direct
+or contributory patent infringement, then any patent licenses
+granted to You under this License for that Work shall terminate
+as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+Work or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You
+meet the following conditions:
+
+(a) You must give any other recipients of the Work or
+Derivative Works a copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices
+stating that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works
+that You distribute, all copyright, patent, trademark, and
+attribution notices from the Source form of the Work,
+excluding those notices that do not pertain to any part of
+the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must
+include a readable copy of the attribution notices contained
+within such NOTICE file, excluding those notices that do not
+pertain to any part of the Derivative Works, in at least one
+of the following places: within a NOTICE text file distributed
+as part of the Derivative Works; within the Source form or
+documentation, if provided along with the Derivative Works; or,
+within a display generated by the Derivative Works, if and
+wherever such third-party notices normally appear. The contents
+of the NOTICE file are for informational purposes only and
+do not modify the License. You may add Your own attribution
+notices within Derivative Works that You distribute, alongside
+or as an addendum to the NOTICE text from the Work, provided
+that such additional attribution notices cannot be construed
+as modifying the License.
+
+You may add Your own copyright statement to Your modifications and
+may provide additional or different license terms and conditions
+for use, reproduction, or distribution of Your modifications, or
+for any such Derivative Works as a whole, provided Your use,
+reproduction, and distribution of the Work otherwise complies with
+the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work
+by You to the Licensor shall be under the terms and conditions of
+this License, without any additional terms or conditions.
+Notwithstanding the above, nothing herein shall supersede or modify
+the terms of any separate license agreement you may have executed
+with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+agreed to in writing, Licensor provides the Work (and each
+Contributor provides its Contributions) on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied, including, without limitation, any warranties or conditions
+of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+PARTICULAR PURPOSE. You are solely responsible for determining the
+appropriateness of using or redistributing the Work and assume any
+risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise,
+unless required by applicable law (such as deliberate and grossly
+negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special,
+incidental, or consequential damages of any character arising as a
+result of this License or out of the use or inability to use the
+Work (including but not limited to damages for loss of goodwill,
+work stoppage, computer failure or malfunction, or any and all
+other commercial damages or losses), even if such Contributor
+has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+the Work or Derivative Works thereof, You may choose to offer,
+and charge a fee for, acceptance of support, warranty, indemnity,
+or other liability obligations and/or rights consistent with this
+License. However, in accepting such obligations, You may act only
+on Your own behalf and on Your sole responsibility, not on behalf
+of any other Contributor, and only if You agree to indemnify,
+defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason
+of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following
+boilerplate notice, with the fields enclosed by brackets "[]"
+replaced with your own identifying information. (Don't include
+the brackets!) The text should be enclosed in the appropriate
+comment syntax for the file format. We also recommend that a
+file or class name and description of purpose be included on the
+same "printed page" as the copyright notice for easier
+identification within third-party archives.
+
+Copyright 2020 AEC Systems
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+NOTICE: Unless otherwise described, the code in this repository is
+licensed under the license above. Some modules, extensions or code herein
+might be otherwise licensed. This is indicated either in the root of the
+containing folder under a different license file, or in the respective
+file's header. If you have any questions, don't hesitate to get in touch
+with us via [email](mailto:hello@speckle.systems).
package/examples/browser/index.html
CHANGED
@@ -1,19 +1,21 @@
<!DOCTYPE html>
<html lang="en" class="no-js">
+  <head>
+    <meta charset="UTF-8" />
+    <meta name="viewport" content="width=device-width" />

-<
-
-<meta name="viewport" content="width=device-width">
+    <title>Object Loader Test</title>
+  </head>

-<
-
+  <body>
+    <h1>This is a test.</h1>
+    <p>All the magic is in the console.</p>
+    <p>
+      To ensure this example runs correctly, please serve this file from a local http
+      server - if you manually open the file in a browser, it might not work.
+    </p>
+    <button onclick="loadData()">PRESS ME</button>
+  </body>

-<body>
-<h1>This is a test.</h1>
-<p>All the magic is in the console.</p>
-<p>To ensure this example runs correctly, please serve this file from a local http server - if you manually open the file in a browser, it might not work.</p>
-<button onclick="loadData()">PRESS ME</button>
-</body>
-
<script src="script.js" type="module"></script>
-</html>
+</html>
package/examples/browser/script.js
CHANGED
@@ -9,25 +9,23 @@ window.ObjectLoader = ObjectLoader

// https://latest.speckle.dev/streams/92b620fb17/objects/7cd9d41b5b5f3c8908536aec2a05f1a1
// let loader = new ObjectLoader({
-// serverUrl:"https://latest.speckle.dev",
-// streamId:"92b620fb17",
+// serverUrl:"https://latest.speckle.dev",
+// streamId:"92b620fb17",
// objectId:"878c426bb213ddb4d580da74922a2b16"
// })

// https://latest.speckle.dev/streams/3ed8357f29/objects/0408ab9caaa2ebefb2dd7f1f671e7555
-
-serverUrl:
-streamId:
-objectId:
+const loader = new ObjectLoader({
+  serverUrl: 'https://latest.speckle.dev',
+  streamId: '3ed8357f29',
+  objectId: '0408ab9caaa2ebefb2dd7f1f671e7555'
})

-
window.loadData = async function loadData() {
-
-  let obj = await loader.getAndConstructObject((e) =>{
+  const obj = await loader.getAndConstructObject((e) => {
    console.log(e) // log progress!
  })

  console.log('Done!')
-  console.log(
-}
+  console.log(obj)
+}
package/examples/node/script.js
CHANGED
@@ -1 +1,22 @@
-//
+// Since Node v<18 does not provide fetch, we need to pass it in the options object. Note that fetch must return a WHATWG compliant stream, so cross-fetch won't work, but node/undici's implementation will.
+
+import { fetch } from 'undici'
+import ObjectLoader from '../../index.js'
+
+const loader = new ObjectLoader({
+  serverUrl: 'https://latest.speckle.dev',
+  streamId: '3ed8357f29',
+  objectId: '0408ab9caaa2ebefb2dd7f1f671e7555',
+  options: { enableCaching: false, excludeProps: [], fetch }
+})
+
+const loadData = async function loadData() {
+  const obj = await loader.getAndConstructObject((e) => {
+    console.log(e) // log progress!
+  })
+
+  console.log('Done!')
+  console.log(obj)
+}
+
+loadData()
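As an aside (not part of the published examples), the same loader instance can also be consumed incrementally through the `getObjectIterator()` generator that `index.js` exposes, instead of waiting for `getAndConstructObject()` to assemble the whole tree. A minimal sketch, reusing the `loader` from the example above:

```js
// Minimal sketch, assuming the `loader` instance created in the node example.
// getObjectIterator() yields each object as soon as its line has been downloaded
// and parsed, without constructing the full nested tree.
const streamObjects = async () => {
  let count = 0
  for await (const obj of loader.getObjectIterator()) {
    count++ // e.g. index objects by id here, or forward them to a consumer
  }
  console.log(`Streamed ${count} objects`)
}

streamObjects()
```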
package/index.js
CHANGED
@@ -3,14 +3,23 @@
 * TODO: Object construction progress reporting is weird.
 */

-
export default class ObjectLoader {
-
  /**
   * Creates a new object loader instance.
   * @param {*} param0
   */
-  constructor(
+  constructor({
+    serverUrl,
+    streamId,
+    token,
+    objectId,
+    options = {
+      enableCaching: true,
+      fullyTraverseArrays: false,
+      excludeProps: [],
+      fetch: null
+    }
+  }) {
    this.INTERVAL_MS = 20
    this.TIMEOUT_MS = 180000 // three mins

@@ -19,16 +28,16 @@ export default class ObjectLoader {
    this.objectId = objectId
    console.log('Object loader constructor called!')
    try {
-      this.token = token || localStorage.getItem(
+      this.token = token || localStorage.getItem('AuthToken')
    } catch (error) {
-
+      // Accessing localStorage may throw when executing on sandboxed document, ignore.
    }

    this.headers = {
-
+      Accept: 'text/plain'
    }

-    if(
+    if (this.token) {
      this.headers['Authorization'] = `Bearer ${this.token}`
    }

@@ -48,6 +57,13 @@ export default class ObjectLoader {
    this.lastAsyncPause = Date.now()
    this.existingAsyncPause = null

+    // we can't simply bind fetch to this.fetch, so instead we have to do some acrobatics:
+    // https://stackoverflow.com/questions/69337187/uncaught-in-promise-typeerror-failed-to-execute-fetch-on-workerglobalscope#comment124731316_69337187
+    this.preferredFetch = options.fetch
+    this.fetch = function (...args) {
+      const currentFetch = this.preferredFetch || fetch
+      return currentFetch(...args)
+    }
  }

  async asyncPause() {

@@ -55,19 +71,19 @@ export default class ObjectLoader {
    // while ( this.existingAsyncPause ) {
    //   await this.existingAsyncPause
    // }
-    if (
+    if (Date.now() - this.lastAsyncPause >= 100) {
      this.lastAsyncPause = Date.now()
-      this.existingAsyncPause = new Promise(
+      this.existingAsyncPause = new Promise((resolve) => setTimeout(resolve, 0))
      await this.existingAsyncPause
      this.existingAsyncPause = null
-      if (Date.now() - this.lastAsyncPause > 500)
+      if (Date.now() - this.lastAsyncPause > 500)
+        console.log('Loader Event loop lag: ', Date.now() - this.lastAsyncPause)
    }
-
  }

  dispose() {
    this.buffer = []
-    this.intervals.forEach(
+    this.intervals.forEach((i) => clearInterval(i.interval))
  }

  /**

@@ -75,30 +91,34 @@ export default class ObjectLoader {
   * @param {*} onProgress
   * @returns
   */
-  async getAndConstructObject(
-
-  ;( await this.downloadObjectsInBuffer( onProgress ) ) // Fire and forget; PS: semicolon of doom
+  async getAndConstructObject(onProgress) {
+    await this.downloadObjectsInBuffer(onProgress) // Fire and forget; PS: semicolon of doom

-
-    return this.traverseAndConstruct(
+    const rootObject = await this.getObject(this.objectId)
+    return this.traverseAndConstruct(rootObject, onProgress)
  }

  /**
   * Internal function used to download all the objects in a local buffer.
   * @param {*} onProgress
   */
-  async downloadObjectsInBuffer(
+  async downloadObjectsInBuffer(onProgress) {
    let first = true
    let downloadNum = 0

-    for await (
-      if(
+    for await (const obj of this.getObjectIterator()) {
+      if (first) {
        this.totalChildrenCount = obj.totalChildrenCount
        first = false
        this.isLoading = true
      }
      downloadNum++
-      if(
+      if (onProgress)
+        onProgress({
+          stage: 'download',
+          current: downloadNum,
+          total: this.totalChildrenCount
+        })
    }
    this.isLoading = false
  }

@@ -109,51 +129,69 @@ export default class ObjectLoader {
   * @param {*} onProgress
   * @returns
   */
-  async traverseAndConstruct(
-    if(
-    if (
+  async traverseAndConstruct(obj, onProgress) {
+    if (!obj) return
+    if (typeof obj !== 'object') return obj

    // Handle arrays
-    if (
-
-    for (
-      if (
+    if (Array.isArray(obj) && obj.length !== 0) {
+      const arr = []
+      for (const element of obj) {
+        if (typeof element !== 'object' && !this.options.fullyTraverseArrays) return obj

        // Dereference element if needed
-
-
+        const deRef = element.referencedId
+          ? await this.getObject(element.referencedId)
+          : element
+        if (element.referencedId && onProgress)
+          onProgress({
+            stage: 'construction',
+            current:
+              ++this.traversedReferencesCount > this.totalChildrenCount
+                ? this.totalChildrenCount
+                : this.traversedReferencesCount,
+            total: this.totalChildrenCount
+          })

        // Push the traversed object in the array
-        arr.push(
+        arr.push(await this.traverseAndConstruct(deRef, onProgress))
      }

      // De-chunk
-      if(
-        return arr.reduce(
+      if (arr[0]?.speckle_type?.toLowerCase().includes('datachunk')) {
+        return arr.reduce((prev, curr) => prev.concat(curr.data), [])
      }

      return arr
-
+    }

    // Handle objects
    // 1) Purge ignored props
-    for(
-      delete obj[
+    for (const ignoredProp of this.options.excludeProps) {
+      delete obj[ignoredProp]
    }

    // 2) Iterate through obj
-    for(
-      if(
-
-      if(
-        obj[prop] = await this.getObject(
-        if(
+    for (const prop in obj) {
+      if (typeof obj[prop] !== 'object' || obj[prop] === null) continue // leave alone primitive props
+
+      if (obj[prop].referencedId) {
+        obj[prop] = await this.getObject(obj[prop].referencedId)
+        if (onProgress)
+          onProgress({
+            stage: 'construction',
+            current:
+              ++this.traversedReferencesCount > this.totalChildrenCount
+                ? this.totalChildrenCount
+                : this.traversedReferencesCount,
+            total: this.totalChildrenCount
+          })
      }

-      obj[prop] = await this.traverseAndConstruct(
+      obj[prop] = await this.traverseAndConstruct(obj[prop], onProgress)
    }

-
+    return obj
  }

  /**

@@ -161,68 +199,71 @@ export default class ObjectLoader {
   * @param {*} id
   * @returns
   */
-  async getObject(
-    if (
+  async getObject(id) {
+    if (this.buffer[id]) return this.buffer[id]

-
-    this.promises.push(
+    const promise = new Promise((resolve, reject) => {
+      this.promises.push({ id, resolve, reject })
      // Only create a new interval checker if none is already present!
-      if (
+      if (this.intervals[id]) {
        this.intervals[id].elapsed = 0 // reset elapsed
      } else {
-
+        const intervalId = setInterval(
+          this.tryResolvePromise.bind(this),
+          this.INTERVAL_MS,
+          id
+        )
        this.intervals[id] = { interval: intervalId, elapsed: 0 }
      }
-    }
+    })
    return promise
  }

-  tryResolvePromise(
+  tryResolvePromise(id) {
    this.intervals[id].elapsed += this.INTERVAL_MS
-    if (
-      for (
-        p.resolve(
+    if (this.buffer[id]) {
+      for (const p of this.promises.filter((p) => p.id === id)) {
+        p.resolve(this.buffer[id])
      }

-      clearInterval(
+      clearInterval(this.intervals[id].interval)
      delete this.intervals[id]
      // this.promises = this.promises.filter( p => p.id !== p.id ) // clearing out promises too early seems to nuke loading
      return
    }

-    if (
-      console.warn(
-      clearInterval(
-      this.promises.filter(
-      this.promises = this.promises.filter(
+    if (this.intervals[id].elapsed > this.TIMEOUT_MS) {
+      console.warn(`Timeout resolving ${id}. HIC SVNT DRACONES.`)
+      clearInterval(this.intervals[id].interval)
+      this.promises.filter((p) => p.id === id).forEach((p) => p.reject())
+      this.promises = this.promises.filter((p) => p.id !== p.id) // clear out
    }
  }

-  async *
-
+  async *getObjectIterator() {
+    const t0 = Date.now()
    let count = 0
-    for await (
-
-      this.buffer[
+    for await (const line of this.getRawObjectIterator()) {
+      const { id, obj } = this.processLine(line)
+      this.buffer[id] = obj
      count += 1
      yield obj
    }
    console.log(`Loaded ${count} objects in: ${(Date.now() - t0) / 1000}`)
  }

-  processLine(
-
-    return { id: pieces[0], obj: JSON.parse(
+  processLine(chunk) {
+    const pieces = chunk.split('\t')
+    return { id: pieces[0], obj: JSON.parse(pieces[1]) }
  }

-  async *
-
-
-    if ( this.options.enableCaching && window.indexedDB && this.cacheDB === null) {
+  async *getRawObjectIterator() {
+    if (this.options.enableCaching && window.indexedDB && this.cacheDB === null) {
      await safariFix()
-
-      idbOpenRequest.onupgradeneeded = () =>
-
+      const idbOpenRequest = indexedDB.open('speckle-object-cache', 1)
+      idbOpenRequest.onupgradeneeded = () =>
+        idbOpenRequest.result.createObjectStore('objects')
+      this.cacheDB = await this.promisifyIdbRequest(idbOpenRequest)
    }

    const rootObjJson = await this.getRawRootObject()

@@ -231,162 +272,171 @@ export default class ObjectLoader {
    yield `${this.objectId}\t${rootObjJson}`

    const rootObj = JSON.parse(rootObjJson)
-    if (
+    if (!rootObj.__closure) return

-    let childrenIds = Object.keys(rootObj.__closure).sort(
-
+    let childrenIds = Object.keys(rootObj.__closure).sort(
+      (a, b) => rootObj.__closure[a] - rootObj.__closure[b]
+    )
+    if (childrenIds.length === 0) return

    let splitHttpRequests = []

-    if (
+    if (childrenIds.length > 50) {
      // split into 5%, 15%, 40%, 40% (5% for the high priority children: the ones with lower minDepth)
-
+      const splitBeforeCacheCheck = [[], [], [], []]
      let crtChildIndex = 0

-      for (
-        splitBeforeCacheCheck[0].push(
+      for (; crtChildIndex < 0.05 * childrenIds.length; crtChildIndex++) {
+        splitBeforeCacheCheck[0].push(childrenIds[crtChildIndex])
      }
-      for (
-        splitBeforeCacheCheck[1].push(
+      for (; crtChildIndex < 0.2 * childrenIds.length; crtChildIndex++) {
+        splitBeforeCacheCheck[1].push(childrenIds[crtChildIndex])
      }
-      for (
-        splitBeforeCacheCheck[2].push(
+      for (; crtChildIndex < 0.6 * childrenIds.length; crtChildIndex++) {
+        splitBeforeCacheCheck[2].push(childrenIds[crtChildIndex])
      }
-      for (
-        splitBeforeCacheCheck[3].push(
+      for (; crtChildIndex < childrenIds.length; crtChildIndex++) {
+        splitBeforeCacheCheck[3].push(childrenIds[crtChildIndex])
      }

+      console.log('Cache check for: ', splitBeforeCacheCheck)

-
+      const newChildren = []
+      let nextCachePromise = this.cacheGetObjects(splitBeforeCacheCheck[0])

-      let
-
+      for (let i = 0; i < 4; i++) {
+        const cachedObjects = await nextCachePromise
+        if (i < 3) nextCachePromise = this.cacheGetObjects(splitBeforeCacheCheck[i + 1])

-
-
-
-
-
-      for ( let id of sortedCachedKeys ) {
-        yield `${id}\t${cachedObjects[ id ]}`
+        const sortedCachedKeys = Object.keys(cachedObjects).sort(
+          (a, b) => rootObj.__closure[a] - rootObj.__closure[b]
+        )
+        for (const id of sortedCachedKeys) {
+          yield `${id}\t${cachedObjects[id]}`
        }
-
-
+        const newChildrenForBatch = splitBeforeCacheCheck[i].filter(
+          (id) => !(id in cachedObjects)
+        )
+        newChildren.push(...newChildrenForBatch)
      }

-      if (
+      if (newChildren.length === 0) return

-      if (
+      if (newChildren.length <= 50) {
        // we have almost all of children in the cache. do only 1 requests for the remaining new children
-        splitHttpRequests.push(
+        splitHttpRequests.push(newChildren)
      } else {
        // we now set up the batches for 4 http requests, starting from `newChildren` (already sorted by priority)
-        splitHttpRequests = [
+        splitHttpRequests = [[], [], [], []]
        crtChildIndex = 0

-        for (
-          splitHttpRequests[0].push(
+        for (; crtChildIndex < 0.05 * newChildren.length; crtChildIndex++) {
+          splitHttpRequests[0].push(newChildren[crtChildIndex])
        }
-        for (
-          splitHttpRequests[1].push(
+        for (; crtChildIndex < 0.2 * newChildren.length; crtChildIndex++) {
+          splitHttpRequests[1].push(newChildren[crtChildIndex])
        }
-        for (
-          splitHttpRequests[2].push(
+        for (; crtChildIndex < 0.6 * newChildren.length; crtChildIndex++) {
+          splitHttpRequests[2].push(newChildren[crtChildIndex])
        }
-        for (
-          splitHttpRequests[3].push(
+        for (; crtChildIndex < newChildren.length; crtChildIndex++) {
+          splitHttpRequests[3].push(newChildren[crtChildIndex])
        }
      }
-
    } else {
      // small object with <= 50 children. check cache and make only 1 request
-      const cachedObjects = await this.cacheGetObjects(
-
-
-
+      const cachedObjects = await this.cacheGetObjects(childrenIds)
+      const sortedCachedKeys = Object.keys(cachedObjects).sort(
+        (a, b) => rootObj.__closure[a] - rootObj.__closure[b]
+      )
+      for (const id of sortedCachedKeys) {
+        yield `${id}\t${cachedObjects[id]}`
      }
-      childrenIds = childrenIds.filter(id => !(
-      if (
+      childrenIds = childrenIds.filter((id) => !(id in cachedObjects))
+      if (childrenIds.length === 0) return

      // only 1 http request with the remaining children ( <= 50 )
-      splitHttpRequests.push(
+      splitHttpRequests.push(childrenIds)
    }

    // Starting http requests for batches in `splitHttpRequests`

    const decoders = []
    const readers = []
-    const
+    const readPromises = []
    const startIndexes = []
    const readBuffers = []
    const finishedRequests = []

    for (let i = 0; i < splitHttpRequests.length; i++) {
      decoders.push(new TextDecoder())
-      readers.push(
-
-      startIndexes.push(
-      readBuffers.push(
-      finishedRequests.push(
-
-      fetch(
-
-      {
-
-
-
-      }
-      ).then( crtResponse => {
-        let crtReader = crtResponse.body.getReader()
+      readers.push(null)
+      readPromises.push(null)
+      startIndexes.push(0)
+      readBuffers.push('')
+      finishedRequests.push(false)
+
+      this.fetch(this.requestUrlChildren, {
+        method: 'POST',
+        headers: { ...this.headers, 'Content-Type': 'application/json' },
+        body: JSON.stringify({ objects: JSON.stringify(splitHttpRequests[i]) })
+      }).then((crtResponse) => {
+        const crtReader = crtResponse.body.getReader()
        readers[i] = crtReader
-
-
+        const crtReadPromise = crtReader.read().then((x) => {
+          x.reqId = i
+          return x
+        })
+        readPromises[i] = crtReadPromise
      })
    }

-    while (
-
-      if (
+    while (true) {
+      const validReadPromises = readPromises.filter((x) => !!x)
+      if (validReadPromises.length === 0) {
        // Check if all requests finished
-        if (
+        if (finishedRequests.every((x) => x)) {
          break
        }
        // Sleep 10 ms
-        await new Promise(
-          setTimeout(
-        }
+        await new Promise((resolve) => {
+          setTimeout(resolve, 10)
+        })
        continue
      }

      // Wait for data on any running request
-
+      const data = await Promise.any(validReadPromises)
+      // eslint-disable-next-line prefer-const
      let { value: crtDataChunk, done: readerDone, reqId } = data
-      finishedRequests[
+      finishedRequests[reqId] = readerDone

      // Replace read promise on this request with a new `read` call
-      if (
-
-
+      if (!readerDone) {
+        const crtReadPromise = readers[reqId].read().then((x) => {
+          x.reqId = reqId
+          return x
+        })
+        readPromises[reqId] = crtReadPromise
      } else {
        // This request finished. "Flush any non-newline-terminated text"
-        if (
-          yield readBuffers[
-          readBuffers[
+        if (readBuffers[reqId].length > 0) {
+          yield readBuffers[reqId]
+          readBuffers[reqId] = ''
        }
        // no other read calls for this request
-
+        readPromises[reqId] = null
      }

-      if (
-        continue
+      if (!crtDataChunk) continue

-      crtDataChunk = decoders[
-
-
-
-      readBuffers[
+      crtDataChunk = decoders[reqId].decode(crtDataChunk)
+      const unprocessedText = readBuffers[reqId] + crtDataChunk
+      const unprocessedLines = unprocessedText.split(/\r\n|\n|\r/)
+      const remainderText = unprocessedLines.pop()
+      readBuffers[reqId] = remainderText

-      for (
+      for (const line of unprocessedLines) {
        yield line
      }
      this.cacheStoreObjects(unprocessedLines)

@@ -394,43 +444,46 @@ export default class ObjectLoader {
  }

  async getRawRootObject() {
-    const cachedRootObject = await this.cacheGetObjects(
-    if ( cachedRootObject[
-
-    const response = await fetch( this.requestUrlRootObj, { headers: this.headers } )
+    const cachedRootObject = await this.cacheGetObjects([this.objectId])
+    if (cachedRootObject[this.objectId]) return cachedRootObject[this.objectId]
+    const response = await this.fetch(this.requestUrlRootObj, { headers: this.headers })
    const responseText = await response.text()
-    this.cacheStoreObjects(
+    this.cacheStoreObjects([`${this.objectId}\t${responseText}`])
    return responseText
  }

  promisifyIdbRequest(request) {
    return new Promise((resolve, reject) => {
-      request.oncomplete = request.onsuccess = () => resolve(request.result)
-      request.onabort = request.onerror = () => reject(request.error)
+      request.oncomplete = request.onsuccess = () => resolve(request.result)
+      request.onabort = request.onerror = () => reject(request.error)
    })
  }

  async cacheGetObjects(ids) {
-    if (
+    if (!this.options.enableCaching || !window.indexedDB) {
      return {}
    }

-
+    const ret = {}

    for (let i = 0; i < ids.length; i += 500) {
-
-      let t0 = Date.now()
+      const idsChunk = ids.slice(i, i + 500)

-
-
-
+      const store = this.cacheDB
+        .transaction('objects', 'readonly')
+        .objectStore('objects')
+      const idbChildrenPromises = idsChunk.map((id) =>
+        this.promisifyIdbRequest(store.get(id)).then((data) => ({ id, data }))
+      )
+      const cachedData = await Promise.all(idbChildrenPromises)

      // console.log("Cache check for : ", idsChunk.length, Date.now() - t0)

-      for (
-        if (
+      for (const cachedObj of cachedData) {
+        if (!cachedObj.data)
+          // non-existent objects are retrieved with `undefined` data
          continue
-        ret[
+        ret[cachedObj.id] = cachedObj.data
      }
    }

@@ -438,21 +491,22 @@ export default class ObjectLoader {
  }

  cacheStoreObjects(objects) {
-    if (
+    if (!this.options.enableCaching || !window.indexedDB) {
      return {}
    }

-
-
-
+    const store = this.cacheDB
+      .transaction('objects', 'readwrite')
+      .objectStore('objects')
+    for (const obj of objects) {
+      const idAndData = obj.split('\t')
      store.put(idAndData[1], idAndData[0])
    }

-    return this.promisifyIdbRequest(
+    return this.promisifyIdbRequest(store.transaction)
  }
}

-
// Credits and more info: https://github.com/jakearchibald/safari-14-idb-fix
function safariFix() {
  const isSafari =

@@ -465,9 +519,9 @@ function safariFix() {

  let intervalId

-  return new Promise(
+  return new Promise((resolve) => {
    const tryIdb = () => indexedDB.databases().finally(resolve)
    intervalId = setInterval(tryIdb, 100)
    tryIdb()
-  }).finally(
+  }).finally(() => clearInterval(intervalId))
}
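For orientation, the reworked progress reporting above always hands `onProgress` a `{ stage, current, total }` object ('download' while children stream in, 'construction' while references are dereferenced). A minimal sketch of a consumer-side handler (not part of the package, just an illustration of that payload):

```js
// Minimal sketch of a progress handler for getAndConstructObject(),
// based on the { stage, current, total } payload shown in the diff above.
// `total` comes from the root object's totalChildrenCount and is handled
// defensively here in case it is missing.
const onProgress = ({ stage, current, total }) => {
  const pct = total ? Math.round((100 * current) / total) : 0
  console.log(`${stage}: ${current}/${total ?? '?'} (${pct}%)`)
}

// usage: const obj = await loader.getAndConstructObject(onProgress)
```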
package/jsconfig.json
ADDED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
{
  "name": "@speckle/objectloader",
-  "version": "2.
+  "version": "2.4.2-alpha.20",
  "description": "Simple API helper to stream in objects from the Speckle Server.",
  "main": "index.js",
  "homepage": "https://speckle.systems",
@@ -11,9 +11,21 @@
    "directory": "packages/objectloader"
  },
  "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
+    "test": "echo \"Error: no test specified\" && exit 1",
+    "lint": "eslint . --ext .js,.ts"
  },
-  "keywords": [
+  "keywords": [
+    "speckle",
+    "aec",
+    "speckle api"
+  ],
  "author": "AEC Systems",
-  "license": "Apache-2.0"
+  "license": "Apache-2.0",
+  "devDependencies": {
+    "eslint": "^8.11.0",
+    "eslint-config-prettier": "^8.5.0",
+    "prettier": "^2.5.1",
+    "undici": "^4.14.1"
+  },
+  "gitHead": "668c96bc6f505aae8388723cff78006667a6d327"
}
package/readme.md
CHANGED
@@ -12,7 +12,9 @@ Comprehensive developer and user documentation can be found in our:

This is a small utility class that helps you stream an object and all its sub-components from the Speckle Server API. It is intended to be used in contexts where you want to "download" the whole object, or iteratively traverse its whole tree.

-
+### In the browser
+
+Here's a sample way on how to use it, pilfered from the [3d viewer package](../viewer):

```js

@@ -36,23 +38,36 @@ async load( { serverUrl, token, streamId, objectId } ) {

```

-If you do not want to process the objects one by one as they are streamed to you, you can use the `getAndConstructObject()` method. Here's an example:
+If you do not want to process the objects one by one as they are streamed to you, you can use the `getAndConstructObject()` method. Here's an example:

-
+````js

let loader = new ObjectLoader( {
-  serverUrl: "https://latest.speckle.dev",
-  streamId: "3ed8357f29",
+  serverUrl: "https://latest.speckle.dev",
+  streamId: "3ed8357f29",
  objectId: "0408ab9caaa2ebefb2dd7f1f671e7555",
  options: {
    fullyTraverseArrays: false, // Default: false. By default, if an array starts with a primitive type, it will not be traversed. Set it to true if you want to capture scenarios in which lists can have intersped objects and primitives, e.g. [ 1, 2, "a", { important object } ]
-    excludeProps: [ 'displayValue', 'displayMesh', '__closure' ] // Default: []. Any prop names that you pass in here will be ignored from object construction traversal.
+    excludeProps: [ 'displayValue', 'displayMesh', '__closure' ] // Default: []. Any prop names that you pass in here will be ignored from object construction traversal.
  }
} )

let obj = await loader.getAndConstructObject( ( e ) => console.log( 'Progress', e ) )

-
+### On the server
+
+Since Node.js does not yet support the [`fetch API`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API/Using_Fetch), you'll need to provide your own `fetch` function in the options object. Note that `fetch` must return a [Web Stream](https://nodejs.org/api/webstreams.html), so [node-fetch](https://github.com/node-fetch/node-fetch) won't work, but [node/undici's](https://undici.nodejs.org/) implementation will.
+
+```js
+import { fetch } from 'undici'
+
+let loader = new ObjectLoader({
+  serverUrl: 'https://latest.speckle.dev',
+  streamId: '3ed8357f29',
+  objectId: '0408ab9caaa2ebefb2dd7f1f671e7555',
+  options: { enableCaching: false, excludeProps: [], fetch },
+})
+````

## Community
