@ifc-lite/server-client 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +373 -0
- package/dist/client.d.ts +250 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +761 -0
- package/dist/client.js.map +1 -0
- package/dist/data-model-decoder.d.ts +72 -0
- package/dist/data-model-decoder.d.ts.map +1 -0
- package/dist/data-model-decoder.js +264 -0
- package/dist/data-model-decoder.js.map +1 -0
- package/dist/index.d.ts +30 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +33 -0
- package/dist/index.js.map +1 -0
- package/dist/parquet-decoder.d.ts +52 -0
- package/dist/parquet-decoder.d.ts.map +1 -0
- package/dist/parquet-decoder.js +420 -0
- package/dist/parquet-decoder.js.map +1 -0
- package/dist/types.d.ts +338 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +5 -0
- package/dist/types.js.map +1 -0
- package/package.json +57 -0
package/dist/client.js
ADDED
|
@@ -0,0 +1,761 @@
|
|
|
1
|
+
// This Source Code Form is subject to the terms of the Mozilla Public
|
|
2
|
+
// License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
3
|
+
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
|
4
|
+
import { decodeParquetGeometry, decodeOptimizedParquetGeometry, isParquetAvailable } from './parquet-decoder';
|
|
5
|
+
/**
|
|
6
|
+
* Compress a file or ArrayBuffer using gzip compression.
|
|
7
|
+
* Uses the browser's CompressionStream API for efficient compression.
|
|
8
|
+
*
|
|
9
|
+
* @param file - File or ArrayBuffer to compress
|
|
10
|
+
* @returns Compressed Blob
|
|
11
|
+
*/
|
|
12
|
+
async function compressGzip(file) {
|
|
13
|
+
const stream = file instanceof File ? file.stream() : new Blob([file]).stream();
|
|
14
|
+
const compressionStream = new CompressionStream('gzip');
|
|
15
|
+
const compressedStream = stream.pipeThrough(compressionStream);
|
|
16
|
+
return new Response(compressedStream).blob();
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Compute SHA-256 hash of a file or ArrayBuffer.
|
|
20
|
+
* Used for cache key generation client-side to avoid uploading files that are already cached.
|
|
21
|
+
*
|
|
22
|
+
* @param file - File or ArrayBuffer to hash
|
|
23
|
+
* @returns Hexadecimal SHA-256 hash string
|
|
24
|
+
*/
|
|
25
|
+
async function computeFileHash(file) {
|
|
26
|
+
const buffer = file instanceof File ? await file.arrayBuffer() : file;
|
|
27
|
+
const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
|
|
28
|
+
return Array.from(new Uint8Array(hashBuffer))
|
|
29
|
+
.map(b => b.toString(16).padStart(2, '0'))
|
|
30
|
+
.join('');
|
|
31
|
+
}
|
|
32
|
+
/**
 * Client for the IFC-Lite Server API.
 *
 * @example
 * ```typescript
 * const client = new IfcServerClient({
 *   baseUrl: 'https://ifc-lite.railway.app'
 * });
 *
 * // Check server health
 * const health = await client.health();
 * console.log(health.status);
 *
 * // Parse IFC file
 * const result = await client.parse(file);
 * console.log(`Meshes: ${result.meshes.length}`);
 * ```
 */
export class IfcServerClient {
    /**
     * Create a new IFC server client.
     *
     * @param config - Client configuration ({ baseUrl, timeout? })
     */
    constructor(config) {
        // Remove trailing slash from base URL
        this.baseUrl = config.baseUrl.replace(/\/$/, '');
        this.timeout = config.timeout ?? 300000; // 5 minutes default
    }
    /**
     * Check server health.
     *
     * @returns Health status
     */
    async health() {
        const response = await fetch(`${this.baseUrl}/api/v1/health`, {
            signal: AbortSignal.timeout(this.timeout),
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        return response.json();
    }
    /**
     * Parse IFC file and return all geometry.
     *
     * For large files (>10MB), consider using `parseStream()` instead
     * to receive progressive updates.
     *
     * @param file - File or ArrayBuffer containing IFC data
     * @returns Parse result with all meshes
     *
     * @example
     * ```typescript
     * const result = await client.parse(file);
     * for (const mesh of result.meshes) {
     *   scene.add(createMesh(mesh.positions, mesh.indices, mesh.color));
     * }
     * ```
     */
    async parse(file) {
        // Compress file before upload for faster transfer
        const compressedFile = await compressGzip(file);
        const fileName = file instanceof File ? file.name : 'model.ifc';
        const formData = new FormData();
        formData.append('file', compressedFile, fileName);
        const response = await fetch(`${this.baseUrl}/api/v1/parse`, {
            method: 'POST',
            body: formData,
            signal: AbortSignal.timeout(this.timeout),
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        return response.json();
    }
    /**
     * Check whether geometry for a file hash is already cached on the server.
     *
     * Failures of the cache-check request (network error, 5s timeout) are
     * treated as a cache miss so the caller can fall back to uploading,
     * instead of aborting the whole parse.
     *
     * @param hash - SHA-256 hash of the file content
     * @returns true if the server reports a cache hit
     * @private
     */
    async isCached(hash) {
        try {
            const response = await fetch(`${this.baseUrl}/api/v1/cache/check/${hash}`, {
                method: 'GET',
                signal: AbortSignal.timeout(5000), // 5s timeout for cache check
            });
            return response.ok;
        }
        catch {
            // Cache check is an optimization only — never let it fail the parse.
            return false;
        }
    }
    /**
     * Parse IFC file and return geometry in Parquet format.
     *
     * This method provides ~15x smaller payload size compared to JSON,
     * which is critical for large IFC files over network connections.
     *
     * **Cache-aware:** Computes file hash client-side and checks cache before uploading.
     * If cached, skips upload entirely for much faster response.
     *
     * **Requirements:** This method requires `parquet-wasm` and `apache-arrow`
     * to be installed as peer dependencies.
     *
     * @param file - File or ArrayBuffer containing IFC data
     * @returns Parse result with all meshes (decoded from Parquet)
     *
     * @example
     * ```typescript
     * const result = await client.parseParquet(file);
     * console.log(`Payload: ${result.parquet_stats.payload_size} bytes`);
     * console.log(`Decode time: ${result.parquet_stats.decode_time_ms}ms`);
     * for (const mesh of result.meshes) {
     *   scene.add(createMesh(mesh.positions, mesh.indices, mesh.color));
     * }
     * ```
     */
    async parseParquet(file) {
        // Check if Parquet decoding is available
        const parquetReady = await isParquetAvailable();
        if (!parquetReady) {
            throw new Error('Parquet parsing requires parquet-wasm and apache-arrow. ' +
                'Install them with: npm install parquet-wasm apache-arrow');
        }
        // Step 1: Compute hash client-side (fast, ~50ms for large files)
        const hashStart = performance.now();
        const hash = await computeFileHash(file);
        const hashTime = performance.now() - hashStart;
        console.log(`[client] Computed file hash in ${hashTime.toFixed(0)}ms: ${hash.substring(0, 16)}...`);
        // Step 2: Check if already cached
        const cacheCheckStart = performance.now();
        const cached = await this.isCached(hash);
        const cacheCheckTime = performance.now() - cacheCheckStart;
        if (cached) {
            // Cache HIT - fetch directly without uploading!
            console.log(`[client] Cache HIT (check: ${cacheCheckTime.toFixed(0)}ms) - skipping upload`);
            return this.fetchCachedGeometry(hash);
        }
        // Cache MISS - upload and process as usual
        console.log(`[client] Cache MISS (check: ${cacheCheckTime.toFixed(0)}ms) - uploading file`);
        return this.uploadAndProcessParquet(file, hash);
    }
    /**
     * Parse IFC file with streaming Parquet response for progressive rendering.
     *
     * Returns an async generator that yields geometry batches as they're processed.
     * Use this for large files (>50MB) to show geometry progressively.
     *
     * After streaming completes, fetch the data model via `fetchDataModel(cacheKey)`.
     *
     * @param file - IFC file to parse (File or ArrayBuffer)
     * @param onBatch - Callback for each geometry batch (for immediate rendering)
     * @returns Final result with cache_key, stats, and metadata
     *
     * @example
     * ```typescript
     * const result = await client.parseParquetStream(file, (batch) => {
     *   // Render each batch immediately
     *   for (const mesh of batch.meshes) {
     *     scene.add(createMesh(mesh));
     *   }
     * });
     *
     * // After geometry is complete, fetch data model for properties panel
     * const dataModel = await client.fetchDataModel(result.cache_key);
     * ```
     */
    async parseParquetStream(file, onBatch) {
        const parquetReady = await isParquetAvailable();
        if (!parquetReady) {
            throw new Error('Parquet streaming requires parquet-wasm and apache-arrow. ' +
                'Install them with: npm install parquet-wasm apache-arrow');
        }
        const fileSize = file instanceof File ? file.size : file.byteLength;
        const fileName = file instanceof File ? file.name : 'model.ifc';
        // Step 1: Compute hash and check cache first (even for streaming)
        const hashStart = performance.now();
        const hash = await computeFileHash(file);
        const hashTime = performance.now() - hashStart;
        console.log(`[client] Stream: computed hash in ${hashTime.toFixed(0)}ms: ${hash.substring(0, 16)}...`);
        // Step 2: Check if already cached
        const cacheCheckStart = performance.now();
        const cached = await this.isCached(hash);
        const cacheCheckTime = performance.now() - cacheCheckStart;
        if (cached) {
            // CACHE HIT - fetch all geometry at once (much faster than re-parsing)
            console.log(`[client] Stream: Cache HIT (check: ${cacheCheckTime.toFixed(0)}ms) - fetching cached geometry`);
            const cachedResult = await this.fetchCachedGeometry(hash);
            // Send all meshes as a single batch to the callback
            const decodeStart = performance.now();
            onBatch({
                meshes: cachedResult.meshes,
                batch_number: 1,
                decode_time_ms: performance.now() - decodeStart,
            });
            return {
                cache_key: cachedResult.cache_key,
                total_meshes: cachedResult.meshes.length,
                stats: cachedResult.stats,
                metadata: cachedResult.metadata,
            };
        }
        // CACHE MISS - use streaming for progressive rendering
        console.log(`[client] Stream: Cache MISS (check: ${cacheCheckTime.toFixed(0)}ms) - starting stream for ${fileName} (${(fileSize / 1024 / 1024).toFixed(1)}MB)`);
        const formData = new FormData();
        formData.append('file', file instanceof File ? file : new Blob([file]), fileName);
        const uploadStart = performance.now();
        const response = await fetch(`${this.baseUrl}/api/v1/parse/parquet-stream`, {
            method: 'POST',
            body: formData,
            signal: AbortSignal.timeout(this.timeout),
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        if (!response.body) {
            throw new Error('No response body for streaming');
        }
        // Parse SSE stream
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let buffer = '';
        let cache_key = '';
        let total_meshes = 0;
        let stats = null;
        let metadata = null;
        while (true) {
            const { done, value } = await reader.read();
            if (done)
                break;
            buffer += decoder.decode(value, { stream: true });
            // Process complete SSE events
            const lines = buffer.split('\n');
            buffer = lines.pop() || ''; // Keep incomplete line in buffer
            for (const line of lines) {
                if (!line.startsWith('data:'))
                    continue;
                const jsonStr = line.slice(5).trim();
                if (!jsonStr)
                    continue;
                try {
                    const event = JSON.parse(jsonStr);
                    switch (event.type) {
                        case 'start':
                            cache_key = event.cache_key;
                            console.log(`[client] Stream started: ${event.total_estimate} entities, cache_key: ${cache_key.substring(0, 16)}...`);
                            break;
                        case 'progress':
                            // Progress events can be used for UI feedback
                            break;
                        case 'batch': {
                            const decodeStart = performance.now();
                            // Decode base64 Parquet data
                            const binaryStr = atob(event.data);
                            const bytes = new Uint8Array(binaryStr.length);
                            for (let i = 0; i < binaryStr.length; i++) {
                                bytes[i] = binaryStr.charCodeAt(i);
                            }
                            // Decode Parquet to meshes
                            const meshes = await decodeParquetGeometry(bytes.buffer);
                            const decodeTime = performance.now() - decodeStart;
                            total_meshes += meshes.length;
                            console.log(`[client] Batch #${event.batch_number}: ${meshes.length} meshes, decode: ${decodeTime.toFixed(0)}ms`);
                            // Call the batch callback for immediate rendering
                            onBatch({
                                meshes,
                                batch_number: event.batch_number,
                                decode_time_ms: decodeTime,
                            });
                            break;
                        }
                        case 'complete': {
                            stats = event.stats;
                            metadata = event.metadata;
                            const totalTime = performance.now() - uploadStart;
                            console.log(`[client] Stream complete: ${total_meshes} meshes in ${totalTime.toFixed(0)}ms`);
                            break;
                        }
                        case 'error':
                            throw new Error(`Stream error: ${event.message}`);
                    }
                }
                catch (e) {
                    if (e instanceof SyntaxError) {
                        console.warn('[client] Failed to parse SSE event:', jsonStr);
                    }
                    else {
                        throw e;
                    }
                }
            }
        }
        if (!stats || !metadata) {
            throw new Error('Stream ended without complete event');
        }
        return {
            cache_key,
            total_meshes,
            stats,
            metadata,
        };
    }
    /**
     * Fetch cached geometry directly without uploading the file.
     * @private
     */
    async fetchCachedGeometry(hash) {
        const fetchStart = performance.now();
        const response = await fetch(`${this.baseUrl}/api/v1/cache/geometry/${hash}`, {
            method: 'GET',
            signal: AbortSignal.timeout(this.timeout),
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        const fetchTime = performance.now() - fetchStart;
        console.log(`[client] Fetched cached geometry in ${fetchTime.toFixed(0)}ms`);
        // Extract metadata from header
        const metadataHeader = response.headers.get('X-IFC-Metadata');
        if (!metadataHeader) {
            throw new Error('Missing X-IFC-Metadata header in cached geometry response');
        }
        const metadata = JSON.parse(metadataHeader);
        // Get binary payload
        const payloadBuffer = await response.arrayBuffer();
        const payloadSize = payloadBuffer.byteLength;
        // Parse response (same format as upload path)
        return this.parseParquetResponse(payloadBuffer, metadata, payloadSize);
    }
    /**
     * Upload file and process on server.
     * @private
     */
    async uploadAndProcessParquet(file, hash) {
        const fileSize = file instanceof File ? file.size : file.byteLength;
        const fileName = file instanceof File ? file.name : 'model.ifc';
        // Skip compression for large files (>50MB) - compression time exceeds transfer savings
        // Also skip for localhost where bandwidth is not a bottleneck
        const isLocalhost = this.baseUrl.includes('localhost') || this.baseUrl.includes('127.0.0.1');
        const skipCompression = fileSize > 50 * 1024 * 1024 || isLocalhost;
        let uploadFile;
        if (skipCompression) {
            console.log(`[client] Skipping compression (file: ${(fileSize / 1024 / 1024).toFixed(1)}MB, localhost: ${isLocalhost})`);
            uploadFile = file instanceof File ? file : new Blob([file]);
        }
        else {
            const compressStart = performance.now();
            uploadFile = await compressGzip(file);
            console.log(`[client] Compressed in ${(performance.now() - compressStart).toFixed(0)}ms: ${(fileSize / 1024 / 1024).toFixed(1)}MB → ${(uploadFile.size / 1024 / 1024).toFixed(1)}MB`);
        }
        const formData = new FormData();
        formData.append('file', uploadFile, fileName);
        const uploadStart = performance.now();
        const response = await fetch(`${this.baseUrl}/api/v1/parse/parquet`, {
            method: 'POST',
            body: formData,
            signal: AbortSignal.timeout(this.timeout),
        });
        const uploadTime = performance.now() - uploadStart;
        console.log(`[client] Upload and processing completed in ${uploadTime.toFixed(0)}ms`);
        if (!response.ok) {
            throw await this.handleError(response);
        }
        // Extract metadata from header
        const metadataHeader = response.headers.get('X-IFC-Metadata');
        if (!metadataHeader) {
            throw new Error('Missing X-IFC-Metadata header in Parquet response');
        }
        const metadata = JSON.parse(metadataHeader);
        // Verify hash matches (sanity check)
        if (metadata.cache_key !== hash) {
            console.warn(`[client] Cache key mismatch: expected ${hash.substring(0, 16)}..., got ${metadata.cache_key.substring(0, 16)}...`);
        }
        // Get binary payload
        const payloadBuffer = await response.arrayBuffer();
        const payloadSize = payloadBuffer.byteLength;
        // Parse response (same format as cached path)
        return this.parseParquetResponse(payloadBuffer, metadata, payloadSize);
    }
    /**
     * Parse Parquet response payload into meshes.
     * @private
     */
    async parseParquetResponse(payloadBuffer, metadata, payloadSize) {
        // Extract geometry and data model from combined Parquet format
        // Format: [geometry_len][geometry_data][data_model_len][data_model_data]
        // Note: geometry_data itself contains [mesh_len][mesh_data][vertex_len][vertex_data][index_len][index_data]
        const view = new DataView(payloadBuffer);
        let offset = 0;
        // Detect format: check if payload starts with length prefix (wrapped format)
        // Even if metadata.data_model_stats is undefined, cached responses use wrapped format
        // Guard: payloads shorter than the 4-byte prefix cannot be wrapped
        // (reading getUint32 on them would throw a RangeError).
        const firstLen = payloadBuffer.byteLength >= 4 ? view.getUint32(0, true) : 0;
        const hasWrapper = firstLen > 0 && firstLen < payloadBuffer.byteLength - 4;
        let geometryData;
        let dataModelBuffer;
        if (hasWrapper) {
            // Wrapped format: [geometry_len][geometry_data][data_model_len][data_model_data]
            const geometryLen = firstLen;
            offset += 4;
            // Validate geometry length
            if (geometryLen > payloadBuffer.byteLength || geometryLen === 0 || offset + geometryLen > payloadBuffer.byteLength) {
                throw new Error(`Invalid geometry length: ${geometryLen}, buffer size: ${payloadBuffer.byteLength}, offset: ${offset}`);
            }
            geometryData = payloadBuffer.slice(offset, offset + geometryLen);
            offset += geometryLen;
            // Extract data model if present
            if (offset < payloadBuffer.byteLength) {
                const dataModelLen = view.getUint32(offset, true);
                offset += 4;
                if (dataModelLen > 0 && offset + dataModelLen <= payloadBuffer.byteLength) {
                    dataModelBuffer = payloadBuffer.slice(offset, offset + dataModelLen);
                }
            }
        }
        else {
            // Old format: geometry Parquet directly (no wrapper)
            console.log('[client] Detected old format (no wrapper), using entire payload as geometry');
            geometryData = payloadBuffer;
            dataModelBuffer = undefined;
        }
        // Decode Parquet geometry
        const decodeStart = performance.now();
        const meshes = await decodeParquetGeometry(geometryData);
        const decodeTime = performance.now() - decodeStart;
        return {
            cache_key: metadata.cache_key,
            meshes,
            metadata: metadata.metadata,
            stats: metadata.stats,
            parquet_stats: {
                payload_size: payloadSize,
                decode_time_ms: Math.round(decodeTime),
            },
            data_model: dataModelBuffer,
        };
    }
    /**
     * Fetch the data model for a previously parsed file.
     *
     * The data model is processed in the background after geometry is returned.
     * This method polls until the data model is ready (with exponential backoff).
     *
     * @param cacheKey - The cache key from the geometry parse response
     * @param maxRetries - Maximum number of retries (default: 10)
     * @returns Data model Parquet buffer, or null if not available after retries
     *
     * @example
     * ```typescript
     * const geometryResult = await client.parseParquet(file);
     * // Start rendering geometry immediately...
     *
     * // Then fetch data model in background
     * const dataModelBuffer = await client.fetchDataModel(geometryResult.cache_key);
     * if (dataModelBuffer) {
     *   const dataModel = await decodeDataModel(dataModelBuffer);
     * }
     * ```
     */
    async fetchDataModel(cacheKey, maxRetries = 10) {
        let delay = 100; // Start with 100ms delay
        for (let attempt = 0; attempt < maxRetries; attempt++) {
            try {
                const response = await fetch(`${this.baseUrl}/api/v1/parse/data-model/${cacheKey}`, {
                    method: 'GET',
                    signal: AbortSignal.timeout(30000),
                });
                if (response.status === 200) {
                    // Data model is ready
                    const buffer = await response.arrayBuffer();
                    console.log(`[client] Data model fetched: ${(buffer.byteLength / 1024 / 1024).toFixed(2)}MB`);
                    return buffer;
                }
                else if (response.status === 202) {
                    // Still processing, wait and retry
                    console.log(`[client] Data model still processing (attempt ${attempt + 1}/${maxRetries}), waiting ${delay}ms...`);
                    await new Promise(resolve => setTimeout(resolve, delay));
                    delay = Math.min(delay * 1.5, 2000); // Exponential backoff, max 2s
                }
                else if (response.status === 404) {
                    // Cache key not found
                    console.warn(`[client] Data model not found for cache key: ${cacheKey}`);
                    return null;
                }
                else {
                    throw new Error(`Unexpected response status: ${response.status}`);
                }
            }
            catch (error) {
                if (attempt === maxRetries - 1) {
                    console.error('[client] Failed to fetch data model:', error);
                    return null;
                }
                // Retry on network errors
                await new Promise(resolve => setTimeout(resolve, delay));
                delay = Math.min(delay * 1.5, 2000);
            }
        }
        console.warn('[client] Data model fetch timed out after max retries');
        return null;
    }
    /**
     * Check if Parquet parsing is available.
     *
     * @returns true if parquet-wasm is available for parseParquet()
     */
    async isParquetSupported() {
        return isParquetAvailable();
    }
    /**
     * Parse IFC file using the ara3d BOS-optimized Parquet format.
     *
     * This is the most efficient transfer format, providing:
     * - ~50x smaller payloads compared to JSON
     * - Integer quantized vertices (0.1mm precision)
     * - Mesh deduplication (instancing)
     * - Byte colors instead of floats
     * - Optional normals (computed on client if not included)
     *
     * **Requirements:** Requires `parquet-wasm` and `apache-arrow`.
     *
     * @param file - File or ArrayBuffer containing IFC data
     * @returns Parse result with all meshes (decoded from optimized Parquet)
     *
     * @example
     * ```typescript
     * const result = await client.parseParquetOptimized(file);
     * console.log(`Unique meshes: ${result.optimization_stats.unique_meshes}`);
     * console.log(`Mesh reuse ratio: ${result.optimization_stats.mesh_reuse_ratio}x`);
     * console.log(`Payload: ${result.parquet_stats.payload_size} bytes`);
     * ```
     */
    async parseParquetOptimized(file) {
        // Check if Parquet decoding is available
        const parquetReady = await isParquetAvailable();
        if (!parquetReady) {
            throw new Error('Parquet parsing requires parquet-wasm and apache-arrow. ' +
                'Install them with: npm install parquet-wasm apache-arrow');
        }
        // Compress file before upload for faster transfer
        const compressedFile = await compressGzip(file);
        const fileName = file instanceof File ? file.name : 'model.ifc';
        const formData = new FormData();
        formData.append('file', compressedFile, fileName);
        const response = await fetch(`${this.baseUrl}/api/v1/parse/parquet/optimized`, {
            method: 'POST',
            body: formData,
            signal: AbortSignal.timeout(this.timeout),
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        // Extract metadata from header
        const metadataHeader = response.headers.get('X-IFC-Metadata');
        if (!metadataHeader) {
            throw new Error('Missing X-IFC-Metadata header in optimized Parquet response');
        }
        const metadata = JSON.parse(metadataHeader);
        // Get binary payload
        const payloadBuffer = await response.arrayBuffer();
        const payloadSize = payloadBuffer.byteLength;
        // Decode optimized Parquet geometry
        const decodeStart = performance.now();
        const meshes = await decodeOptimizedParquetGeometry(payloadBuffer, metadata.vertex_multiplier);
        const decodeTime = performance.now() - decodeStart;
        return {
            cache_key: metadata.cache_key,
            meshes,
            metadata: metadata.metadata,
            stats: metadata.stats,
            optimization_stats: metadata.optimization_stats,
            parquet_stats: {
                payload_size: payloadSize,
                decode_time_ms: Math.round(decodeTime),
            },
        };
    }
    /**
     * Parse IFC file with streaming response.
     *
     * Yields events as geometry is processed, allowing for
     * progressive rendering of large models.
     *
     * @param file - File or ArrayBuffer containing IFC data
     * @yields Stream events (start, progress, batch, complete, error)
     *
     * @example
     * ```typescript
     * for await (const event of client.parseStream(file)) {
     *   switch (event.type) {
     *     case 'start':
     *       console.log(`Processing ~${event.total_estimate} entities`);
     *       break;
     *     case 'progress':
     *       updateProgressBar(event.processed / event.total);
     *       break;
     *     case 'batch':
     *       for (const mesh of event.meshes) {
     *         scene.add(createMesh(mesh));
     *       }
     *       break;
     *     case 'complete':
     *       console.log(`Done in ${event.stats.total_time_ms}ms`);
     *       break;
     *     case 'error':
     *       console.error(event.message);
     *       break;
     *   }
     * }
     * ```
     */
    async *parseStream(file) {
        const formData = new FormData();
        const blob = file instanceof File ? file : new Blob([file], { type: 'application/octet-stream' });
        formData.append('file', blob, file instanceof File ? file.name : 'model.ifc');
        const response = await fetch(`${this.baseUrl}/api/v1/parse/stream`, {
            method: 'POST',
            body: formData,
            // Don't set Content-Type header - browser will set it with boundary for FormData
            headers: {
                Accept: 'text/event-stream',
            },
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        if (!response.body) {
            throw new Error('Response body is null');
        }
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let buffer = '';
        try {
            while (true) {
                const { done, value } = await reader.read();
                if (done)
                    break;
                buffer += decoder.decode(value, { stream: true });
                // Parse SSE events (events are separated by a blank line)
                const lines = buffer.split('\n\n');
                buffer = lines.pop() || '';
                for (const line of lines) {
                    if (line.startsWith('data: ')) {
                        try {
                            const data = JSON.parse(line.slice(6));
                            yield data;
                        }
                        catch {
                            // Skip malformed events
                        }
                    }
                }
            }
            // Process remaining buffer
            if (buffer.startsWith('data: ')) {
                try {
                    const data = JSON.parse(buffer.slice(6));
                    yield data;
                }
                catch {
                    // Skip malformed events
                }
            }
        }
        finally {
            reader.releaseLock();
        }
    }
    /**
     * Get quick metadata about an IFC file without processing geometry.
     *
     * This is much faster than a full parse and is useful for
     * showing file information before processing.
     *
     * @param file - File or ArrayBuffer containing IFC data
     * @returns Metadata about the file
     *
     * @example
     * ```typescript
     * const meta = await client.getMetadata(file);
     * console.log(`${meta.entity_count} entities, ${meta.geometry_count} with geometry`);
     * console.log(`Schema: ${meta.schema_version}`);
     * ```
     */
    async getMetadata(file) {
        const formData = new FormData();
        formData.append('file', file instanceof File ? file : new Blob([file]), file instanceof File ? file.name : 'model.ifc');
        const response = await fetch(`${this.baseUrl}/api/v1/parse/metadata`, {
            method: 'POST',
            body: formData,
            signal: AbortSignal.timeout(30000), // 30 second timeout for metadata
        });
        if (!response.ok) {
            throw await this.handleError(response);
        }
        return response.json();
    }
    /**
     * Retrieve a cached parse result by key.
     *
     * @param key - Cache key (SHA256 hash of file content)
     * @returns Cached parse result, or null if not found
     *
     * @example
     * ```typescript
     * // Store the cache key from a previous parse
     * const result = await client.parse(file);
     * const cacheKey = result.cache_key;
     *
     * // Later, retrieve from cache
     * const cached = await client.getCached(cacheKey);
     * if (cached) {
     *   console.log('Loaded from cache!');
     * }
     * ```
     */
    async getCached(key) {
        const response = await fetch(`${this.baseUrl}/api/v1/cache/${key}`, {
            signal: AbortSignal.timeout(this.timeout),
        });
        if (response.status === 404) {
            return null;
        }
        if (!response.ok) {
            throw await this.handleError(response);
        }
        return response.json();
    }
    /**
     * Handle error responses from the server.
     *
     * Returns (does not throw) an Error built from the JSON error body when
     * available, falling back to the HTTP status line otherwise.
     */
    async handleError(response) {
        try {
            const error = await response.json();
            return new Error(`Server error (${error.code}): ${error.error}`);
        }
        catch {
            return new Error(`Server error: ${response.status} ${response.statusText}`);
        }
    }
}
|
|
761
|
+
//# sourceMappingURL=client.js.map
|