xrblocks 0.1.0 → 0.2.0
This diff shows the content of the publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
- package/README.md +107 -22
- package/build/addons/ai/GeminiManager.d.ts +7 -4
- package/build/addons/ai/GeminiManager.js +43 -22
- package/build/agent/Agent.d.ts +25 -1
- package/build/agent/SkyboxAgent.d.ts +119 -3
- package/build/agent/Tool.d.ts +18 -4
- package/build/agent/index.d.ts +1 -0
- package/build/agent/tools/GenerateSkyboxTool.d.ts +3 -3
- package/build/agent/tools/GetWeatherTool.d.ts +7 -8
- package/build/ai/AI.d.ts +2 -2
- package/build/ai/Gemini.d.ts +5 -4
- package/build/core/Core.d.ts +1 -0
- package/build/core/Options.d.ts +10 -0
- package/build/core/components/ScreenshotSynthesizer.d.ts +4 -2
- package/build/depth/Depth.d.ts +5 -3
- package/build/depth/DepthMesh.d.ts +7 -0
- package/build/depth/DepthTextures.d.ts +6 -4
- package/build/depth/occlusion/OcclusionPass.d.ts +6 -5
- package/build/simulator/SimulatorOptions.d.ts +1 -0
- package/build/sound/AudioListener.d.ts +16 -1
- package/build/sound/AudioPlayer.d.ts +21 -2
- package/build/sound/CoreSound.d.ts +25 -0
- package/build/ui/components/IconButton.d.ts +6 -2
- package/build/ui/components/TextButton.d.ts +0 -1
- package/build/xrblocks.js +773 -151
- package/build/xrblocks.js.map +1 -1
- package/build/xrblocks.min.js +1 -1
- package/build/xrblocks.min.js.map +1 -1
- package/package.json +3 -3
package/build/xrblocks.js CHANGED

@@ -14,9 +14,9 @@
  * limitations under the License.
  *
  * @file xrblocks.js
- * @version v0.1.0
- * @commitid
- * @builddate 2025-
+ * @version v0.2.0
+ * @commitid c24c0e2
+ * @builddate 2025-10-20T22:06:39.480Z
  * @description XR Blocks SDK, built from source with the above commit ID.
  * @agent When using with Gemini to create XR apps, use **Gemini Canvas** mode,
  * and follow rules below:
@@ -120,11 +120,13 @@ class Memory {
      */
 class Agent {
     static { this.dependencies = {}; }
-    constructor(ai, tools = [], instruction = '') {
+    constructor(ai, tools = [], instruction = '', callbacks) {
+        this.isSessionActive = false;
         this.ai = ai;
         this.tools = tools;
         this.memory = new Memory();
         this.contextBuilder = new Context(instruction);
+        this.lifecycleCallbacks = callbacks;
     }
     /**
      * Starts the agent's reasoning loop with an initial prompt.
@@ -176,6 +178,17 @@ class Agent {
     findTool(name) {
         return this.tools.find(tool => tool.name === name);
     }
+    /**
+     * Get the current session state.
+     * @returns Object containing session information
+     */
+    getSessionState() {
+        return {
+            isActive: this.isSessionActive,
+            toolCount: this.tools.length,
+            memorySize: this.memory.getShortTerm?.()?.length || 0,
+        };
+    }
 }

 /**
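
Reviewer note: the new `callbacks` constructor argument and `getSessionState()` accessor let application code observe an agent's lifecycle. A minimal sketch of the intended usage; `xb.core.ai` and `myTools` are illustrative placeholders, not part of this diff:

```js
// Sketch: construct an Agent with lifecycle callbacks, then poll its state.
const agent = new xb.Agent(xb.core.ai, myTools, 'You are a helpful XR assistant.', {
  onSessionStart: () => console.log('agent session started'),
  onSessionEnd: () => console.log('agent session ended'),
  onError: (error) => console.error('agent error:', error),
});

// getSessionState() returns a plain snapshot object.
const { isActive, toolCount, memorySize } = agent.getSessionState();
console.log(`active=${isActive} tools=${toolCount} memory=${memorySize}`);
```
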
@@ -192,15 +205,29 @@ class Tool {
         this.onTriggered = options.onTriggered;
     }
     /**
-     * Executes the tool's action.
+     * Executes the tool's action with standardized error handling.
      * @param args - The arguments for the tool.
-     * @returns
+     * @returns A promise that resolves with a ToolResult containing success/error information.
      */
-    execute(args) {
-
-
+    async execute(args) {
+        try {
+            if (this.onTriggered) {
+                const result = await Promise.resolve(this.onTriggered(args));
+                return {
+                    success: true,
+                    data: result,
+                    metadata: { executedAt: Date.now(), toolName: this.name }
+                };
+            }
+            throw new Error('The execute method must be implemented by a subclass or onTriggered must be provided.');
+        }
+        catch (error) {
+            return {
+                success: false,
+                error: error instanceof Error ? error.message : String(error),
+                metadata: { executedAt: Date.now(), toolName: this.name }
+            };
         }
-        throw new Error('The execute method must be implemented by a subclass or onTriggered must be provided.');
     }
     /**
      * Returns a JSON representation of the tool.
@@ -211,7 +238,7 @@ class Tool {
         if (this.description) {
             result.description = this.description;
         }
-        if (this.parameters
+        if (this.parameters) {
             result.parameters = this.parameters;
         }
         return result;
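
Reviewer note: `Tool.execute` now always resolves to a `ToolResult` envelope (`success` plus `data` or `error`, with `metadata`), so callers need no per-tool error handling. A hedged sketch, assuming the options-object constructor implied by `options.onTriggered` and the `name`/`description`/`parameters` fields read by `toJSON()`:

```js
// Sketch: a tool defined purely through onTriggered (constructor shape assumed).
const diceTool = new xb.Tool({
  name: 'rollDice',
  description: 'Rolls a six-sided die.',
  onTriggered: () => 1 + Math.floor(Math.random() * 6),
});

const result = await diceTool.execute({});
if (result.success) {
  console.log('rolled', result.data, 'by', result.metadata.toolName);
} else {
  console.error('tool failed:', result.error);
}
```
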
@@ -244,7 +271,7 @@ class GenerateSkyboxTool extends Tool {
     /**
      * Executes the tool's action.
      * @param args - The prompt to use to generate the skybox.
-     * @returns A promise that resolves with
+     * @returns A promise that resolves with a ToolResult containing success/error information.
      */
     async execute(args) {
         try {
@@ -254,30 +281,105 @@ class GenerateSkyboxTool extends Tool {
                 console.log('Applying texture...');
                 this.scene.background = new THREE.TextureLoader().load(image);
                 this.scene.background.mapping = THREE.EquirectangularReflectionMapping;
-                return
+                return {
+                    success: true,
+                    data: 'Skybox generated successfully.',
+                    metadata: { prompt: args.prompt, timestamp: Date.now() }
+                };
             }
             else {
-                return
+                return {
+                    success: false,
+                    error: 'Failed to generate skybox image',
+                    metadata: { prompt: args.prompt, timestamp: Date.now() }
+                };
             }
         }
         catch (e) {
             console.error('error:', e);
-            return
+            return {
+                success: false,
+                error: e instanceof Error ? e.message :
+                    'Unknown error while creating skybox',
+                metadata: { prompt: args.prompt, timestamp: Date.now() }
+            };
         }
     }
 }

+/**
+ * Skybox Agent for generating 360-degree equirectangular backgrounds through conversation.
+ *
+ * @example Basic usage
+ * ```typescript
+ * // 1. Enable audio (required for live sessions)
+ * await xb.core.sound.enableAudio();
+ *
+ * // 2. Create agent
+ * const agent = new xb.SkyboxAgent(xb.core.ai, xb.core.sound, xb.core.scene);
+ *
+ * // 3. Start session
+ * await agent.startLiveSession({
+ *   onopen: () => console.log('Session ready'),
+ *   onmessage: (msg) => handleMessage(msg),
+ *   onclose: () => console.log('Session closed')
+ * });
+ *
+ * // 4. Clean up when done
+ * await agent.stopLiveSession();
+ * xb.core.sound.disableAudio();
+ * ```
+ *
+ * @example With lifecycle callbacks
+ * ```typescript
+ * const agent = new xb.SkyboxAgent(
+ *   xb.core.ai,
+ *   xb.core.sound,
+ *   xb.core.scene,
+ *   {
+ *     onSessionStart: () => updateUI('active'),
+ *     onSessionEnd: () => updateUI('inactive'),
+ *     onError: (error) => showError(error)
+ *   }
+ * );
+ * ```
+ *
+ * @remarks
+ * - Audio must be enabled BEFORE starting live session using `xb.core.sound.enableAudio()`
+ * - Users are responsible for managing audio lifecycle
+ * - Always call `stopLiveSession()` before disabling audio
+ * - Session state can be checked using `getSessionState()` and `getLiveSessionState()`
+ */
 class SkyboxAgent extends Agent {
-    constructor(ai, sound, scene) {
+    constructor(ai, sound, scene, callbacks) {
         super(ai, [new GenerateSkyboxTool(ai, scene)], `You are a friendly and helpful skybox designer. The response should be short. Your only capability
     is to generate a 360-degree equirectangular skybox image based on
     a user's description. You will generate a default skybox if the user
     does not provide any description. You will use the tool 'generateSkybox'
-    with the summarized description as the 'prompt' argument to create the skybox
+    with the summarized description as the 'prompt' argument to create the skybox.`, callbacks);
         this.sound = sound;
+        this.sessionState = {
+            isActive: false,
+            messageCount: 0,
+            toolCallCount: 0
+        };
     }
+    /**
+     * Starts a live AI session for real-time conversation.
+     *
+     * @param callbacks - Optional callbacks for session events. Can also be set using ai.setLiveCallbacks()
+     * @throws If AI model is not initialized or live session is not available
+     *
+     * @remarks
+     * Audio must be enabled separately using `xb.core.sound.enableAudio()` before starting the session.
+     * This gives users control over when microphone permissions are requested.
+     */
     async startLiveSession(callbacks) {
-
+        // Wrap callbacks to track session state
+        const wrappedCallbacks = this.wrapCallbacks(callbacks);
+        if (callbacks) {
+            this.ai.setLiveCallbacks(wrappedCallbacks);
+        }
         const functionDeclarations = this.tools.map(tool => tool.toJSON());
         const systemInstruction = {
             parts: [{ text: this.contextBuilder.instruction }]
@@ -286,16 +388,118 @@ class SkyboxAgent {
             tools: functionDeclarations,
             systemInstruction: systemInstruction,
         });
-        this.
+        this.sessionState.isActive = true;
+        this.sessionState.startTime = Date.now();
+        this.isSessionActive = true;
+        await this.lifecycleCallbacks?.onSessionStart?.();
     }
+    /**
+     * Stops the live AI session.
+     *
+     * @remarks
+     * Audio must be disabled separately using `xb.core.sound.disableAudio()` after stopping the session.
+     */
     async stopLiveSession() {
         await this.ai.stopLiveSession();
-        this.
+        this.sessionState.isActive = false;
+        this.sessionState.endTime = Date.now();
+        this.isSessionActive = false;
+        await this.lifecycleCallbacks?.onSessionEnd?.();
     }
+    /**
+     * Wraps user callbacks to track session state and trigger lifecycle events.
+     * @param callbacks - The callbacks to wrap.
+     * @returns The wrapped callbacks.
+     */
+    wrapCallbacks(callbacks) {
+        return {
+            onopen: () => {
+                callbacks?.onopen?.();
+            },
+            onmessage: (message) => {
+                this.sessionState.messageCount++;
+                callbacks?.onmessage?.(message);
+            },
+            onerror: (error) => {
+                this.sessionState.lastError = error.message;
+                this.lifecycleCallbacks?.onError?.(new Error(error.message));
+                callbacks?.onerror?.(error);
+            },
+            onclose: (event) => {
+                this.sessionState.isActive = false;
+                this.sessionState.endTime = Date.now();
+                this.isSessionActive = false;
+                callbacks?.onclose?.(event);
+            }
+        };
+    }
+    /**
+     * Sends tool execution results back to the AI.
+     *
+     * @param response - The tool response containing function results
+     */
     async sendToolResponse(response) {
+        if (!this.validateToolResponse(response)) {
+            console.error('Invalid tool response format:', response);
+            return;
+        }
+        // Handle both single response and array of responses
+        const responses = Array.isArray(response.functionResponses) ?
+            response.functionResponses :
+            [response.functionResponses];
+        this.sessionState.toolCallCount += responses.length;
         console.log('Sending tool response:', response);
         this.ai.sendToolResponse(response);
     }
+    /**
+     * Validates that a tool response has the correct format.
+     * @param response - The tool response to validate.
+     * @returns True if the response is valid, false otherwise.
+     */
+    validateToolResponse(response) {
+        if (!response.functionResponses) {
+            return false;
+        }
+        // Handle both single response and array of responses
+        const responses = Array.isArray(response.functionResponses) ?
+            response.functionResponses :
+            [response.functionResponses];
+        return responses.every(fr => fr.id && fr.name && fr.response !== undefined);
+    }
+    /**
+     * Helper to create a properly formatted tool response from a ToolResult.
+     *
+     * @param id - The function call ID
+     * @param name - The function name
+     * @param result - The ToolResult from tool execution
+     * @returns A properly formatted FunctionResponse
+     */
+    static createToolResponse(id, name, result) {
+        return {
+            id,
+            name,
+            response: result.success ? { result: result.data } : { error: result.error }
+        };
+    }
+    /**
+     * Gets the current live session state.
+     *
+     * @returns Read-only session state information
+     */
+    getLiveSessionState() {
+        return { ...this.sessionState };
+    }
+    /**
+     * Gets the duration of the session in milliseconds.
+     *
+     * @returns Duration in ms, or null if session hasn't started
+     */
+    getSessionDuration() {
+        if (!this.sessionState.startTime)
+            return null;
+        const endTime = this.sessionState.endTime || Date.now();
+        return endTime - this.sessionState.startTime;
+    }
 }

 /**
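
Reviewer note: `validateToolResponse` requires each function response to carry `id`, `name`, and `response`, and the static `createToolResponse` builds exactly that shape from a `ToolResult`. A sketch of answering a live-session tool call; the `message.toolCall.functionCalls` shape follows the Gemini Live API and is an assumption here:

```js
// Sketch: route a live-session tool call back through the agent (message shape assumed).
async function handleMessage(agent, message) {
  for (const call of message.toolCall?.functionCalls ?? []) {
    const tool = agent.findTool(call.name);
    if (!tool) continue;
    const result = await tool.execute(call.args); // resolves to a ToolResult
    await agent.sendToolResponse({
      functionResponses: xb.SkyboxAgent.createToolResponse(call.id, call.name, result),
    });
  }
}
```
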
@@ -325,7 +529,7 @@ class GetWeatherTool extends Tool {
     /**
      * Executes the tool's action.
      * @param args - The arguments for the tool.
-     * @returns A promise that resolves with
+     * @returns A promise that resolves with a ToolResult containing weather information.
      */
     async execute(args) {
         if (!args.latitude || !args.longitude) {
@@ -338,20 +542,33 @@ class GetWeatherTool extends Tool {
             const data = await response.json();
             if (response.ok) {
                 return {
-
-
+                    success: true,
+                    data: {
+                        temperature: data.current.temperature_2m,
+                        weathercode: data.current.weather_code,
+                    },
+                    metadata: {
+                        latitude: args.latitude,
+                        longitude: args.longitude,
+                        timestamp: Date.now()
+                    }
                 };
             }
             else {
                 return {
-
+                    success: false,
+                    error: 'Could not retrieve weather for the specified location.',
+                    metadata: { latitude: args.latitude, longitude: args.longitude }
                 };
             }
         }
         catch (error) {
             console.error('Error fetching weather:', error);
             return {
-
+                success: false,
+                error: error instanceof Error ? error.message :
+                    'There was an error fetching the weather.',
+                metadata: { latitude: args.latitude, longitude: args.longitude }
             };
         }
     }
@@ -1061,7 +1278,7 @@ class Gemini extends BaseAIModel {
     }
     sendToolResponse(response) {
         if (this.liveSession) {
-            console.
+            console.debug('Sending tool response to gemini:', response);
             this.liveSession.sendToolResponse(response);
         }
     }
@@ -1652,6 +1869,7 @@ void main() {

 uniform vec3 uColor;
 uniform sampler2D uDepthTexture;
+uniform sampler2DArray uDepthTextureArray;
 uniform vec3 uLightDirection;
 uniform vec2 uResolution;
 uniform float uRawValueToMeters;
@@ -1667,6 +1885,7 @@ uniform float uMaxDepth;
 uniform float uDebug;
 uniform float uOpacity;
 uniform bool uUsingFloatDepth;
+uniform bool uIsTextureArray;

 float saturate(in float x) {
     return clamp(x, 0.0, 1.0);
@@ -1701,6 +1920,10 @@ float DepthGetMeters(in sampler2D depth_texture, in vec2 depth_uv) {
     return dot(packedDepthAndVisibility, vec2(255.0, 256.0 * 255.0)) * uRawValueToMeters;
 }

+float DepthArrayGetMeters(in sampler2DArray depth_texture, in vec2 depth_uv) {
+    return uRawValueToMeters * texture(uDepthTextureArray, vec3(depth_uv.x, depth_uv.y, 0)).r;
+}
+
 vec3 DepthGetColorVisualization(in float x) {
     return step(kInvalidDepthThreshold, x) * TurboColormap(x);
 }
@@ -1736,7 +1959,7 @@ void main() {
     vec2 depth_uv = uv;
     depth_uv.y = 1.0 - depth_uv.y;

-    float depth = DepthGetMeters(uDepthTexture, depth_uv) * 8.0;
+    float depth = (uIsTextureArray ? DepthArrayGetMeters(uDepthTextureArray, depth_uv) : DepthGetMeters(uDepthTexture, depth_uv)) * 8.0;
     float normalized_depth =
         saturate((depth - uMinDepth) / (uMaxDepth - uMinDepth));
     gl_FragColor = uOpacity * vec4(TurboColormap(normalized_depth), 1.0);
@@ -1758,6 +1981,8 @@ class DepthMesh extends MeshScript {
         if (options.useDepthTexture || options.showDebugTexture) {
             uniforms = {
                 uDepthTexture: { value: null },
+                uDepthTextureArray: { value: null },
+                uIsTextureArray: { value: 0.0 },
                 uColor: { value: new THREE.Color(0xaaaaaa) },
                 uResolution: { value: new THREE.Vector2(width, height) },
                 uRawValueToMeters: { value: 1.0 },
@@ -1844,18 +2069,97 @@ class DepthMesh extends MeshScript {
         this.geometry.attributes.position.needsUpdate = true;
         const depthTextureLeft = this.depthTextures?.get(0);
         if (depthTextureLeft && this.depthTextureMaterialUniforms) {
-
+            const isTextureArray = depthTextureLeft instanceof THREE.ExternalTexture;
+            this.depthTextureMaterialUniforms.uIsTextureArray.value =
+                isTextureArray ? 1.0 : 0;
+            if (isTextureArray)
+                this.depthTextureMaterialUniforms.uDepthTextureArray.value =
+                    depthTextureLeft;
+            else
+                this.depthTextureMaterialUniforms.uDepthTexture.value =
+                    depthTextureLeft;
             this.depthTextureMaterialUniforms.uMinDepth.value = this.minDepth;
             this.depthTextureMaterialUniforms.uMaxDepth.value = this.maxDepth;
             this.depthTextureMaterialUniforms.uRawValueToMeters.value =
-                this.depthTextures.depthData
-
+                this.depthTextures.depthData.length ?
+                    this.depthTextures.depthData[0].rawValueToMeters :
+                    1.0;
         }
         if (this.options.updateVertexNormals) {
             this.geometry.computeVertexNormals();
         }
         this.updateColliderIfNeeded();
     }
+    updateGPUDepth(depthData) {
+        this.updateDepth(this.convertGPUToGPU(depthData));
+    }
+    convertGPUToGPU(depthData) {
+        if (!this.depthTarget) {
+            this.depthTarget =
+                new THREE.WebGLRenderTarget(depthData.width, depthData.height, {
+                    format: THREE.RedFormat,
+                    type: THREE.FloatType,
+                    internalFormat: 'R32F',
+                    minFilter: THREE.NearestFilter,
+                    magFilter: THREE.NearestFilter,
+                    depthBuffer: false
+                });
+            this.depthTexture = new THREE.ExternalTexture(depthData.texture);
+            const textureProperties = this.renderer.properties.get(this.depthTexture);
+            textureProperties.__webglTexture = depthData.texture;
+            this.gpuPixels = new Float32Array(depthData.width * depthData.height);
+            const depthShader = new THREE.ShaderMaterial({
+                vertexShader: `
+                    varying vec2 vUv;
+                    void main() {
+                        vUv = uv;
+                        vUv.y = 1.0-vUv.y;
+                        gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
+                    }
+                `,
+                fragmentShader: `
+                    precision highp float;
+                    precision highp sampler2DArray;
+
+                    uniform sampler2DArray uTexture;
+                    uniform float uCameraNear;
+                    varying vec2 vUv;
+
+                    void main() {
+                        float z = texture(uTexture, vec3(vUv, 0)).r;
+                        z = uCameraNear / (1.0 - z);
+                        z = clamp(z, 0.0, 20.0);
+                        gl_FragColor = vec4(z, 0, 0, 1.0);
+                    }
+                `,
+                uniforms: {
+                    uTexture: { value: this.depthTexture },
+                    uCameraNear: { value: depthData.depthNear }
+                },
+                blending: THREE.NoBlending,
+                depthTest: false,
+                depthWrite: false,
+                side: THREE.DoubleSide
+            });
+            const depthMesh = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), depthShader);
+            this.depthScene = new THREE.Scene();
+            this.depthScene.add(depthMesh);
+            this.depthCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
+        }
+        const originalRenderTarget = this.renderer.getRenderTarget();
+        this.renderer.xr.enabled = false;
+        this.renderer.setRenderTarget(this.depthTarget);
+        this.renderer.render(this.depthScene, this.depthCamera);
+        this.renderer.readRenderTargetPixels(this.depthTarget, 0, 0, depthData.width, depthData.height, this.gpuPixels, 0);
+        this.renderer.xr.enabled = true;
+        this.renderer.setRenderTarget(originalRenderTarget);
+        return {
+            width: depthData.width,
+            height: depthData.height,
+            data: this.gpuPixels.buffer,
+            rawValueToMeters: depthData.rawValueToMeters
+        };
+    }
     /**
      * Method to manually update the full resolution geometry.
      * Only needed if options.updateFullResolutionGeometry is false.
@@ -2127,33 +2431,34 @@ class DepthTextures {
         this.options = options;
         this.uint16Arrays = [];
         this.uint8Arrays = [];
-        this.
+        this.dataTextures = [];
+        this.nativeTextures = [];
         this.depthData = [];
     }
-
-    if (this.
-        this.
+    createDataDepthTextures(depthData, view_id) {
+        if (this.dataTextures[view_id]) {
+            this.dataTextures[view_id].dispose();
         }
         if (this.options.useFloat32) {
             const typedArray = new Uint16Array(depthData.width * depthData.height);
             const format = THREE.RedFormat;
             const type = THREE.HalfFloatType;
             this.uint16Arrays[view_id] = typedArray;
-            this.
+            this.dataTextures[view_id] = new THREE.DataTexture(typedArray, depthData.width, depthData.height, format, type);
         }
         else {
             const typedArray = new Uint8Array(depthData.width * depthData.height * 2);
             const format = THREE.RGFormat;
             const type = THREE.UnsignedByteType;
             this.uint8Arrays[view_id] = typedArray;
-            this.
+            this.dataTextures[view_id] = new THREE.DataTexture(typedArray, depthData.width, depthData.height, format, type);
         }
     }
-
-    if (this.
-    this.
-    this.
-    this.
+    updateData(depthData, view_id) {
+        if (this.dataTextures.length < view_id + 1 ||
+            this.dataTextures[view_id].image.width !== depthData.width ||
+            this.dataTextures[view_id].image.height !== depthData.height) {
+            this.createDataDepthTextures(depthData, view_id);
         }
         if (this.options.useFloat32) {
             const float32Data = new Float32Array(depthData.data);
@@ -2166,11 +2471,26 @@ class DepthTextures {
         else {
             this.uint8Arrays[view_id].set(new Uint8Array(depthData.data));
         }
-        this.
+        this.dataTextures[view_id].needsUpdate = true;
         this.depthData[view_id] = depthData;
     }
+    updateNativeTexture(depthData, renderer, view_id) {
+        if (this.dataTextures.length < view_id + 1) {
+            this.nativeTextures[view_id] = new THREE.ExternalTexture(depthData.texture);
+        }
+        else {
+            this.nativeTextures[view_id].sourceTexture = depthData.texture;
+        }
+        // fixed in newer revision of three
+        const textureProperties = renderer.properties.get(this.nativeTextures[view_id]);
+        textureProperties.__webglTexture = depthData.texture;
+        textureProperties.__version = 1;
+    }
     get(view_id) {
-
+        if (this.dataTextures.length > 0) {
+            return this.dataTextures[view_id];
+        }
+        return this.nativeTextures[view_id];
     }
 }

@@ -2370,10 +2690,15 @@ class OcclusionMapMeshMaterial extends THREE.MeshBasicMaterial {
         super();
         this.uniforms = {
             uDepthTexture: { value: null },
+            uDepthTextureArray: { value: null },
+            uViewId: { value: 0.0 },
+            uIsTextureArray: { value: 0.0 },
             uRawValueToMeters: { value: 8.0 / 65536.0 },
             cameraFar: { value: camera.far },
             cameraNear: { value: camera.near },
             uFloatDepth: { value: useFloatDepth },
+            // Used for interpreting Quest 3 depth.
+            uDepthNear: { value: 0 }
         };
         this.onBeforeCompile = (shader) => {
             Object.assign(shader.uniforms, this.uniforms);
@@ -2395,10 +2720,13 @@ class OcclusionMapMeshMaterial extends THREE.MeshBasicMaterial {
             shader.fragmentShader
                 .replace('uniform vec3 diffuse;', [
                     'uniform vec3 diffuse;', 'uniform sampler2D uDepthTexture;',
+                    'uniform sampler2DArray uDepthTextureArray;',
                     'uniform float uRawValueToMeters;',
                     'uniform float cameraNear;', 'uniform float cameraFar;',
-                    'uniform bool uFloatDepth;',
-                    '
+                    'uniform bool uFloatDepth;',
+                    'uniform bool uIsTextureArray;',
+                    'uniform float uDepthNear;', 'uniform int uViewId;',
+                    'varying vec2 vTexCoord;', 'varying float vVirtualDepth;'
                 ].join('\n'))
                 .replace('#include <clipping_planes_pars_fragment>', [
                     '#include <clipping_planes_pars_fragment>', `
@@ -2412,13 +2740,17 @@ class OcclusionMapMeshMaterial extends THREE.MeshBasicMaterial {
         }
         return dot(packedDepthAndVisibility, vec2(255.0, 256.0 * 255.0)) * uRawValueToMeters;
     }
+    float DepthArrayGetMeters(in sampler2DArray depth_texture, in vec2 depth_uv) {
+        float textureValue = texture(depth_texture, vec3(depth_uv.x, depth_uv.y, uViewId)).r;
+        return uRawValueToMeters * uDepthNear / (1.0 - textureValue);
+    }
 `
                 ].join('\n'))
                 .replace('#include <dithering_fragment>', [
                     '#include <dithering_fragment>',
                     'vec4 texCoord = vec4(vTexCoord, 0, 1);',
-                    'vec2 uv = vec2(texCoord.x, 1.0 - texCoord.y);',
-                    'highp float real_depth = DepthGetMeters(uDepthTexture, uv);',
+                    'vec2 uv = vec2(texCoord.x, uIsTextureArray?texCoord.y:(1.0 - texCoord.y));',
+                    'highp float real_depth = uIsTextureArray ? DepthArrayGetMeters(uDepthTextureArray, uv) : DepthGetMeters(uDepthTexture, uv);',
                     'gl_FragColor = vec4(step(vVirtualDepth, real_depth), 1.0, 0.0, 1.0);'
                 ].join('\n'));
         };
@@ -2450,10 +2782,14 @@ class OcclusionPass extends Pass {
         this.renderToScreen = renderToScreen;
         this.occludableItemsLayer = occludableItemsLayer;
         this.depthTextures = [];
+        this.depthNear = [];
         this.occlusionMeshMaterial =
             new OcclusionMapMeshMaterial(camera, useFloatDepth);
         this.occlusionMapUniforms = {
             uDepthTexture: { value: null },
+            uDepthTextureArray: { value: null },
+            uViewId: { value: 0.0 },
+            uIsTextureArray: { value: 0.0 },
             uUvTransform: { value: new THREE.Matrix4() },
             uRawValueToMeters: { value: 8.0 / 65536.0 },
             uAlpha: { value: 0.75 },
@@ -2510,14 +2846,12 @@ class OcclusionPass extends Pass {
         });
         return new FullScreenQuad(kawase1Material);
     }
-    setDepthTexture(depthTexture, rawValueToMeters,
-
-        return;
-    }
-    this.depthTextures[view_id] = depthTexture;
+    setDepthTexture(depthTexture, rawValueToMeters, viewId, depthNear) {
+        this.depthTextures[viewId] = depthTexture;
         this.occlusionMapUniforms.uRawValueToMeters.value = rawValueToMeters;
         this.occlusionMeshMaterial.uniforms.uRawValueToMeters.value =
             rawValueToMeters;
+        this.depthNear[viewId] = depthNear;
         depthTexture.needsUpdate = true;
     }
     /**
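
Reviewer note: `setDepthTexture` gained `viewId` and `depthNear` parameters, so the pass tracks one depth texture and near-plane value per view. A sketch of per-eye wiring mirroring how `Depth.renderOcclusionPass()` calls it later in this diff; `leftTexture`, `rightTexture`, and the near values are placeholders:

```js
// Sketch: feed per-view depth into the occlusion pass (placeholder inputs).
occlusionPass.setDepthTexture(leftTexture, rawValueToMeters, /* viewId= */ 0, leftDepthNear);
occlusionPass.setDepthTexture(rightTexture, rawValueToMeters, /* viewId= */ 1, rightDepthNear);
```
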
@@ -2525,16 +2859,16 @@ class OcclusionPass extends Pass {
      * @param renderer - The three.js renderer.
      * @param writeBuffer - The buffer to write the final result.
      * @param readBuffer - The buffer for the current of virtual depth.
-     * @param
+     * @param viewId - The view to render.
      */
-    render(renderer, writeBuffer, readBuffer,
+    render(renderer, writeBuffer, readBuffer, viewId = 0) {
         const originalRenderTarget = renderer.getRenderTarget();
         const dimensions = new THREE.Vector2();
         if (readBuffer == null) {
-            this.renderOcclusionMapFromScene(renderer, dimensions,
+            this.renderOcclusionMapFromScene(renderer, dimensions, viewId);
         }
         else {
-            this.renderOcclusionMapFromReadBuffer(renderer, readBuffer, dimensions,
+            this.renderOcclusionMapFromReadBuffer(renderer, readBuffer, dimensions, viewId);
         }
         // Blur the occlusion map
         this.blurOcclusionMap(renderer, dimensions);
@@ -2542,16 +2876,27 @@ class OcclusionPass extends Pass {
         this.applyOcclusionMapToRenderedImage(renderer, readBuffer, writeBuffer);
         renderer.setRenderTarget(originalRenderTarget);
     }
-    renderOcclusionMapFromScene(renderer, dimensions,
+    renderOcclusionMapFromScene(renderer, dimensions, viewId) {
         // Compute our own read buffer.
-        this.
-
+        const texture = this.depthTextures[viewId];
+        const isTextureArray = texture instanceof THREE.ExternalTexture;
+        this.occlusionMeshMaterial.uniforms.uIsTextureArray.value =
+            isTextureArray ? 1.0 : 0;
+        this.occlusionMeshMaterial.uniforms.uViewId.value = viewId;
+        if (isTextureArray) {
+            this.occlusionMeshMaterial.uniforms.uDepthTextureArray.value = texture;
+            this.occlusionMeshMaterial.uniforms.uDepthNear.value =
+                this.depthNear[viewId];
+        }
+        else {
+            this.occlusionMeshMaterial.uniforms.uDepthTexture.value = texture;
+        }
         this.scene.overrideMaterial = this.occlusionMeshMaterial;
         renderer.getDrawingBufferSize(dimensions);
         this.occlusionMapTexture.setSize(dimensions.x, dimensions.y);
         const renderTarget = this.occlusionMapTexture;
         renderer.setRenderTarget(renderTarget);
-        const camera = renderer.xr.getCamera().cameras[
+        const camera = renderer.xr.getCamera().cameras[viewId] || this.camera;
         const originalCameraLayers = Array.from(Array(32).keys())
             .filter(element => camera.layers.isEnabled(element));
         camera.layers.set(this.occludableItemsLayer);
@@ -2562,12 +2907,24 @@ class OcclusionPass extends Pass {
         });
         this.scene.overrideMaterial = null;
     }
-    renderOcclusionMapFromReadBuffer(renderer, readBuffer, dimensions,
+    renderOcclusionMapFromReadBuffer(renderer, readBuffer, dimensions, viewId) {
         // Convert the readBuffer into an occlusion map.
         // Render depth into texture
         this.occlusionMapUniforms.tDiffuse.value = readBuffer.texture;
         this.occlusionMapUniforms.tDepth.value = readBuffer.depthTexture;
-
+        const texture = this.depthTextures[viewId];
+        const isTextureArray = texture instanceof THREE.ExternalTexture;
+        this.occlusionMeshMaterial.uniforms.uIsTextureArray.value =
+            isTextureArray ? 1.0 : 0;
+        this.occlusionMeshMaterial.uniforms.uViewId.value = viewId;
+        if (isTextureArray) {
+            this.occlusionMeshMaterial.uniforms.uDepthTextureArray.value = texture;
+            this.occlusionMeshMaterial.uniforms.uDepthNear.value =
+                this.depthNear[viewId];
+        }
+        else {
+            this.occlusionMeshMaterial.uniforms.uDepthTexture.value = texture;
+        }
         // First render the occlusion map to an intermediate buffer.
         renderer.getDrawingBufferSize(dimensions);
         this.occlusionMapTexture.setSize(dimensions.x, dimensions.y);
@@ -2630,7 +2987,8 @@ class Depth {
     constructor() {
         this.projectionMatrixInverse = new THREE.Matrix4();
         this.view = [];
-        this.
+        this.cpuDepthData = [];
+        this.gpuDepthData = [];
        this.depthArray = [];
        this.options = new DepthOptions();
        this.width = DEFAULT_DEPTH_WIDTH;
@@ -2722,8 +3080,8 @@ class Depth {
         vertexPosition.multiplyScalar(-depth / vertexPosition.z);
         return vertexPosition;
     }
-
-    this.
+    updateCPUDepthData(depthData, view_id = 0) {
+        this.cpuDepthData[view_id] = depthData;
         // Workaround for b/382679381.
         this.rawValueToMeters = depthData.rawValueToMeters;
         if (this.options.useFloat32) {
@@ -2744,12 +3102,52 @@ class Depth {
         }
         // Updates Depth Texture.
         if (this.options.depthTexture.enabled && this.depthTextures) {
-            this.depthTextures.
+            this.depthTextures.updateData(depthData, view_id);
         }
         if (this.options.depthMesh.enabled && this.depthMesh && view_id == 0) {
             this.depthMesh.updateDepth(depthData);
         }
     }
+    updateGPUDepthData(depthData, view_id = 0) {
+        this.gpuDepthData[view_id] = depthData;
+        // Workaround for b/382679381.
+        this.rawValueToMeters = depthData.rawValueToMeters;
+        if (this.options.useFloat32) {
+            this.rawValueToMeters = 1.0;
+        }
+        // For now, assume that we need cpu depth only if depth mesh is enabled.
+        // In the future, add a separate option.
+        const needCpuDepth = this.options.depthMesh.enabled;
+        const cpuDepth = needCpuDepth && this.depthMesh ?
+            this.depthMesh.convertGPUToGPU(depthData) :
+            null;
+        if (cpuDepth) {
+            if (this.depthArray[view_id] == null) {
+                this.depthArray[view_id] = this.options.useFloat32 ?
+                    new Float32Array(cpuDepth.data) :
+                    new Uint16Array(cpuDepth.data);
+                this.width = cpuDepth.width;
+                this.height = cpuDepth.height;
+            }
+            else {
+                // Copies the data from an ArrayBuffer to the existing TypedArray.
+                this.depthArray[view_id].set(this.options.useFloat32 ? new Float32Array(cpuDepth.data) :
+                    new Uint16Array(cpuDepth.data));
+            }
+        }
+        // Updates Depth Texture.
+        if (this.options.depthTexture.enabled && this.depthTextures) {
+            this.depthTextures.updateNativeTexture(depthData, this.renderer, view_id);
+        }
+        if (this.options.depthMesh.enabled && this.depthMesh && view_id == 0) {
+            if (cpuDepth) {
+                this.depthMesh.updateDepth(cpuDepth);
+            }
+            else {
+                this.depthMesh.updateGPUDepth(depthData);
+            }
+        }
+    }
     getTexture(view_id) {
         if (!this.options.depthTexture.enabled)
             return undefined;
@@ -2775,6 +3173,7 @@ class Depth {
         if (!frame)
             return;
         const session = frame.session;
+        const binding = this.renderer.xr.getBinding();
         // Enable or disable depth based on the number of clients.
         const pausingDepthSupported = session.depthActive !== undefined;
         if (pausingDepthSupported && this.depthClientsInitialized) {
@@ -2803,11 +3202,20 @@ class Depth {
         for (let view_id = 0; view_id < pose.views.length; ++view_id) {
             const view = pose.views[view_id];
             this.view[view_id] = view;
-
-
-
+            if (session.depthUsage === 'gpu-optimized') {
+                const depthData = binding.getDepthInformation(view);
+                if (!depthData) {
+                    return;
+                }
+                this.updateGPUDepthData(depthData, view_id);
+            }
+            else {
+                const depthData = frame.getDepthInformation(view);
+                if (!depthData) {
+                    return;
+                }
+                this.updateCPUDepthData(depthData, view_id);
             }
-            this.updateDepthData(depthData, view_id);
         }
     }
     else {
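
Reviewer note: the `session.depthUsage === 'gpu-optimized'` branch maps onto the WebXR Depth Sensing API, where depth arrives as a GPU texture via `XRWebGLBinding.getDepthInformation(view)` rather than a CPU buffer via `frame.getDepthInformation(view)`. For context, this is how a page requests that mode at session creation (standard WebXR, not part of this diff):

```js
// Standard WebXR Depth Sensing request; the UA picks one usage and one format.
const session = await navigator.xr.requestSession('immersive-ar', {
  requiredFeatures: ['depth-sensing'],
  depthSensing: {
    usagePreference: ['gpu-optimized', 'cpu-optimized'],
    dataFormatPreference: ['float32', 'luminance-alpha'],
  },
});
console.log(session.depthUsage, session.depthDataFormat);
```
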
@@ -2818,11 +3226,13 @@ class Depth {
     renderOcclusionPass() {
         const leftDepthTexture = this.getTexture(0);
         if (leftDepthTexture) {
-            this.occlusionPass.setDepthTexture(leftDepthTexture, this.rawValueToMeters, 0
+            this.occlusionPass.setDepthTexture(leftDepthTexture, this.rawValueToMeters, 0, this.gpuDepthData[0]
+                ?.depthNear);
         }
         const rightDepthTexture = this.getTexture(1);
         if (rightDepthTexture) {
-            this.occlusionPass.setDepthTexture(rightDepthTexture, this.rawValueToMeters, 1
+            this.occlusionPass.setDepthTexture(rightDepthTexture, this.rawValueToMeters, 1, this.gpuDepthData[1]
+                ?.depthNear);
         }
         const xrIsPresenting = this.renderer.xr.isPresenting;
         this.renderer.xr.isPresenting = false;
@@ -2833,7 +3243,7 @@ class Depth {
         }
     }
     debugLog() {
-        const arrayBuffer = this.
+        const arrayBuffer = this.cpuDepthData[0].data;
         const uint8Array = new Uint8Array(arrayBuffer);
         // Convert Uint8Array to a string where each character represents a byte
         const binaryString = Array.from(uint8Array, byte => String.fromCharCode(byte)).join('');
@@ -3450,6 +3860,9 @@ class Registry {
     }
 }

+// Use a small canvas since a full size canvas can consume a lot of memory and
+// cause toDataUrl to be slow.
+const DEFAULT_CANVAS_WIDTH = 640;
 function flipBufferVertically(buffer, width, height) {
     const bytesPerRow = width * 4;
     const tempRow = new Uint8Array(bytesPerRow);
@@ -3475,8 +3888,9 @@ class ScreenshotSynthesizer {
         this.pendingScreenshotRequests = [];
         this.virtualBuffer = new Uint8Array();
         this.virtualRealBuffer = new Uint8Array();
+        this.renderTargetWidth = DEFAULT_CANVAS_WIDTH;
     }
-    async onAfterRender(renderer, deviceCamera) {
+    async onAfterRender(renderer, renderSceneFn, deviceCamera) {
         if (this.pendingScreenshotRequests.length == 0) {
             return;
         }
@@ -3486,13 +3900,14 @@ class ScreenshotSynthesizer {
         }
         const haveVirtualOnlyRequests = this.pendingScreenshotRequests.every((request) => !request.overlayOnCamera);
         if (haveVirtualOnlyRequests) {
-            this.createVirtualImageDataURL(renderer)
+            this.createVirtualImageDataURL(renderer, renderSceneFn)
+                .then((virtualImageDataUrl) => {
                 this.resolveVirtualOnlyRequests(virtualImageDataUrl);
             });
         }
         const haveVirtualAndRealReqeusts = this.pendingScreenshotRequests.some((request) => request.overlayOnCamera);
         if (haveVirtualAndRealReqeusts && deviceCamera) {
-            this.createVirtualRealImageDataURL(renderer, deviceCamera)
+            this.createVirtualRealImageDataURL(renderer, renderSceneFn, deviceCamera)
                 .then((virtualRealImageDataUrl) => {
                 if (virtualRealImageDataUrl) {
                     this.resolveVirtualRealRequests(virtualRealImageDataUrl);
@@ -3503,25 +3918,42 @@ class ScreenshotSynthesizer {
             throw new Error('No device camera provided');
         }
     }
-    async createVirtualImageDataURL(renderer) {
-        const
-
-
-
+    async createVirtualImageDataURL(renderer, renderSceneFn) {
+        const mainRenderTarget = renderer.getRenderTarget();
+        const isRenderingStereo = renderer.xr.isPresenting && renderer.xr.getCamera().cameras.length == 2;
+        const mainRenderTargetSingleViewWidth = isRenderingStereo ? mainRenderTarget.width / 2 : mainRenderTarget.width;
+        const scaledHeight = Math.round(mainRenderTarget.height *
+            (this.renderTargetWidth / mainRenderTargetSingleViewWidth));
+        if (!this.virtualRenderTarget ||
+            this.virtualRenderTarget.width != this.renderTargetWidth) {
+            this.virtualRenderTarget?.dispose();
+            this.virtualRenderTarget = new THREE.WebGLRenderTarget(this.renderTargetWidth, scaledHeight, { colorSpace: THREE.SRGBColorSpace });
+        }
+        const xrIsPresenting = renderer.xr.isPresenting;
+        renderer.xr.isPresenting = false;
+        const virtualRenderTarget = this.virtualRenderTarget;
+        renderer.setRenderTarget(virtualRenderTarget);
+        renderer.clearColor();
+        renderer.clearDepth();
+        renderSceneFn();
+        renderer.setRenderTarget(mainRenderTarget);
+        renderer.xr.isPresenting = xrIsPresenting;
+        const expectedBufferLength = virtualRenderTarget.width * virtualRenderTarget.height * 4;
+        if (this.virtualBuffer.length != expectedBufferLength) {
+            this.virtualBuffer = new Uint8Array(expectedBufferLength);
         }
         const buffer = this.virtualBuffer;
-        await renderer.readRenderTargetPixelsAsync(
-        flipBufferVertically(buffer,
+        await renderer.readRenderTargetPixelsAsync(virtualRenderTarget, 0, 0, virtualRenderTarget.width, virtualRenderTarget.height, buffer);
+        flipBufferVertically(buffer, virtualRenderTarget.width, virtualRenderTarget.height);
         const canvas = this.virtualCanvas ||
             (this.virtualCanvas = document.createElement('canvas'));
-        canvas.width =
-        canvas.height =
+        canvas.width = virtualRenderTarget.width;
+        canvas.height = virtualRenderTarget.height;
         const context = canvas.getContext('2d');
         if (!context) {
             throw new Error('Failed to get 2D context');
         }
-        const imageData = new ImageData(new Uint8ClampedArray(buffer),
+        const imageData = new ImageData(new Uint8ClampedArray(buffer), virtualRenderTarget.width, virtualRenderTarget.height);
         context.putImageData(imageData, 0, 0);
         return canvas.toDataURL();
     }
@@ -3538,24 +3970,31 @@ class ScreenshotSynthesizer {
         }
         this.pendingScreenshotRequests.length = remainingRequests;
     }
-    async createVirtualRealImageDataURL(renderer, deviceCamera) {
+    async createVirtualRealImageDataURL(renderer, renderSceneFn, deviceCamera) {
         if (!deviceCamera.loaded) {
-            console.
+            console.debug('Waiting for device camera to be loaded');
             return null;
         }
-
-
-
-        const
+        const mainRenderTarget = renderer.getRenderTarget();
+        const isRenderingStereo = renderer.xr.isPresenting && renderer.xr.getCamera().cameras.length == 2;
+        const mainRenderTargetSingleViewWidth = isRenderingStereo ? mainRenderTarget.width / 2 : mainRenderTarget.width;
+        const scaledHeight = Math.round(mainRenderTarget.height *
+            (this.renderTargetWidth / mainRenderTargetSingleViewWidth));
+        if (!this.virtualRealRenderTarget ||
+            this.virtualRealRenderTarget.height != scaledHeight) {
+            this.virtualRealRenderTarget?.dispose();
+            this.virtualRealRenderTarget = new THREE.WebGLRenderTarget(this.renderTargetWidth, scaledHeight, { colorSpace: THREE.SRGBColorSpace });
+        }
+        const renderTarget = this.virtualRealRenderTarget;
         renderer.setRenderTarget(renderTarget);
+        const xrIsPresenting = renderer.xr.isPresenting;
+        renderer.xr.isPresenting = false;
         const quad = this.getFullScreenQuad();
         quad.material.map = deviceCamera.texture;
         quad.render(renderer);
-
-
-
-        renderer.setRenderTarget(virtualRenderTarget);
+        renderSceneFn();
+        renderer.xr.isPresenting = xrIsPresenting;
+        renderer.setRenderTarget(mainRenderTarget);
         if (this.virtualRealBuffer.length !=
             renderTarget.width * renderTarget.height * 4) {
             this.virtualRealBuffer =
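
Reviewer note: `onAfterRender` and both `createVirtual*ImageDataURL` helpers now take a `renderSceneFn` callback so the synthesizer can re-render the scene into its own downscaled render target. The updated call site in core is not shown in this diff, so the closure below is illustrative only:

```js
// Sketch: the caller supplies a closure that re-renders the virtual scene.
await screenshotSynthesizer.onAfterRender(
    renderer,
    () => renderer.render(scene, camera), // renderSceneFn (assumed wiring)
    deviceCamera);
```
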
@@ -3912,7 +4351,6 @@ class XRButton {
         this.sessionManager.addEventListener(WebXRSessionEventType.READY, () => this.onSessionReady());
         this.sessionManager.addEventListener(WebXRSessionEventType.SESSION_START, () => this.onSessionStarted());
         this.sessionManager.addEventListener(WebXRSessionEventType.SESSION_END, this.onSessionEnded.bind(this));
-        this.sessionManager.initialize();
     }
     createSimulatorButton() {
         this.simulatorButtonElement.classList.add(XRBUTTON_CLASS);
@@ -5503,6 +5941,8 @@ class SimulatorOptions {
         // canvas.
         // This is a temporary option until we figure out why splats look faded.
         this.renderToRenderTexture = true;
+        // Blending mode when rendering the virtual scene.
+        this.blendingMode = 'normal';
         deepMerge(this, options);
     }
 }
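
Reviewer note: `blendingMode: 'screen'` switches the simulator's virtual-scene composite to screen blending (see the `THREE.CustomBlending` setup in the `Simulator` hunk below). A sketch, assuming simulator options hang off the top-level `Options` object the way other sub-options do:

```js
// Sketch: opt the simulator into screen blending (options path assumed).
const options = new xb.Options();
options.simulator.blendingMode = 'screen'; // default: 'normal'
```
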
@@ -5700,6 +6140,13 @@ class Options {
          * Whether to request a stencil buffer.
          */
         this.stencil = false;
+        /**
+         * Any additional required features when initializing webxr.
+         */
+        this.webxrRequiredFeatures = [];
+        // "local-floor" sets the scene origin at the user's feet,
+        // "local" sets the scene origin near their head.
+        this.referenceSpaceType = 'local-floor';
         this.controllers = new InputOptions();
         this.depth = new DepthOptions();
         this.lighting = new LightingOptions();
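
Reviewer note: usage of the two new `Options` fields; the `xb.init(options)` entry point is assumed from typical xrblocks samples and is not shown in this diff:

```js
// Sketch: request extra WebXR features and a head-relative origin.
const options = new xb.Options();
options.webxrRequiredFeatures = ['hand-tracking'];
options.referenceSpaceType = 'local'; // default: 'local-floor'
await xb.init(options); // entry point assumed
```
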
@@ -6459,7 +6906,7 @@ class SimulatorDepth {
             data: this.depthBuffer.buffer,
             rawValueToMeters: 1.0,
         };
-        this.depth.
+        this.depth.updateCPUDepthData(depthData, 0);
     }
 }

@@ -8253,8 +8700,17 @@ class Simulator extends Script {
             this.setupStereoCameras(camera);
         }
         this.virtualSceneRenderTarget = new THREE.WebGLRenderTarget(renderer.domElement.width, renderer.domElement.height, { stencilBuffer: options.stencil });
+        const virtualSceneMaterial = new THREE.MeshBasicMaterial({ map: this.virtualSceneRenderTarget.texture, transparent: true });
+        if (this.options.blendingMode === 'screen') {
+            virtualSceneMaterial.blending = THREE.CustomBlending;
+            virtualSceneMaterial.blendSrc = THREE.OneFactor;
+            virtualSceneMaterial.blendDst =
+                THREE.OneMinusSrcColorFactor;
+            virtualSceneMaterial.blendEquation =
+                THREE.AddEquation;
+        }
         this.virtualSceneFullScreenQuad =
-            new FullScreenQuad(
+            new FullScreenQuad(virtualSceneMaterial);
         this.renderer = renderer;
         this.mainCamera = camera;
         this.mainScene = scene;
@@ -8365,6 +8821,8 @@ class AudioListener extends Script {
         super();
         this.isCapturing = false;
         this.latestAudioBuffer = null;
+        this.accumulatedChunks = [];
+        this.isAccumulating = false;
         this.options = {
             sampleRate: 16000,
             channelCount: 1,
@@ -8385,6 +8843,10 @@ class AudioListener extends Script {
             return;
         this.onAudioData = callbacks.onAudioData;
         this.onError = callbacks.onError;
+        this.isAccumulating = callbacks.accumulate || false;
+        if (this.isAccumulating) {
+            this.accumulatedChunks = [];
+        }
         try {
             await this.setupAudioCapture();
             this.isCapturing = true;
@@ -8414,6 +8876,10 @@ class AudioListener extends Script {
         this.processorNode.port.onmessage = (event) => {
             if (event.data.type === 'audioData') {
                 this.latestAudioBuffer = event.data.data;
+                // Accumulate chunks if requested
+                if (this.isAccumulating) {
+                    this.accumulatedChunks.push(event.data.data);
+                }
                 this.onAudioData?.(event.data.data);
                 this.streamToAI(event.data.data);
             }
@@ -8473,6 +8939,8 @@ class AudioListener extends Script {
         this.onAudioData = undefined;
         this.onError = undefined;
         this.latestAudioBuffer = null;
+        this.accumulatedChunks = [];
+        this.isAccumulating = false;
         this.aiService = undefined;
     }
     static isSupported() {
@@ -8487,6 +8955,34 @@ class AudioListener extends Script {
     clearLatestAudioBuffer() {
         this.latestAudioBuffer = null;
     }
+    /**
+     * Gets all accumulated audio chunks as a single combined buffer
+     */
+    getAccumulatedBuffer() {
+        if (this.accumulatedChunks.length === 0)
+            return null;
+        const totalLength = this.accumulatedChunks.reduce((sum, chunk) => sum + chunk.byteLength, 0);
+        const combined = new ArrayBuffer(totalLength);
+        const combinedArray = new Uint8Array(combined);
+        let offset = 0;
+        for (const chunk of this.accumulatedChunks) {
+            combinedArray.set(new Uint8Array(chunk), offset);
+            offset += chunk.byteLength;
+        }
+        return combined;
+    }
+    /**
+     * Clears accumulated chunks
+     */
+    clearAccumulatedBuffer() {
+        this.accumulatedChunks = [];
+    }
+    /**
+     * Gets the number of accumulated chunks
+     */
+    getAccumulatedChunkCount() {
+        return this.accumulatedChunks.length;
+    }
     dispose() {
         this.stopCapture();
         super.dispose();
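
Reviewer note: with `accumulate: true`, each PCM chunk from the worklet is retained, and `getAccumulatedBuffer()` concatenates them into one `ArrayBuffer`. Stopping capture clears the accumulated chunks (see the hunk above), so read the buffer first. The method name `startCapture` is inferred from the `stopCapture`/`dispose` calls in this diff:

```js
// Sketch: record a short clip by accumulating capture chunks.
await audioListener.startCapture({ accumulate: true }); // method name inferred
// ...later, before stopping (stopCapture clears the accumulated chunks):
const pcm = audioListener.getAccumulatedBuffer(); // one combined ArrayBuffer, or null
console.log('chunks:', audioListener.getAccumulatedChunkCount());
audioListener.stopCapture();
```
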
@@ -8498,15 +8994,55 @@ class AudioPlayer extends Script {
         super();
         this.options = {};
         this.audioQueue = [];
-        this.isPlaying = false;
         this.nextStartTime = 0;
+        this.volume = 1.0;
+        this.category = 'speech';
         this.options = { sampleRate: 24000, channelCount: 1, ...options };
+        if (options.category) {
+            this.category = options.category;
+        }
+    }
+    /**
+     * Sets the CategoryVolumes instance for this player to respect
+     * master/category volumes
+     */
+    setCategoryVolumes(categoryVolumes) {
+        this.categoryVolumes = categoryVolumes;
+        this.updateGainNodeVolume();
+    }
+    /**
+     * Sets the specific volume for this player (0.0 to 1.0)
+     */
+    setVolume(level) {
+        this.volume = Math.max(0, Math.min(1, level));
+        this.updateGainNodeVolume();
+    }
+    /**
+     * Updates the gain node volume based on category volumes
+     * Public so CoreSound can update it when master volume changes
+     */
+    updateGainNodeVolume() {
+        if (this.gainNode && this.categoryVolumes) {
+            const effectiveVolume = this.categoryVolumes.getEffectiveVolume(this.category, this.volume);
+            this.gainNode.gain.value = effectiveVolume;
+        }
+        else if (this.gainNode) {
+            this.gainNode.gain.value = this.volume;
+        }
     }
     async initializeAudioContext() {
         if (!this.audioContext) {
             this.audioContext =
                 new AudioContext({ sampleRate: this.options.sampleRate });
             this.nextStartTime = this.audioContext.currentTime;
+            // Create gain node for volume control
+            this.gainNode = this.audioContext.createGain();
+            this.gainNode.connect(this.audioContext.destination);
+            this.updateGainNodeVolume();
+        }
+        // Ensure audio context is running (not suspended)
+        if (this.audioContext.state === 'suspended') {
+            await this.audioContext.resume();
         }
     }
     async playAudioChunk(base64AudioData) {
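
`AudioPlayer` output now runs through a `GainNode`, letting a per-player volume compose with the shared category and master volumes. `CategoryVolumes.getEffectiveVolume` is only visible here as a call site; a plausible reading (an assumption, not shown in this diff) is the product of the clamped factors:

```js
// Assumed sketch: only the call site getEffectiveVolume(category, playerVolume)
// appears in this diff; the composition below is a guess at its behavior.
class CategoryVolumes {
  constructor() {
    this.masterVolume = 1.0;
    this.volumes = { speech: 1.0, music: 1.0, effects: 1.0 }; // category names assumed
  }
  getEffectiveVolume(category, playerVolume) {
    const clamp01 = (v) => Math.max(0, Math.min(1, v));
    return clamp01(this.masterVolume) *
        clamp01(this.volumes[category] ?? 1.0) *
        clamp01(playerVolume);
  }
}

// e.g. master 0.8, 'speech' category 0.5, player volume 1.0 -> gain of 0.4
```
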
@@ -8521,31 +9057,31 @@ class AudioPlayer extends Script {
             channelData[i] = int16View[i] / 32768.0;
         }
         this.audioQueue.push(audioBuffer);
-
-
-
-
-
-        this.
-
+        this.scheduleAudioBuffers();
+    }
+    scheduleAudioBuffers() {
+        const SCHEDULE_AHEAD_TIME = 0.2;
+        while (this.audioQueue.length > 0 &&
+            this.nextStartTime <=
+                this.audioContext.currentTime + SCHEDULE_AHEAD_TIME) {
+            const audioBuffer = this.audioQueue.shift();
+            const currentTime = this.audioContext.currentTime;
+            const startTime = Math.max(this.nextStartTime, currentTime);
+            const source = this.audioContext.createBufferSource();
+            source.buffer = audioBuffer;
+            // Connect through gain node for volume control
+            source.connect(this.gainNode || this.audioContext.destination);
+            source.onended = () => this.scheduleAudioBuffers();
+            // Start playback
+            source.start(startTime);
+            this.nextStartTime = startTime + audioBuffer.duration;
         }
-        this.isPlaying = true;
-        const audioBuffer = this.audioQueue.shift();
-        const currentTime = this.audioContext.currentTime;
-        const startTime = Math.max(this.nextStartTime, currentTime);
-        const source = this.audioContext.createBufferSource();
-        source.buffer = audioBuffer;
-        source.connect(this.audioContext.destination);
-        source.onended = () => this.playNextAudioBuffer();
-        source.start(startTime);
-        this.nextStartTime = startTime + audioBuffer.duration;
     }
     clearQueue() {
         this.audioQueue = [];
-        this.isPlaying = false;
     }
     getIsPlaying() {
-        return this.
+        return this.nextStartTime > this.audioContext.currentTime;
     }
     getQueueLength() {
         return this.audioQueue.length;
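
The `isPlaying`/`playNextAudioBuffer` pair (one source at a time, re-triggered from `onended`) is replaced with `scheduleAudioBuffers`, the usual Web Audio look-ahead pattern: every queued buffer whose slot falls within the next 200 ms is scheduled on the audio clock, and `nextStartTime` advances by each buffer's duration so consecutive chunks play back to back. `getIsPlaying` follows from the same invariant. A self-contained sketch of the idiom (not SDK code):

```js
// Minimal look-ahead scheduling sketch.
// `buffers` is assumed to be an array of decoded AudioBuffers.
function scheduleGapless(ctx, buffers) {
  let nextStartTime = ctx.currentTime;
  for (const buffer of buffers) {
    const source = ctx.createBufferSource();
    source.buffer = buffer;
    source.connect(ctx.destination);
    const startTime = Math.max(nextStartTime, ctx.currentTime);
    source.start(startTime);                      // queued on the audio clock
    nextStartTime = startTime + buffer.duration;  // back to back, no gaps
  }
  // Playback is in progress exactly while nextStartTime is in the future.
  return () => nextStartTime > ctx.currentTime;
}
```
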
@@ -8563,10 +9099,12 @@ class AudioPlayer extends Script {
         if (this.audioContext) {
             this.audioContext.close();
             this.audioContext = undefined;
+            this.gainNode = undefined;
+            this.nextStartTime = 0; // Reset timing
         }
     }
     static isSupported() {
-        return !!('AudioContext' in window
+        return !!('AudioContext' in window);
     }
     dispose() {
         this.stop();
@@ -9262,7 +9800,10 @@ class CoreSound extends Script {
             new BackgroundMusic(this.listener, this.categoryVolumes);
         this.spatialAudio = new SpatialAudio(this.listener, this.categoryVolumes);
         this.audioListener = new AudioListener();
-
+        // Initialize with 48kHz for general audio playback
+        // Gemini Live uses 24kHz but that gets handled automatically via playAIAudio
+        this.audioPlayer = new AudioPlayer({ sampleRate: 48000 });
+        this.audioPlayer.setCategoryVolumes(this.categoryVolumes);
         camera.add(this.listener);
         this.add(this.backgroundMusic);
         this.add(this.spatialAudio);
@@ -9283,6 +9824,7 @@ class CoreSound extends Script {
     }
     setMasterVolume(level) {
         this.categoryVolumes.masterVolume = THREE.MathUtils.clamp(level, 0.0, 1.0);
+        this.audioPlayer?.updateGainNodeVolume();
     }
     getMasterVolume() {
         return this.categoryVolumes.isMuted ? 0.0 :
@@ -9300,17 +9842,49 @@ class CoreSound extends Script {
             1.0;
     }
     async enableAudio(options = {}) {
-        const { streamToAI = true } = options;
+        const { streamToAI = true, accumulate = false } = options;
         if (streamToAI && this.speechRecognizer?.isListening) {
             console.log('Disabling SpeechRecognizer while streaming audio.');
             this.speechRecognizer.stop();
         }
         this.audioListener.setAIStreaming(streamToAI);
-        await this.audioListener.startCapture({});
+        await this.audioListener.startCapture({ accumulate });
     }
     disableAudio() {
         this.audioListener?.stopCapture();
     }
+    /**
+     * Starts recording audio with chunk accumulation
+     */
+    async startRecording() {
+        await this.audioListener.startCapture({ accumulate: true });
+    }
+    /**
+     * Stops recording and returns the accumulated audio buffer
+     */
+    stopRecording() {
+        const buffer = this.audioListener.getAccumulatedBuffer();
+        this.audioListener.stopCapture();
+        return buffer;
+    }
+    /**
+     * Gets the accumulated recording buffer without stopping
+     */
+    getRecordedBuffer() {
+        return this.audioListener.getAccumulatedBuffer();
+    }
+    /**
+     * Clears the accumulated recording buffer
+     */
+    clearRecordedBuffer() {
+        this.audioListener.clearAccumulatedBuffer();
+    }
+    /**
+     * Gets the sample rate being used for recording
+     */
+    getRecordingSampleRate() {
+        return this.audioListener.audioContext?.sampleRate || 48000;
+    }
     setAIStreaming(enabled) {
         this.audioListener?.setAIStreaming(enabled);
     }
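
Together these wrappers give `CoreSound` a push-to-talk style flow on top of the `AudioListener` accumulation API. Usage sketch (assuming `sound` is the `CoreSound` instance, e.g. `core.sound`):

```js
// `sound` is assumed to be the CoreSound instance (e.g. core.sound).
await sound.startRecording();                 // startCapture({ accumulate: true })
// ...user speaks...
const rate = sound.getRecordingSampleRate();  // read while capture is still live
const pcm = sound.stopRecording();            // combined Int16 PCM ArrayBuffer, or null
```
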
@@ -9318,6 +9892,16 @@ class CoreSound extends Script {
         return this.audioListener?.aiService !== null;
     }
     async playAIAudio(base64AudioData) {
+        // Gemini Live API outputs audio at 24kHz
+        // Only recreate AudioContext if sample rate needs to change
+        const currentRate = this.audioPlayer['options'].sampleRate;
+        if (currentRate !== 24000) {
+            this.audioPlayer['options'].sampleRate = 24000;
+            // Only stop if context exists and is different sample rate
+            if (this.audioPlayer['audioContext']) {
+                this.audioPlayer.stop(); // Reset context with new sample rate
+            }
+        }
         await this.audioPlayer.playAudioChunk(base64AudioData);
     }
     stopAIAudio() {
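
An `AudioContext` is created with a fixed `sampleRate`, so moving between the 48 kHz default player and Gemini Live's 24 kHz output requires tearing the context down and letting `initializeAudioContext` rebuild it; the guard above does this only when the rate actually differs, so consecutive Live chunks reuse one context. A hypothetical wiring (`onAudioChunk` is a stand-in name for whichever callback delivers base64 chunks from the Live session):

```js
// Hypothetical: `onAudioChunk` is a stand-in name; `sound` is core.sound.
const onAudioChunk = async (base64Chunk) => {
  await sound.playAIAudio(base64Chunk); // first call flips the player to 24 kHz
};
```
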
@@ -9326,6 +9910,26 @@ class CoreSound extends Script {
     isAIAudioPlaying() {
         return this.audioPlayer?.getIsPlaying();
     }
+    /**
+     * Plays a raw audio buffer (Int16 PCM data) with proper sample rate
+     */
+    async playRecordedAudio(audioBuffer, sampleRate) {
+        if (!audioBuffer)
+            return;
+        // Update sample rate if needed
+        if (sampleRate && sampleRate !== this.audioPlayer['options'].sampleRate) {
+            this.audioPlayer['options'].sampleRate = sampleRate;
+            this.audioPlayer.stop(); // Reset context with new sample rate
+        }
+        // Convert ArrayBuffer to base64
+        const bytes = new Uint8Array(audioBuffer);
+        let binary = '';
+        for (let i = 0; i < bytes.length; i++) {
+            binary += String.fromCharCode(bytes[i]);
+        }
+        const base64Audio = btoa(binary);
+        await this.audioPlayer.playAudioChunk(base64Audio);
+    }
     isAudioEnabled() {
         return this.audioListener?.getIsCapturing();
     }
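
This closes the record-and-replay loop. Note the base64 round trip: a per-byte `String.fromCharCode` loop is fine for short clips, but very long recordings may want chunked conversion to keep the string building cheap. A round-trip sketch (same assumptions as above; the rate is read before stopping in case `stopCapture` tears down the listener's context and the getter falls back to 48000):

```js
await sound.startRecording();
// ...capture for a while...
const rate = sound.getRecordingSampleRate(); // read while capture is still live
const pcm = sound.stopRecording();
if (pcm) {
  await sound.playRecordedAudio(pcm, rate);  // replay at the captured rate
}
```
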
@@ -9849,7 +10453,6 @@ class TextView extends View {
             texture.offset.x = this.imageOffsetX;
             const textObj = this.textObj;
             textObj.material.map = texture;
-            textObj.material.needsUpdate = true;
             textObj.sync();
         });
     }
@@ -9930,9 +10533,22 @@ class IconButton extends TextView {
         // Applies all provided options to this instance.
         Object.assign(this, options);
     }
+    /**
+     * Initializes the component and sets the render order.
+     */
+    async init(_) {
+        await super.init();
+        if (this.mesh) {
+            this.mesh.renderOrder = this.renderOrder;
+        }
+        if (this.textObj) {
+            this.textObj.renderOrder = this.renderOrder + 1;
+        }
+    }
+    /**
+
     /**
      * Handles behavior when the cursor hovers over the button.
-     * @override
      */
     onHoverOver() {
         if (!this.ux)
@@ -9941,7 +10557,6 @@ class IconButton extends TextView {
     }
     /**
      * Handles behavior when the cursor moves off the button.
-     * @override
     */
     onHoverOut() {
         if (!this.ux)
@@ -10037,7 +10652,6 @@ class ImageView extends View {
         // If no source, ensure no texture is displayed.
         if (this.material.map) {
             this.material.map = null;
-            this.material.needsUpdate = true;
         }
         this.texture?.dispose();
         this.texture = undefined;
@@ -10047,7 +10661,6 @@ class ImageView extends View {
         this.texture = this.textureLoader.load(this.src, (loadedTexture) => {
             loadedTexture.colorSpace = THREE.SRGBColorSpace;
             this.material.map = loadedTexture;
-            this.material.needsUpdate = true;
             // Updates layout after the image has loaded to get correct dimensions.
             this.updateLayout();
         });
@@ -10291,14 +10904,19 @@ class TextButton extends TextView {
         // Applies our own overrides to the default values.
         this.fontSize = options.fontSize ?? this.fontSize;
         this.fontColor = options.fontColor ?? this.fontColor;
+        this.width = options.width ?? this.width;
+        this.height = options.height ?? this.height;
     }
     /**
      * Initializes the text object after async dependencies are loaded.
-     * @override
     */
     async init() {
         await super.init();
         this.textObj.position.set(0, 0, VIEW_DEPTH_GAP);
+        if (this.mesh) {
+            this.mesh.renderOrder = this.renderOrder;
+        }
+        this.textObj.renderOrder = this.renderOrder + 1;
         // Disable raycasting on the text part so it doesn't interfere
         // with the main button geometry's interaction.
         this.textObj.raycast = () => { };
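
Both button classes now pin `renderOrder` so the label or glyph draws one step above its backing quad, and `TextButton` additionally honors `width`/`height` overrides. Construction sketch (option names taken from this hunk; `text` is assumed from the existing `TextView` API):

```js
// fontSize, fontColor, width, height come from the hunk above; `text` is assumed.
const button = new TextButton({
  text: 'Start',
  fontSize: 0.05,
  fontColor: '#ffffff',
  width: 0.4,
  height: 0.12,
});
```
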
@@ -10464,7 +11082,6 @@ class VideoView extends View {
         videoTextureInstance.colorSpace = THREE.SRGBColorSpace;
         this.texture = videoTextureInstance; // Update internal texture reference
         this.material.map = this.texture;
-        this.material.needsUpdate = true;
         const onLoadedMetadata = () => {
             if (this.video.videoWidth && this.video.videoHeight) {
                 this.videoAspectRatio =
@@ -10490,7 +11107,6 @@ class VideoView extends View {
     loadFromVideoTexture(videoTextureInstance) {
         this.texture = videoTextureInstance;
         this.material.map = this.texture;
-        this.material.needsUpdate = true;
         this.video = this.texture.image; // Underlying HTMLVideoElement
         if (this.video && this.video.videoWidth && this.video.videoHeight) {
             this.videoAspectRatio = this.video.videoWidth / this.video.videoHeight;
@@ -13132,8 +13748,9 @@ class Core {
         this.ui = new UI();
         /** Manages all (spatial) audio playback. */
         this.sound = new CoreSound();
+        this.renderSceneBound = this.renderScene.bind(this);
         /** Manages the desktop XR simulator. */
-        this.simulator = new Simulator(this.
+        this.simulator = new Simulator(this.renderSceneBound);
         /** Manages drag-and-drop interactions. */
         this.dragManager = new DragManager();
         /** Manages drag-and-drop interactions. */
@@ -13204,6 +13821,7 @@ class Core {
         this.registry.register(this.camera, THREE.Camera);
         this.registry.register(this.camera, THREE.PerspectiveCamera);
         this.renderer = new THREE.WebGLRenderer({
+            canvas: options.canvas,
             antialias: options.antialias,
             stencil: options.stencil,
             alpha: true,
@@ -13212,13 +13830,15 @@ class Core {
         this.renderer.setPixelRatio(window.devicePixelRatio);
         this.renderer.setSize(window.innerWidth, window.innerHeight);
         this.renderer.xr.enabled = true;
+        // disable built-in occlusion
+        this.renderer.xr.getDepthSensingMesh = function () { return null; };
         this.registry.register(this.renderer);
-
-
-
-
-
-
+        this.renderer.xr.setReferenceSpaceType(options.referenceSpaceType);
+        if (!options.canvas) {
+            const xrContainer = document.createElement('div');
+            document.body.appendChild(xrContainer);
+            xrContainer.appendChild(this.renderer.domElement);
+        }
         this.options = options;
         // Sets up controllers.
         if (options.controllers.enabled) {
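
`Core` can now render into a caller-supplied canvas: the option is forwarded straight to `THREE.WebGLRenderer`, and the automatic container `<div>` is only created when no canvas is given. A sketch of the intent (option names come from this hunk; how the options object reaches `Core` depends on the app's entry point):

```js
// Option names (canvas, antialias) come from this hunk; entry point assumed.
const canvas = document.querySelector('canvas#xr');
const options = { canvas, antialias: true };
// With options.canvas set, Core renders into the existing element;
// otherwise it creates a <div>, appends it to <body>, and adds its own canvas.
```
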
@@ -13235,17 +13855,16 @@ class Core {
         // Sets up device camera.
         if (options.deviceCamera?.enabled) {
             this.deviceCamera = new XRDeviceCamera(options.deviceCamera);
-            await this.deviceCamera.init();
             this.registry.register(this.deviceCamera);
         }
-        const webXRRequiredFeatures =
+        const webXRRequiredFeatures = options.webxrRequiredFeatures;
         this.webXRSettings.requiredFeatures = webXRRequiredFeatures;
         // Sets up depth.
         if (options.depth.enabled) {
             webXRRequiredFeatures.push('depth-sensing');
             webXRRequiredFeatures.push('local-floor');
             this.webXRSettings.depthSensing = {
-                usagePreference: [
+                usagePreference: [],
                 dataFormatPreference: [this.options.depth.useFloat32 ? 'float32' : 'luminance-alpha'],
             };
             this.depth.init(this.camera, options.depth, this.renderer, this.registry, this.scene);
@@ -13364,7 +13983,7 @@ class Core {
             script.update(time, frame);
         }
         this.renderSimulatorAndScene();
-        this.screenshotSynthesizer.onAfterRender(this.renderer, this.deviceCamera);
+        this.screenshotSynthesizer.onAfterRender(this.renderer, this.renderSceneBound, this.deviceCamera);
         if (this.simulatorRunning) {
             this.simulator.renderSimulatorScene();
         }
@@ -13384,7 +14003,10 @@ class Core {
      * scripts.
      * @param session - The newly started WebXR session.
      */
-    onXRSessionStarted(session) {
+    async onXRSessionStarted(session) {
+        if (this.options.deviceCamera?.enabled) {
+            await this.deviceCamera.init();
+        }
         this.scriptsManager.onXRSessionStarted(session);
     }
     async startSimulator() {
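
Device-camera startup also moves: `XRDeviceCamera` is still constructed during `init()`, but its `init()` (and any camera permission prompt it triggers) is deferred to `onXRSessionStarted`, which becomes `async` to await it, so the camera only spins up once the user actually enters an XR session.
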
@@ -14010,14 +14632,14 @@ class MaterialSymbolsView extends View {
         const paths = svgData.paths;
         const group = new THREE.Group();
         const scale = 1 / Math.max(viewWidth, viewHeight);
+        const material = new THREE.MeshBasicMaterial({
+            color: this.iconColor,
+            transparent: true,
+            side: THREE.DoubleSide,
+            depthWrite: false,
+        });
         for (let i = 0; i < paths.length; i++) {
             const path = paths[i];
-            const material = new THREE.MeshBasicMaterial({
-                color: this.iconColor,
-                side: THREE.DoubleSide,
-                depthWrite: false,
-                depthTest: false
-            });
             const shapes = SVGLoader.createShapes(path);
             for (let j = 0; j < shapes.length; j++) {
                 const shape = shapes[j];
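
The per-path `MeshBasicMaterial` is hoisted out of the loop, so all glyph paths of an icon now share a single material instead of allocating one per path. Two behavioral tweaks ride along: `transparent: true` is set, and `depthTest: false` is dropped, so icons are now depth-tested against the scene; the explicit `renderOrder` handling added to `IconButton` and `TextButton` above appears to take over the draw-ordering role that disabling the depth test used to play.
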
|