xrblocks 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +25 -9
- package/build/addons/ai/AudioCaptureProcessorCode.d.ts +1 -0
- package/build/addons/ai/AudioCaptureProcessorCode.js +27 -0
- package/build/addons/ai/GeminiManager.d.ts +7 -3
- package/build/addons/ai/GeminiManager.js +48 -23
- package/build/addons/objects/SimpleDecalGeometry.js +9 -5
- package/build/addons/simulator/instructions/CustomInstruction.js +8 -9
- package/build/addons/simulator/instructions/HandsInstructions.js +17 -10
- package/build/addons/simulator/instructions/NavigationInstructions.js +10 -9
- package/build/addons/simulator/instructions/SimulatorInstructions.js +17 -18
- package/build/addons/simulator/instructions/SimulatorInstructionsCard.js +69 -75
- package/build/addons/simulator/instructions/SimulatorInstructionsEvents.js +4 -1
- package/build/addons/simulator/instructions/UserInstructions.js +18 -15
- package/build/addons/simulator/ui/EnterXRButton.js +17 -17
- package/build/addons/simulator/ui/GeminiLiveApiKeyInput.js +45 -39
- package/build/addons/simulator/ui/HandPosePanel.js +20 -10
- package/build/addons/simulator/ui/MicButton.js +23 -18
- package/build/addons/simulator/ui/ModeIndicator.js +17 -17
- package/build/addons/ui/TextBillboard.js +1 -1
- package/build/addons/utils/Palette.js +3 -15
- package/build/addons/virtualkeyboard/Keyboard.js +24 -21
- package/build/addons/volumes/VolumetricCloud.glsl.js +1 -1
- package/build/addons/volumes/VolumetricCloud.js +8 -5
- package/build/agent/Tool.d.ts +3 -1
- package/build/ai/AI.d.ts +2 -2
- package/build/ai/Gemini.d.ts +1 -5
- package/build/camera/XRDeviceCamera.d.ts +1 -1
- package/build/core/Core.d.ts +3 -1
- package/build/core/Options.d.ts +7 -0
- package/build/core/components/ScreenshotSynthesizer.d.ts +2 -2
- package/build/core/components/XRTransition.d.ts +1 -1
- package/build/depth/DepthMesh.d.ts +1 -1
- package/build/input/Hands.d.ts +1 -1
- package/build/input/Input.d.ts +1 -1
- package/build/input/gestures/GestureEvents.d.ts +23 -0
- package/build/input/gestures/GestureRecognition.d.ts +43 -0
- package/build/input/gestures/GestureRecognitionOptions.d.ts +43 -0
- package/build/input/gestures/GestureTypes.d.ts +16 -0
- package/build/input/gestures/providers/HeuristicGestureDetectors.d.ts +2 -0
- package/build/simulator/Simulator.d.ts +2 -0
- package/build/simulator/SimulatorControls.d.ts +1 -1
- package/build/simulator/controlModes/SimulatorControlMode.d.ts +1 -1
- package/build/simulator/handPoses/HandPoseJoints.d.ts +2 -2
- package/build/simulator/userActions/PinchOnButtonAction.d.ts +2 -2
- package/build/simulator/userActions/WalkTowardsPanelAction.d.ts +1 -1
- package/build/singletons.d.ts +2 -2
- package/build/sound/CoreSound.d.ts +1 -1
- package/build/stereo/utils.d.ts +1 -1
- package/build/ui/components/MaterialSymbolsView.d.ts +1 -1
- package/build/ui/components/ScrollingTroikaTextView.d.ts +1 -1
- package/build/ui/interaction/ModelViewer.d.ts +6 -2
- package/build/utils/ModelLoader.d.ts +1 -1
- package/build/utils/SparkRendererHolder.d.ts +5 -0
- package/build/utils/Types.d.ts +2 -2
- package/build/video/VideoStream.d.ts +1 -1
- package/build/world/World.d.ts +1 -1
- package/build/world/objects/ObjectDetector.d.ts +1 -1
- package/build/world/planes/PlaneDetector.d.ts +1 -1
- package/build/xrblocks.d.ts +3 -0
- package/build/xrblocks.js +6782 -6020
- package/build/xrblocks.js.map +1 -1
- package/build/xrblocks.min.js +1 -1
- package/build/xrblocks.min.js.map +1 -1
- package/package.json +13 -8
package/README.md
CHANGED
@@ -24,13 +24,18 @@
 
 **XR Blocks** is a lightweight, cross-platform library for rapidly prototyping
 advanced XR and AI experiences. Built upon [three.js](https://threejs.org), it
-targets Chrome v136+ with WebXR support on Android XR
-
-
+targets Chrome v136+ with WebXR support on Android XR (e.g.,
+[Galaxy XR](https://www.samsung.com/us/xr/galaxy-xr/galaxy-xr/)) and also
+includes a powerful desktop simulator for development. The framework emphasizes
+a user-centric, developer-friendly SDK designed to simplify the creation of
 immersive applications with features like:
 
 - **Hand Tracking & Gestures:** Access advanced hand tracking, custom
   gestures with TensorFlow Lite / PyTorch models, and interaction events.
+- **Gesture Recognition:** Opt into pinch, open-palm, fist, thumbs-up, point,
+  and spread detection with `options.enableGestures()`, tune providers or
+  thresholds, and subscribe to `gesturestart`/`gestureupdate`/`gestureend`
+  events from the shared subsystem.
 - **World Understanding:** Present samples with depth sensing, geometry-aware
   physics, and object recognition with Gemini in both XR and desktop simulator.
 - **AI Integration:** Seamlessly connect to Gemini for multimodal
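The new gesture bullet references `options.enableGestures()` and the `gesturestart`/`gestureupdate`/`gestureend` events, backed by the new `input/gestures/*` declaration files in the file list above. A minimal sketch of how an app might opt in; the subscription surface (`xb.core.input.gestures`) and the event payload fields are assumptions, not details confirmed by this diff:

```js
import * as xb from 'xrblocks';

// Opt into gesture recognition via Options, as described in the new README text.
const options = new xb.Options();
options.enableGestures();
xb.init(options);

// Assumed: the shared gesture subsystem hangs off core input and dispatches
// gesturestart / gestureupdate / gestureend events with the detected gesture.
const gestures = xb.core.input.gestures;
gestures.addEventListener('gesturestart', (event) => {
  console.log('gesture started:', event.gesture, event.handedness); // assumed payload
});
gestures.addEventListener('gestureend', (event) => {
  console.log('gesture ended:', event.gesture);
});
```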
@@ -69,8 +74,8 @@ code below:
   <script type="importmap">
     {
       "imports": {
-        "three": "https://cdn.jsdelivr.net/npm/three@0.
-        "three/addons/": "https://cdn.jsdelivr.net/npm/three@0.
+        "three": "https://cdn.jsdelivr.net/npm/three@0.181.0/build/three.module.js",
+        "three/addons/": "https://cdn.jsdelivr.net/npm/three@0.181.0/examples/jsm/",
         "xrblocks": "https://cdn.jsdelivr.net/gh/google/xrblocks@build/xrblocks.js",
         "xrblocks/addons/": "https://cdn.jsdelivr.net/gh/google/xrblocks@build/addons/"
       }
@@ -79,8 +84,8 @@ code below:
   </head>
   <body>
     <script type="module">
-      import * as THREE from
-      import * as xb from
+      import * as THREE from 'three';
+      import * as xb from 'xrblocks';
 
       /**
        * A basic example of XRBlocks to render a cylinder and pinch to change its color.
@@ -117,7 +122,7 @@ code below:
       }
 
       // When the page content is loaded, add our script and initialize XR Blocks.
-      document.addEventListener(
+      document.addEventListener('DOMContentLoaded', function () {
        xb.add(new MainScript());
        xb.init(new xb.Options());
      });
@@ -140,8 +145,15 @@ npm ci
 
 # Build xrblocks.js.
 npm run build
+
+# After making changes, check ESLint and run Prettier
+npm run lint # ESLint check
+npm run format # Prettier format
 ```
 
+XR Blocks uses ESLint for linting and Prettier for formatting.
+If coding in VSCode, make sure to install the [ESLint extension](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint) and the [Prettier extension](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode). Then set Prettier as your default formatter.
+
 This is not an officially supported Google product, but will be actively
 maintained by the XR Labs team and external collaborators. This project is not
 eligible for the
@@ -233,7 +245,11 @@ These references are built with XR Blocks:
 We call for contributors to integrate our prior art into XR Blocks to enhance
 reproducibility and knowledge sharing:
 
-E.g., integrating models from https://visualblocks.withgoogle.com
+E.g., integrating models from <https://visualblocks.withgoogle.com> and [Transformers.js](https://huggingface.co/docs/transformers.js/en/index)
+to XR Blocks; bringing more
+[depth-based interaction](https://augmentedperception.github.io/depthlab/) to
+XR Blocks; and add more samples and demos. For large commits, feel free to add
+an issue before working on it so that your work won't be duplicated with others.
 
 ```bibtex
 @inproceedings{Du2023Rapsai,
package/build/addons/ai/AudioCaptureProcessorCode.d.ts
ADDED
@@ -0,0 +1 @@
+export declare const AUDIO_CAPTURE_PROCESSOR_CODE = "\n // Audio worklet processor for capturing audio data\n class AudioCaptureProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n }\n\n process(inputs, outputs, parameters) {\n const input = inputs[0];\n\n if (input && input[0]) {\n const inputData = input[0];\n const pcmData = new Int16Array(inputData.length);\n for (let i = 0; i < inputData.length; i++) {\n pcmData[i] = Math.max(-32768, Math.min(32767, inputData[i] * 32768));\n }\n this.port.postMessage({type: 'audioData', data: pcmData.buffer});\n }\n\n return true;\n }\n }\n\n registerProcessor('audio-capture-processor', AudioCaptureProcessor);\n";
package/build/addons/ai/AudioCaptureProcessorCode.js
ADDED
@@ -0,0 +1,27 @@
+const AUDIO_CAPTURE_PROCESSOR_CODE = `
+  // Audio worklet processor for capturing audio data
+  class AudioCaptureProcessor extends AudioWorkletProcessor {
+    constructor() {
+      super();
+    }
+
+    process(inputs, outputs, parameters) {
+      const input = inputs[0];
+
+      if (input && input[0]) {
+        const inputData = input[0];
+        const pcmData = new Int16Array(inputData.length);
+        for (let i = 0; i < inputData.length; i++) {
+          pcmData[i] = Math.max(-32768, Math.min(32767, inputData[i] * 32768));
+        }
+        this.port.postMessage({type: 'audioData', data: pcmData.buffer});
+      }
+
+      return true;
+    }
+  }
+
+  registerProcessor('audio-capture-processor', AudioCaptureProcessor);
+`;
+
+export { AUDIO_CAPTURE_PROCESSOR_CODE };
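The worklet processor above is shipped as a source string so it can be installed without a separate network fetch. The `GeminiManager.js` change further down loads it by wrapping the string in a Blob URL; here is a self-contained sketch of that pattern (the 16 kHz sample rate mirrors the diff, and the import path assumes the `xrblocks/addons/` import map from the README):

```js
import { AUDIO_CAPTURE_PROCESSOR_CODE } from 'xrblocks/addons/ai/AudioCaptureProcessorCode.js';

// Load an AudioWorklet processor from an in-memory string, as
// GeminiManager.setupAudioCapture() now does.
async function createCaptureNode(stream) {
  const audioContext = new AudioContext({ sampleRate: 16000 });

  // Turn the processor source string into a module URL the worklet can load.
  const blob = new Blob([AUDIO_CAPTURE_PROCESSOR_CODE], { type: 'text/javascript' });
  await audioContext.audioWorklet.addModule(URL.createObjectURL(blob));

  // Route the microphone stream through the registered processor.
  const sourceNode = audioContext.createMediaStreamSource(stream);
  const processorNode = new AudioWorkletNode(audioContext, 'audio-capture-processor');
  sourceNode.connect(processorNode);

  // The processor posts Int16 PCM chunks back over its message port.
  processorNode.port.onmessage = (event) => {
    if (event.data.type === 'audioData') {
      console.log('captured', event.data.data.byteLength, 'bytes of PCM');
    }
  };
  return { audioContext, processorNode };
}
```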
package/build/addons/ai/GeminiManager.d.ts
CHANGED
@@ -9,6 +9,7 @@ export interface GeminiManagerEventMap extends THREE.Object3DEventMap {
         message: string;
     };
     turnComplete: object;
+    interrupted: object;
 }
 export declare class GeminiManager extends xb.Script<GeminiManagerEventMap> {
     xrDeviceCamera?: xb.XRDeviceCamera;
@@ -17,6 +18,7 @@ export declare class GeminiManager extends xb.Script<GeminiManagerEventMap> {
     audioContext: AudioContext | null;
     sourceNode: MediaStreamAudioSourceNode | null;
     processorNode: AudioWorkletNode | null;
+    queuedSourceNodes: Set<AudioScheduledSourceNode>;
     isAIRunning: boolean;
     audioQueue: AudioBuffer[];
     nextAudioStartTime: number;
@@ -26,12 +28,13 @@ export declare class GeminiManager extends xb.Script<GeminiManagerEventMap> {
     tools: xb.Tool[];
     constructor();
     init(): void;
-    startGeminiLive({ liveParams }?: {
-        liveParams?:
+    startGeminiLive({ liveParams, model, }?: {
+        liveParams?: GoogleGenAITypes.LiveConnectConfig;
+        model?: string;
     }): Promise<void>;
     stopGeminiLive(): Promise<void>;
     setupAudioCapture(): Promise<void>;
-    startLiveAI(params:
+    startLiveAI(params: GoogleGenAITypes.LiveConnectConfig, model?: string): Promise<void>;
     startScreenshotCapture(intervalMs?: number): void;
     captureAndSendScreenshot(): void;
     sendAudioData(audioBuffer: ArrayBuffer): void;
@@ -39,6 +42,7 @@ export declare class GeminiManager extends xb.Script<GeminiManagerEventMap> {
     initializeAudioContext(): Promise<void>;
     playAudioChunk(audioData: string): Promise<void>;
     scheduleAudioBuffers(): void;
+    stopPlayingAudio(): void;
     cleanup(): void;
     handleAIMessage(message: GoogleGenAITypes.LiveServerMessage): void;
     arrayBufferToBase64(buffer: ArrayBuffer): string;
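Per the updated declarations, `startGeminiLive` now accepts an optional `model` next to `liveParams`, and the manager emits a new `interrupted` event. A hedged usage sketch based only on this signature; the export path, config fields, and model string are placeholders rather than values taken from this diff:

```js
import * as xb from 'xrblocks';
import { GeminiManager } from 'xrblocks/addons/ai/GeminiManager.js'; // assumed export path

const gemini = new GeminiManager();
xb.add(gemini);

// New in 0.3.0: the Live model can be overridden per session.
await gemini.startGeminiLive({
  liveParams: { responseModalities: ['AUDIO'] }, // placeholder LiveConnectConfig
  model: 'gemini-live-model-of-your-choice',     // placeholder model name
});

// New 'interrupted' event: dispatched after the manager flushes queued audio
// with stopPlayingAudio() when the server reports the user cut in.
gemini.addEventListener('interrupted', () => console.log('playback interrupted'));
gemini.addEventListener('turnComplete', () => console.log('turn complete'));
```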
package/build/addons/ai/GeminiManager.js
CHANGED
@@ -1,4 +1,5 @@
 import * as xb from 'xrblocks';
+import { AUDIO_CAPTURE_PROCESSOR_CODE } from './AudioCaptureProcessorCode.js';
 
 class GeminiManager extends xb.Script {
   constructor() {
@@ -8,6 +9,7 @@ class GeminiManager extends xb.Script {
     this.audioContext = null;
     this.sourceNode = null;
     this.processorNode = null;
+    this.queuedSourceNodes = new Set();
     // AI state
     this.isAIRunning = false;
     // Audio playback setup
@@ -22,19 +24,19 @@ class GeminiManager extends xb.Script {
     this.xrDeviceCamera = xb.core.deviceCamera;
     this.ai = xb.core.ai;
   }
-  async startGeminiLive({ liveParams } = {}) {
+  async startGeminiLive({ liveParams, model, } = {}) {
     if (this.isAIRunning || !this.ai) {
       console.warn('AI already running or not available');
       return;
     }
     liveParams = liveParams || {};
     liveParams.tools = liveParams.tools || [];
-
-
-    }
+    liveParams.tools.push({
+      functionDeclarations: this.tools.map((tool) => tool.toJSON()),
+    });
     try {
       await this.setupAudioCapture();
-      await this.startLiveAI(liveParams);
+      await this.startLiveAI(liveParams, model);
       this.startScreenshotCapture();
       this.isAIRunning = true;
     }
@@ -67,19 +69,21 @@ class GeminiManager extends xb.Script {
         sampleRate: 16000,
         channelCount: 1,
         echoCancellation: true,
-        noiseSuppression: true
-      }
+        noiseSuppression: true,
+      },
     });
     const audioTracks = this.audioStream.getAudioTracks();
     if (audioTracks.length === 0) {
       throw new Error('No audio tracks found.');
     }
     this.audioContext = new AudioContext({ sampleRate: 16000 });
-
-
-
-
-
+    const blob = new Blob([AUDIO_CAPTURE_PROCESSOR_CODE], {
+      type: 'text/javascript',
+    });
+    const blobUrl = URL.createObjectURL(blob);
+    await this.audioContext.audioWorklet.addModule(blobUrl);
+    this.sourceNode = this.audioContext.createMediaStreamSource(this.audioStream);
+    this.processorNode = new AudioWorkletNode(this.audioContext, 'audio-capture-processor');
     this.processorNode.port.onmessage = (event) => {
       if (event.data.type === 'audioData' && this.isAIRunning) {
         this.sendAudioData(event.data.data);
@@ -88,7 +92,7 @@ class GeminiManager extends xb.Script {
     this.sourceNode.connect(this.processorNode);
     this.processorNode.connect(this.audioContext.destination);
   }
-  async startLiveAI(params) {
+  async startLiveAI(params, model) {
     return new Promise((resolve, reject) => {
       this.ai.setLiveCallbacks({
         onopen: () => {
@@ -103,9 +107,9 @@ class GeminiManager extends xb.Script {
         },
         onclose: () => {
           this.isAIRunning = false;
-        }
+        },
       });
-      this.ai.startLiveSession(params).catch(reject);
+      this.ai.startLiveSession(params, model).catch(reject);
     });
   }
   startScreenshotCapture(intervalMs = 1000) {
@@ -126,9 +130,9 @@ class GeminiManager extends xb.Script {
     });
     if (typeof base64Image == 'string') {
       // Strip the data URL prefix if present
-      const base64Data = base64Image.startsWith('data:')
-        base64Image.split(',')[1]
-        base64Image;
+      const base64Data = base64Image.startsWith('data:')
+        ? base64Image.split(',')[1]
+        : base64Image;
       this.sendVideoFrame(base64Data);
     }
   }
@@ -196,13 +200,25 @@ class GeminiManager extends xb.Script {
       source.buffer = audioBuffer;
       source.connect(this.audioContext.destination);
       source.onended = () => {
+        source.disconnect();
+        this.queuedSourceNodes.delete(source);
         this.scheduleAudioBuffers();
       };
       const startTime = Math.max(this.nextAudioStartTime, this.audioContext.currentTime);
       source.start(startTime);
+      this.queuedSourceNodes.add(source);
       this.nextAudioStartTime = startTime + audioBuffer.duration;
     }
   }
+  stopPlayingAudio() {
+    this.audioQueue = [];
+    this.nextAudioStartTime = 0;
+    for (const source of this.queuedSourceNodes) {
+      source.stop();
+      source.disconnect();
+    }
+    this.queuedSourceNodes.clear();
+  }
   cleanup() {
     if (this.screenshotInterval) {
       clearInterval(this.screenshotInterval);
@@ -223,7 +239,7 @@ class GeminiManager extends xb.Script {
       this.audioContext = null;
     }
     if (this.audioStream) {
-      this.audioStream.getTracks().forEach(track => track.stop());
+      this.audioStream.getTracks().forEach((track) => track.stop());
       this.audioStream = null;
     }
   }
@@ -232,16 +248,21 @@ class GeminiManager extends xb.Script {
       this.playAudioChunk(message.data);
     }
     for (const functionCall of message.toolCall?.functionCalls ?? []) {
-      const tool = this.tools.find(tool => tool.name == functionCall.name);
+      const tool = this.tools.find((tool) => tool.name == functionCall.name);
       if (tool) {
         const exec = tool.execute(functionCall.args);
-        exec
+        exec
+          .then((result) => {
             this.ai.sendToolResponse({
               functionResponses: {
                 id: functionCall.id,
                 name: functionCall.name,
-                response: {
-
+                response: {
+                  output: result.data,
+                  error: result.error,
+                  ...result.metadata,
+                },
+              },
             });
           })
           .catch((error) => console.error('Tool error:', error));
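The tool-call handling above implies a small contract for the entries in `tools`: a `name`, a `toJSON()` that feeds `functionDeclarations`, and an async `execute(args)` whose result supplies `data`, `error`, and optional `metadata` for the tool response. A duck-typed illustration of that inferred contract (in real code this would be an `xb.Tool`, whose constructor is not shown in this diff):

```js
// Illustration only: the three members below are exactly what GeminiManager touches.
const setGreetingTool = {
  name: 'set_greeting',

  // Serialized into liveParams.tools[].functionDeclarations by startGeminiLive().
  toJSON() {
    return {
      name: 'set_greeting',
      description: 'Stores a short greeting chosen by the model.',
      parameters: {
        type: 'OBJECT',
        properties: { text: { type: 'STRING' } },
        required: ['text'],
      },
    };
  },

  // handleAIMessage() awaits this and folds data/error/metadata into the
  // functionResponses payload passed to sendToolResponse().
  async execute(args) {
    return { data: `greeting set to "${args.text}"`, error: undefined, metadata: {} };
  },
};

// Assumed registration point, before starting the live session:
// geminiManager.tools.push(setGreetingTool);
```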
@@ -260,6 +281,10 @@ class GeminiManager extends xb.Script {
         this.dispatchEvent({ type: 'outputTranscription', message: text });
       }
     }
+    if (message.serverContent.interrupted) {
+      this.stopPlayingAudio();
+      this.dispatchEvent({ type: 'interrupted' });
+    }
     if (message.serverContent.turnComplete) {
       this.dispatchEvent({ type: 'turnComplete' });
     }
package/build/addons/objects/SimpleDecalGeometry.js
CHANGED
@@ -24,8 +24,7 @@ class SimpleDecalGeometry extends THREE.BufferGeometry {
     projectorMatrix.makeRotationFromQuaternion(orientation);
     projectorMatrix.setPosition(position);
     projectorMatrix.scale(scale);
-    projectorMatrix
-      .invert(); // Inverts the matrix for projection calculations.
+    projectorMatrix.invert(); // Inverts the matrix for projection calculations.
     // Accesses the vertices, UVs, and indices from the geometry attributes.
     const vertices = this.attributes.position.array;
     const uvs = this.attributes.uv.array;
@@ -46,15 +45,20 @@ class SimpleDecalGeometry extends THREE.BufferGeometry {
       uvs[2 * i] = vector4.x + 0.5;
       uvs[2 * i + 1] = vector4.y + 0.5;
       // Checks if the vertex is within the -0.5 to 0.5 range in all dimensions.
-      vertexBounded[i] = Number(vector4.x >= -0.5 &&
-        vector4.
+      vertexBounded[i] = Number(vector4.x >= -0.5 &&
+        vector4.x <= 0.5 &&
+        vector4.y >= -0.5 &&
+        vector4.y <= 0.5 &&
+        vector4.z >= -0.5 &&
+        vector4.z <= 0.5);
     }
     // Creates a list of indices that correspond to bounded vertices only.
     const goodIndices = [];
     for (let i = 0; i < indices.length / 3; ++i) {
       // Adds the triangle indices if any of its vertices are inside the
       // bounding box.
-      if (vertexBounded[indices[3 * i]] ||
+      if (vertexBounded[indices[3 * i]] ||
+        vertexBounded[indices[3 * i + 1]] ||
         vertexBounded[indices[3 * i + 2]]) {
         goodIndices.push(indices[3 * i]);
         goodIndices.push(indices[3 * i + 1]);
package/build/addons/simulator/instructions/CustomInstruction.js
CHANGED
@@ -10,15 +10,14 @@ let CustomInstruction = class CustomInstruction extends SimulatorInstructionsCar
     return html `${this.customInstruction.header}`;
   }
   getImageContents() {
-    return this.customInstruction.videoSrc
-
-      <
-        src=${this.customInstruction.videoSrc}
-
-
-
-
-      html ``;
+    return this.customInstruction.videoSrc
+      ? html `
+          <video playsinline autoplay muted loop>
+            <source src=${this.customInstruction.videoSrc} type="video/webm" />
+            Your browser does not support the video tag.
+          </video>
+        `
+      : html ``;
   }
   getDescriptionContents() {
     return html `${this.customInstruction.description}`;
package/build/addons/simulator/instructions/HandsInstructions.js
CHANGED
@@ -11,10 +11,8 @@ let HandsInstructions = class HandsInstructions extends SimulatorInstructionsCar
   getImageContents() {
     return html `
       <video playsinline autoplay muted loop>
-
-
-          type="video/webm">
-        Your browser does not support the video tag.
+        <source src="${SIMULATOR_HANDS_VIDEO_PATH}" type="video/webm" />
+        Your browser does not support the video tag.
       </video>
     `;
   }
@@ -22,14 +20,23 @@ let HandsInstructions = class HandsInstructions extends SimulatorInstructionsCar
     return html `
       <h2>Hands Mode</h2>
       <p>
-        From Navigation Mode, press <strong>Left Shift</strong> to enter
-        This mode allows for precise manipulation
+        From Navigation Mode, press <strong>Left Shift</strong> to enter
+        <strong>Hands Mode</strong>. This mode allows for precise manipulation
+        of virtual hands.
       </p>
       <ul>
-
-        <
-
-
+        <li>
+          <strong>Move Hand:</strong> Use the W, A, S, D keys to move it
+          forward, left, backward, and right.
+        </li>
+        <li>
+          <strong>Elevate Hand:</strong> Use the Q (up) and E (down) keys.
+        </li>
+        <li>
+          <strong>Switch Active Hand:</strong> Press the T key to toggle between
+          hands.
+        </li>
+        <li><strong>Simulate Pinch:</strong> Press the Spacebar.</li>
       </ul>
     `;
   }
package/build/addons/simulator/instructions/NavigationInstructions.js
CHANGED
@@ -11,10 +11,8 @@ let NavigationInstructions = class NavigationInstructions extends SimulatorInstr
   getImageContents() {
     return html `
       <video playsinline autoplay muted loop>
-
-
-          type="video/webm">
-        Your browser does not support the video tag.
+        <source src=${SIMULATOR_NAVIGATION_VIDEO_PATH} type="video/webm" />
+        Your browser does not support the video tag.
       </video>
     `;
   }
@@ -22,13 +20,16 @@ let NavigationInstructions = class NavigationInstructions extends SimulatorInstr
     return html `
       <h2>Navigation Mode</h2>
       <p>
-        Press <strong>Left Shift</strong> to toggle Navigation Mode.
-
+        Press <strong>Left Shift</strong> to toggle Navigation Mode. In this
+        mode, virtual hands appear and the mouse controls the camera view.
       </p>
       <ul>
-
-        <
-
+        <li>
+          <strong>Move Forward/Backward/Sideways:</strong> Use the W, A, S, D
+          keys.
+        </li>
+        <li><strong>Move Up/Down:</strong> Use the Q and E keys.</li>
+        <li><strong>Rotate Camera:</strong> Click and drag the mouse.</li>
       </ul>
     `;
   }
package/build/addons/simulator/instructions/SimulatorInstructions.js
CHANGED
@@ -13,26 +13,23 @@ import 'xrblocks';
 let SimulatorInstructions = class SimulatorInstructions extends LitElement {
   static { this.styles = css `
     :host {
-
-
-
-
-
-
-
-
-
+      background: #000000aa;
+      position: absolute;
+      top: 0;
+      left: 0;
+      display: flex;
+      height: 100%;
+      width: 100%;
+      justify-content: center;
+      align-items: center;
     }
   `; }
   constructor() {
     super();
     this.steps = [
-      html `
-
-      html `
-        <xrblocks-simulator-navigation-instructions />`,
-      html `
-        <xrblocks-simulator-hands-instructions />`
+      html ` <xrblocks-simulator-user-instructions />`,
+      html ` <xrblocks-simulator-navigation-instructions />`,
+      html ` <xrblocks-simulator-hands-instructions />`,
    ];
    this.customInstructions = [];
    this.step = 0;
@@ -50,9 +47,11 @@ let SimulatorInstructions = class SimulatorInstructions extends LitElement {
     this.step++;
   }
   render() {
-    return this.step < this.steps.length
-      this.steps[this.step]
-      html `<xrblocks-simulator-custom-instruction
+    return this.step < this.steps.length
+      ? this.steps[this.step]
+      : html `<xrblocks-simulator-custom-instruction
+          .customInstruction=${this.customInstructions[this.step - this.steps.length]}
+        />`;
   }
 };
 __decorate([