xrblocks 0.5.1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/ai/AI.d.ts +1 -1
- package/build/camera/CameraUtils.d.ts +20 -4
- package/build/depth/Depth.d.ts +7 -4
- package/build/simulator/SimulatorDepth.d.ts +16 -5
- package/build/xrblocks.js +210 -59
- package/build/xrblocks.js.map +1 -1
- package/build/xrblocks.min.js +1 -1
- package/build/xrblocks.min.js.map +1 -1
- package/package.json +4 -4
package/build/xrblocks.js
CHANGED
@@ -14,9 +14,9 @@
  * limitations under the License.
  *
  * @file xrblocks.js
- * @version v0.5.1
- * @commitid
- * @builddate 2025-12-
+ * @version v0.6.0
+ * @commitid 64e2279
+ * @builddate 2025-12-19T21:53:04.057Z
  * @description XR Blocks SDK, built from source with the above commit ID.
  * @agent When using with Gemini to create XR apps, use **Gemini Canvas** mode,
  * and follow rules below:
@@ -286,7 +286,7 @@ class GenerateSkyboxTool extends Tool {
     async execute(args) {
         try {
             const image = await this.ai.generate('Generate a 360 equirectangular skybox image for the prompt of:' +
-                args.prompt, 'image', 'Generate a 360 equirectangular skybox image for the prompt'
+                args.prompt, 'image', 'Generate a 360 equirectangular skybox image for the prompt');
             if (image) {
                 console.log('Applying texture...');
                 this.scene.background = new THREE.TextureLoader().load(image);
@@ -1365,7 +1365,7 @@ class Gemini extends BaseAIModel {
         }
         return { text: response.text || null };
     }
-    async generate(prompt, type = 'image', systemInstruction = 'Generate an image', model = 'gemini-2.5-flash-image
+    async generate(prompt, type = 'image', systemInstruction = 'Generate an image', model = 'gemini-2.5-flash-image') {
         if (!this.isAvailable())
             return;
         let contents;
@@ -1674,7 +1674,7 @@ class AI extends Script {
      * In XR mode, show a 3D UI to instruct users to get an API key.
      */
     triggerKeyPopup() { }
-    async generate(prompt, type = 'image', systemInstruction = 'Generate an image', model =
+    async generate(prompt, type = 'image', systemInstruction = 'Generate an image', model = undefined) {
         return this.model.generate(prompt, type, systemInstruction, model);
     }
     /**
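The two hunks above complete the `generate` signatures: the `AI` facade defaults `model` to `undefined` and forwards to the active backend, while the `Gemini` backend defaults to `'gemini-2.5-flash-image'`. A minimal usage sketch, assuming `ai` is an initialized `AI` instance, `scene` is the active scene, and three.js is imported as `THREE` (the prompt strings mirror `GenerateSkyboxTool` above):

```js
// Sketch: request an image through the AI facade; the model argument is
// optional and falls through to the backend default.
const image = await ai.generate(
    'Generate a 360 equirectangular skybox image for the prompt of: a misty forest',
    'image',
    'Generate a 360 equirectangular skybox image for the prompt');
if (image) {
  // GenerateSkyboxTool applies the result as a background texture.
  scene.background = new THREE.TextureLoader().load(image);
}
```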
@@ -3075,6 +3075,9 @@ class Depth {
         // Whether we're counting the number of depth clients.
         this.depthClientsInitialized = false;
         this.depthClients = new Set();
+        this.depthProjectionMatrices = [];
+        this.depthViewMatrices = [];
+        this.depthViewProjectionMatrices = [];
         if (Depth.instance) {
             return Depth.instance;
         }
@@ -3157,16 +3160,34 @@ class Depth {
         vertexPosition.multiplyScalar(-depth / vertexPosition.z);
         return vertexPosition;
     }
-
-
+    updateDepthMatrices(depthData, viewId) {
+        // Populate depth view and projection matrices.
+        while (viewId >= this.depthViewMatrices.length) {
+            this.depthViewMatrices.push(new THREE.Matrix4());
+            this.depthViewProjectionMatrices.push(new THREE.Matrix4());
+            this.depthProjectionMatrices.push(new THREE.Matrix4());
+        }
+        if (depthData.projectionMatrix && depthData.transform) {
+            this.depthProjectionMatrices[viewId].fromArray(depthData.projectionMatrix);
+            this.depthViewMatrices[viewId].fromArray(depthData.transform.inverse.matrix);
+        }
+        else {
+            const camera = this.renderer.xr?.getCamera()?.cameras?.[viewId] ?? this.camera;
+            this.depthProjectionMatrices[viewId].copy(camera.projectionMatrix);
+            this.depthViewMatrices[viewId].copy(camera.matrixWorldInverse);
+        }
+        this.depthViewProjectionMatrices[viewId].multiplyMatrices(this.depthProjectionMatrices[viewId], this.depthViewMatrices[viewId]);
+    }
+    updateCPUDepthData(depthData, viewId = 0) {
+        this.cpuDepthData[viewId] = depthData;
         // Workaround for b/382679381.
         this.rawValueToMeters = depthData.rawValueToMeters;
         if (this.options.useFloat32) {
             this.rawValueToMeters = 1.0;
         }
         // Updates Depth Array.
-        if (this.depthArray[
-        this.depthArray[
+        if (this.depthArray[viewId] == null) {
+            this.depthArray[viewId] = this.options.useFloat32
                 ? new Float32Array(depthData.data)
                 : new Uint16Array(depthData.data);
             this.width = depthData.width;
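The new `updateDepthMatrices` caches, per view, a projection matrix, a view matrix, and their product (clip from world), preferring the matrices carried by the depth data and falling back to the XR camera for that view. A standalone sketch of the same composition, assuming `depthData` has the `projectionMatrix`/`transform` shape shown above:

```js
import * as THREE from 'three';

// Sketch: compose the per-view "clip from world" matrix the way
// updateDepthMatrices does when the depth data carries its own camera info.
function composeDepthViewProjection(depthData) {
  const projection = new THREE.Matrix4().fromArray(depthData.projectionMatrix);
  // transform.inverse.matrix is the view matrix (world -> depth camera space).
  const view = new THREE.Matrix4().fromArray(depthData.transform.inverse.matrix);
  return new THREE.Matrix4().multiplyMatrices(projection, view);
}
```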
@@ -3174,20 +3195,21 @@ class Depth {
         }
         else {
             // Copies the data from an ArrayBuffer to the existing TypedArray.
-            this.depthArray[
+            this.depthArray[viewId].set(this.options.useFloat32
                 ? new Float32Array(depthData.data)
                 : new Uint16Array(depthData.data));
         }
         // Updates Depth Texture.
         if (this.options.depthTexture.enabled && this.depthTextures) {
-            this.depthTextures.updateData(depthData,
+            this.depthTextures.updateData(depthData, viewId);
         }
-        if (this.options.depthMesh.enabled && this.depthMesh &&
+        if (this.options.depthMesh.enabled && this.depthMesh && viewId == 0) {
             this.depthMesh.updateDepth(depthData);
         }
+        this.updateDepthMatrices(depthData, viewId);
     }
-    updateGPUDepthData(depthData,
-    this.gpuDepthData[
+    updateGPUDepthData(depthData, viewId = 0) {
+        this.gpuDepthData[viewId] = depthData;
         // Workaround for b/382679381.
         this.rawValueToMeters = depthData.rawValueToMeters;
         if (this.options.useFloat32) {
@@ -3200,8 +3222,8 @@ class Depth {
             ? this.depthMesh.convertGPUToGPU(depthData)
             : null;
         if (cpuDepth) {
-            if (this.depthArray[
-            this.depthArray[
+            if (this.depthArray[viewId] == null) {
+                this.depthArray[viewId] = this.options.useFloat32
                     ? new Float32Array(cpuDepth.data)
                     : new Uint16Array(cpuDepth.data);
                 this.width = cpuDepth.width;
@@ -3209,16 +3231,16 @@ class Depth {
             }
             else {
                 // Copies the data from an ArrayBuffer to the existing TypedArray.
-                this.depthArray[
+                this.depthArray[viewId].set(this.options.useFloat32
                     ? new Float32Array(cpuDepth.data)
                     : new Uint16Array(cpuDepth.data));
             }
         }
         // Updates Depth Texture.
         if (this.options.depthTexture.enabled && this.depthTextures) {
-            this.depthTextures.updateNativeTexture(depthData, this.renderer,
+            this.depthTextures.updateNativeTexture(depthData, this.renderer, viewId);
         }
-        if (this.options.depthMesh.enabled && this.depthMesh &&
+        if (this.options.depthMesh.enabled && this.depthMesh && viewId == 0) {
             if (cpuDepth) {
                 this.depthMesh.updateDepth(cpuDepth);
             }
@@ -3226,11 +3248,12 @@ class Depth {
                 this.depthMesh.updateGPUDepth(depthData);
             }
         }
+        this.updateDepthMatrices(depthData, viewId);
     }
-    getTexture(
+    getTexture(viewId) {
         if (!this.options.depthTexture.enabled)
             return undefined;
-        return this.depthTextures?.get(
+        return this.depthTextures?.get(viewId);
     }
     update(frame) {
         if (!this.options.enabled)
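With `viewId` threaded through the CPU and GPU update paths, depth buffers and textures are now addressed per eye. A short access sketch, assuming `depth` is the `Depth` singleton in a stereo session with depth textures enabled:

```js
// Sketch: per-view access after the viewId changes above.
const leftDepthTexture = depth.getTexture(0);   // view 0 (left eye)
const rightDepthTexture = depth.getTexture(1);  // view 1 (right eye)
const leftDepthArray = depth.depthArray[0];     // CPU depth values for view 0
```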
@@ -3264,16 +3287,9 @@ class Depth {
                 return;
             }
         }
-
-
-
-            });
-            session.addEventListener('end', () => {
-                this.xrRefSpace = undefined;
-            });
-        }
-        else {
-            const pose = frame.getViewerPose(this.xrRefSpace);
+        const xrRefSpace = this.renderer.xr.getReferenceSpace();
+        if (xrRefSpace) {
+            const pose = frame.getViewerPose(xrRefSpace);
             if (pose) {
                 for (let view_id = 0; view_id < pose.views.length; ++view_id) {
                     const view = pose.views[view_id];
@@ -3348,10 +3364,10 @@ const aspectRatios = {
  *
  * @param rgbUv - The RGB UV coordinate, e.g., \{ u: 0.5, v: 0.5 \}.
  * @param xrDeviceCamera - The device camera instance.
- * @returns The transformed UV coordinate in the
+ * @returns The transformed UV coordinate in the render camera clip space, or null if
  * inputs are invalid.
  */
-function
+function transformRgbToRenderCameraClip(rgbUv, xrDeviceCamera) {
     if (xrDeviceCamera?.simulatorCamera) {
         // The simulator camera crops the viewport image to match its aspect ratio,
         // while the depth map covers the entire viewport, so we adjust for this.
@@ -3369,7 +3385,7 @@ function transformRgbToDepthUv(rgbUv, xrDeviceCamera) {
             const relativeHeight = viewportAspect / cameraAspect;
             v = v * relativeHeight + (1.0 - relativeHeight) / 2.0;
         }
-        return
+        return new THREE.Vector2(2 * u - 1, 2 * v - 1);
     }
     if (!aspectRatios || !aspectRatios.depth || !aspectRatios.RGB) {
         console.error('Invalid aspect ratios provided.');
@@ -3408,10 +3424,34 @@ function transformRgbToDepthUv(rgbUv, xrDeviceCamera) {
     // Apply the final user-controlled scaling (zoom and stretch).
     const finalNormX = u_fitted * params.scale * params.scaleX;
     const finalNormY = v_fitted * params.scale * params.scaleY;
-
-
-
-
+    return new THREE.Vector2(2 * finalNormX, 2 * finalNormY);
+}
+/**
+ * Maps a UV coordinate from a RGB space to a destination depth space,
+ * applying Brown-Conrady distortion and affine transformations based on
+ * aspect ratios. If the simulator camera is used, no transformation is applied.
+ *
+ * @param rgbUv - The RGB UV coordinate, e.g., \{ u: 0.5, v: 0.5 \}.
+ * @param renderCameraWorldFromClip - Render camera world from clip, i.e. inverse of the View Projection matrix.
+ * @param depthCameraClipFromWorld - Depth camera clip from world, i.e. the depth camera's View Projection matrix.
+ * @param xrDeviceCamera - The device camera instance.
+ * @returns The transformed UV coordinate in the depth image space, or null if
+ * inputs are invalid.
+ */
+function transformRgbToDepthUv(rgbUv, renderCameraWorldFromClip, depthCameraClipFromWorld, xrDeviceCamera) {
+    // Render camera clip space coordinates.
+    const clipCoords = transformRgbToRenderCameraClip(rgbUv, xrDeviceCamera);
+    if (!clipCoords) {
+        return null;
+    }
+    // Backwards project from the render camera to depth camera.
+    const depthClipCoord = new THREE.Vector4(clipCoords.x, clipCoords.y, 1, 1);
+    depthClipCoord.applyMatrix4(renderCameraWorldFromClip);
+    depthClipCoord.applyMatrix4(depthCameraClipFromWorld);
+    depthClipCoord.multiplyScalar(1 / depthClipCoord.w);
+    const finalU = 0.5 * depthClipCoord.x + 0.5;
+    const finalV = 1.0 - (0.5 * depthClipCoord.y + 0.5);
+    return { u: finalU, v: finalV };
 }
 /**
  * Retrieves the world space position of a given RGB UV coordinate.
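The rewritten `transformRgbToDepthUv` takes its two matrices explicitly: the render camera's world-from-clip and the depth camera's clip-from-world. A hedged sketch of building those arguments from a three.js camera and the `Depth` module for view 0 (`camera` and `xrDeviceCamera` are assumed to be the app's rendering camera and the SDK device camera):

```js
// Sketch: map the center of the RGB image into the depth image for view 0.
// worldFromClip = (P * V)^-1 = V^-1 * P^-1 = matrixWorld * projectionMatrixInverse.
const renderWorldFromClip = camera.matrixWorld.clone()
    .multiply(camera.projectionMatrixInverse);
const depthClipFromWorld = Depth.instance.depthViewProjectionMatrices[0];
const depthUv = transformRgbToDepthUv(
    { u: 0.5, v: 0.5 }, renderWorldFromClip, depthClipFromWorld, xrDeviceCamera);
```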
@@ -3421,19 +3461,30 @@ function transformRgbToDepthUv(rgbUv, xrDeviceCamera) {
  *
  * @param rgbUv - The RGB UV coordinate, e.g., \{ u: 0.5, v: 0.5 \}.
  * @param depthArray - Array containing depth data.
- * @param
+ * @param projectionMatrix - XRView object with corresponding
  * projection matrix.
- * @param matrixWorld -
+ * @param matrixWorld - Rendering camera's model matrix.
  * @param xrDeviceCamera - The device camera instance.
  * @param xrDepth - The SDK's Depth module.
  * @returns Vertex at (u, v) in world space.
  */
-function transformRgbUvToWorld(rgbUv, depthArray,
-    if (!depthArray || !
-
-
+function transformRgbUvToWorld(rgbUv, depthArray, projectionMatrix, matrixWorld, xrDeviceCamera, xrDepth = Depth.instance) {
+    if (!depthArray || !projectionMatrix || !matrixWorld || !xrDepth) {
+        throw new Error('Missing parameter in transformRgbUvToWorld');
+    }
+    const worldFromClip = matrixWorld
+        .clone()
+        .invert()
+        .premultiply(projectionMatrix)
+        .invert();
+    const depthProjectionMatrixInverse = xrDepth.depthProjectionMatrices[0]
+        .clone()
+        .invert();
+    const depthClipFromWorld = xrDepth.depthViewProjectionMatrices[0];
+    const depthModelMatrix = xrDepth.depthViewMatrices[0].clone().invert();
+    const depthUV = transformRgbToDepthUv(rgbUv, worldFromClip, depthClipFromWorld, xrDeviceCamera);
     if (!depthUV) {
-
+        throw new Error('Failed to get depth UV');
     }
     const { u: depthU, v: depthV } = depthUV;
     const depthX = Math.round(clamp(depthU * xrDepth.width, 0, xrDepth.width - 1));
@@ -3444,12 +3495,13 @@ function transformRgbUvToWorld(rgbUv, depthArray, viewProjectionMatrix, matrixWo
     // Convert UV to normalized device coordinates and create a point on the near
     // plane.
     const viewSpacePosition = new THREE.Vector3(2.0 * (depthU - 0.5), 2.0 * (depthV - 0.5), -1);
-    const viewProjectionMatrixInverse = viewProjectionMatrix.clone().invert();
     // Unproject the point from clip space to view space and scale it along the
     // ray from the camera to the correct depth. Camera looks down -Z axis.
-    viewSpacePosition.applyMatrix4(
+    viewSpacePosition.applyMatrix4(depthProjectionMatrixInverse);
     viewSpacePosition.multiplyScalar(-depthInMeters / viewSpacePosition.z);
-    const worldPosition = viewSpacePosition
+    const worldPosition = viewSpacePosition
+        .clone()
+        .applyMatrix4(depthModelMatrix);
     return worldPosition;
 }
 /**
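`transformRgbUvToWorld` now takes the rendering camera's projection and model matrices separately and reads the depth camera's matrices from the `Depth` module, so the unprojection uses the depth camera rather than the render camera. A usage sketch, assuming an active `xrDeviceCamera` and a `Depth.instance` that has already received CPU depth:

```js
// Sketch: lift the RGB pixel at the image center into world space.
const worldPoint = transformRgbUvToWorld(
    { u: 0.5, v: 0.5 },
    Depth.instance.depthArray[0],   // CPU depth for view 0
    camera.projectionMatrix,        // rendering camera projection
    camera.matrixWorld,             // rendering camera model matrix
    xrDeviceCamera);
console.log('world position:', worldPoint);
```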
@@ -6429,6 +6481,13 @@ function computePinch(context, config) {
     const index = getJoint(context, 'index-finger-tip');
     if (!thumb || !index)
         return undefined;
+    const supportMetrics = ['middle', 'ring', 'pinky']
+        .map((finger) => computeFingerMetric(context, finger))
+        .filter(Boolean);
+    const supportCurl = supportMetrics.length > 0
+        ? average(supportMetrics.map((metrics) => metrics.curlRatio))
+        : 1;
+    const supportPenalty = clamp01((supportCurl - 1.05) / 0.35);
     const handScale = estimateHandScale(context);
     const threshold = config.threshold ?? Math.max(0.018, handScale * 0.35);
     const distance = thumb.distanceTo(index);
@@ -6437,10 +6496,12 @@ function computePinch(context, config) {
     }
     const tightness = clamp01(1 - distance / (threshold * 0.85));
     const loosePenalty = clamp01(1 - distance / (threshold * 1.4));
-
+    let confidence = clamp01(distance <= threshold ? tightness : loosePenalty * 0.4);
+    confidence *= 1 - supportPenalty * 0.45;
+    confidence = clamp01(confidence);
     return {
         confidence,
-        data: { distance, threshold },
+        data: { distance, threshold, supportPenalty },
     };
 }
 function computeOpenPalm(context, config) {
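The pinch recognizer now discounts its confidence when the middle, ring, and pinky fingers are extended (a higher `curlRatio` means a straighter finger in these metrics). A small worked sketch with illustrative numbers, using a local stand-in for the SDK's `clamp01`:

```js
// Illustrative values only: a tight pinch (base confidence 0.9) while the
// support fingers are fairly straight (average curlRatio 1.3).
const clamp01 = (x) => Math.min(Math.max(x, 0), 1);
const supportPenalty = clamp01((1.3 - 1.05) / 0.35);           // ~0.71
const confidence = clamp01(0.9 * (1 - supportPenalty * 0.45)); // ~0.61
```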
@@ -6449,21 +6510,29 @@ function computeOpenPalm(context, config) {
         return undefined;
     const handScale = estimateHandScale(context);
     const palmWidth = getPalmWidth(context) ?? handScale * 0.85;
+    const palmUp = getPalmUp(context);
     const extensionScores = fingerMetrics.map(({ tipDistance }) => clamp01((tipDistance - handScale * 0.5) / (handScale * 0.45)));
     const straightnessScores = fingerMetrics.map(({ curlRatio }) => clamp01((curlRatio - 1.1) / 0.5));
+    const orientationScore = palmUp && fingerMetrics.length
+        ? average(fingerMetrics.map((metrics) => fingerAlignmentScore(context, metrics, palmUp)))
+        : 0.5;
     const neighbors = getAdjacentFingerDistances(context);
     const spreadScore = neighbors.average !== Infinity && palmWidth > EPSILON
         ? clamp01((neighbors.average - palmWidth * 0.55) / (palmWidth * 0.35))
         : 0;
     const extensionScore = average(extensionScores);
     const straightScore = average(straightnessScores);
-    const confidence = clamp01(extensionScore * 0.
+    const confidence = clamp01(extensionScore * 0.4 +
+        straightScore * 0.25 +
+        spreadScore * 0.2 +
+        orientationScore * 0.15);
     return {
         confidence,
         data: {
             extensionScore,
             straightScore,
             spreadScore,
+            orientationScore,
             threshold: config.threshold,
         },
     };
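Each recognizer blends its clamped sub-scores with fixed weights that sum to 1 and clamps the result to [0, 1]. A worked open-palm example with illustrative scores (not real sensor values):

```js
// extensionScore 0.9, straightScore 0.8, spreadScore 0.6, orientationScore 0.7:
const clamp01 = (x) => Math.min(Math.max(x, 0), 1);
const confidence = clamp01(0.9 * 0.4 + 0.8 * 0.25 + 0.6 * 0.2 + 0.7 * 0.15);
console.log(confidence.toFixed(3)); // "0.785"
```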
@@ -6480,15 +6549,26 @@ function computeFist(context, config) {
     const clusterScore = neighbors.average !== Infinity && palmWidth > EPSILON
         ? clamp01((palmWidth * 0.5 - neighbors.average) / (palmWidth * 0.35))
         : 0;
+    const thumbTip = getJoint(context, 'thumb-tip');
+    const indexBase = getFingerJoint(context, 'index', 'phalanx-proximal') ??
+        getFingerJoint(context, 'index', 'metacarpal');
+    const thumbWrapScore = thumbTip && indexBase && palmWidth > EPSILON
+        ? clamp01((palmWidth * 0.55 - thumbTip.distanceTo(indexBase)) /
+            (palmWidth * 0.35))
+        : 0;
     const tipScore = clamp01((handScale * 0.55 - tipAverage) / (handScale * 0.25));
     const curlScore = clamp01((1.08 - curlAverage) / 0.25);
-    const confidence = clamp01(tipScore * 0.
+    const confidence = clamp01(tipScore * 0.45 +
+        curlScore * 0.3 +
+        clusterScore * 0.1 +
+        thumbWrapScore * 0.15);
     return {
         confidence,
         data: {
             tipAverage,
             curlAverage,
             clusterScore,
+            thumbWrapScore,
             threshold: config.threshold,
         },
     };
@@ -6525,8 +6605,8 @@ function computeThumbsUp(context, config) {
             orientationScore = clamp01((alignment - 0.35) / 0.35);
         }
     }
-    const confidence = clamp01(thumbExtendedScore * 0.
-        curledScore * 0.
+    const confidence = clamp01(thumbExtendedScore * 0.3 +
+        curledScore * 0.35 +
         orientationScore * 0.2 +
         separationScore * 0.15);
     return {
@@ -6550,17 +6630,33 @@ function computePoint(context, config) {
     if (!otherMetrics.length)
         return undefined;
     const handScale = estimateHandScale(context);
+    const palmWidth = getPalmWidth(context) ?? handScale * 0.85;
+    const palmUp = getPalmUp(context);
     const indexCurlScore = clamp01((indexMetrics.curlRatio - 1.2) / 0.35);
     const indexReachScore = clamp01((indexMetrics.tipDistance - handScale * 0.6) / (handScale * 0.25));
+    const indexDirectionScore = palmUp && indexMetrics
+        ? fingerAlignmentScore(context, indexMetrics, palmUp)
+        : 0.4;
     const othersCurl = average(otherMetrics.map((metrics) => metrics.curlRatio));
     const othersCurledScore = clamp01((1.05 - othersCurl) / 0.25);
-    const
+    const thumbTip = getJoint(context, 'thumb-tip');
+    const thumbTuckedScore = thumbTip && indexMetrics.metacarpal && palmWidth > EPSILON
+        ? clamp01((palmWidth * 0.75 - thumbTip.distanceTo(indexMetrics.metacarpal)) /
+            (palmWidth * 0.4))
+        : 0.5;
+    const confidence = clamp01(indexCurlScore * 0.35 +
+        indexReachScore * 0.25 +
+        othersCurledScore * 0.2 +
+        indexDirectionScore * 0.1 +
+        thumbTuckedScore * 0.1);
     return {
         confidence,
         data: {
             indexCurlScore,
             indexReachScore,
             othersCurledScore,
+            indexDirectionScore,
+            thumbTuckedScore,
             threshold: config.threshold,
         },
     };
@@ -6572,16 +6668,21 @@ function computeSpread(context, config) {
     const handScale = estimateHandScale(context);
     const palmWidth = getPalmWidth(context) ?? handScale * 0.85;
     const neighbors = getAdjacentFingerDistances(context);
+    const palmUp = getPalmUp(context);
     const spreadScore = neighbors.average !== Infinity && palmWidth > EPSILON
         ? clamp01((neighbors.average - palmWidth * 0.6) / (palmWidth * 0.35))
         : 0;
     const extensionScore = clamp01((average(fingerMetrics.map((metrics) => metrics.curlRatio)) - 1.15) / 0.45);
-    const
+    const orientationScore = palmUp && fingerMetrics.length
+        ? average(fingerMetrics.map((metrics) => fingerAlignmentScore(context, metrics, palmUp)))
+        : 0.5;
+    const confidence = clamp01(spreadScore * 0.55 + extensionScore * 0.3 + orientationScore * 0.15);
     return {
         confidence,
         data: {
             spreadScore,
             extensionScore,
+            orientationScore,
             threshold: config.threshold,
         },
     };
@@ -6709,6 +6810,16 @@ function getFingerJoint(context, finger, suffix) {
     const prefix = FINGER_PREFIX[finger];
     return getJoint(context, `${prefix}-${suffix}`);
 }
+function fingerAlignmentScore(context, metrics, palmUp) {
+    const base = metrics.metacarpal ?? getJoint(context, 'wrist');
+    if (!base)
+        return 0;
+    const direction = new THREE.Vector3().subVectors(metrics.tip, base);
+    if (direction.lengthSq() === 0)
+        return 0;
+    direction.normalize();
+    return clamp01((direction.dot(palmUp) - 0.35) / 0.5);
+}
 function clamp01(value) {
     return THREE.MathUtils.clamp(value, 0, 1);
 }
@@ -8137,6 +8248,15 @@ class SimulatorDepth {
         this.depthWidth = 160;
         this.depthHeight = 160;
         this.depthBufferSlice = new Float32Array();
+        /**
+         * If true, copies the rendering camera's projection matrix each frame.
+         */
+        this.autoUpdateDepthCameraProjection = true;
+        /**
+         * If true, copies the rendering camera's transform each frame.
+         */
+        this.autoUpdateDepthCameraTransform = true;
+        this.projectionMatrixArray = new Float32Array(16);
     }
     /**
      * Initialize Simulator Depth.
@@ -8145,6 +8265,16 @@ class SimulatorDepth {
         this.renderer = renderer;
         this.camera = camera;
         this.depth = depth;
+        if (this.camera instanceof THREE.PerspectiveCamera) {
+            this.depthCamera = new THREE.PerspectiveCamera();
+        }
+        else if (this.camera instanceof THREE.OrthographicCamera) {
+            this.depthCamera = new THREE.OrthographicCamera();
+        }
+        else {
+            throw new Error('Unknown camera type');
+        }
+        this.depthCamera.copy(this.camera, /*recursive=*/ false);
         this.createRenderTarget();
         this.depthMaterial = new SimulatorDepthMaterial();
     }
@@ -8156,14 +8286,32 @@ class SimulatorDepth {
         this.depthBuffer = new Float32Array(this.depthWidth * this.depthHeight);
     }
     update() {
+        this.updateDepthCamera();
         this.renderDepthScene();
         this.updateDepth();
     }
+    updateDepthCamera() {
+        const renderingCamera = this.camera;
+        const depthCamera = this.depthCamera;
+        if (this.autoUpdateDepthCameraProjection) {
+            depthCamera.projectionMatrix.copy(renderingCamera.projectionMatrix);
+            depthCamera.projectionMatrixInverse.copy(renderingCamera.projectionMatrixInverse);
+        }
+        if (this.autoUpdateDepthCameraTransform) {
+            depthCamera.position.copy(renderingCamera.position);
+            depthCamera.rotation.order = renderingCamera.rotation.order;
+            depthCamera.quaternion.copy(renderingCamera.quaternion);
+            depthCamera.scale.copy(renderingCamera.scale);
+            depthCamera.matrix.copy(renderingCamera.matrix);
+            depthCamera.matrixWorld.copy(renderingCamera.matrixWorld);
+            depthCamera.matrixWorldInverse.copy(renderingCamera.matrixWorldInverse);
+        }
+    }
     renderDepthScene() {
         const originalRenderTarget = this.renderer.getRenderTarget();
         this.renderer.setRenderTarget(this.depthRenderTarget);
         this.simulatorScene.overrideMaterial = this.depthMaterial;
-        this.renderer.render(this.simulatorScene, this.
+        this.renderer.render(this.simulatorScene, this.depthCamera);
         this.simulatorScene.overrideMaterial = null;
         this.renderer.setRenderTarget(originalRenderTarget);
     }
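Because the simulator now renders depth through a dedicated `depthCamera`, the depth view can be decoupled from the rendering camera by turning off the auto-update flags introduced above. A hedged sketch of one possible use, assuming `simulatorDepth` is the simulator's `SimulatorDepth` instance:

```js
// Sketch: stop mirroring the rendering camera's pose and place the depth
// camera at a fixed location; the projection is still copied each frame
// because autoUpdateDepthCameraProjection stays true.
simulatorDepth.autoUpdateDepthCameraTransform = false;
simulatorDepth.depthCamera.position.set(0, 1.6, 0);
simulatorDepth.depthCamera.lookAt(0, 1.6, -1);
simulatorDepth.depthCamera.updateMatrixWorld(true);
```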
@@ -8188,11 +8336,14 @@ class SimulatorDepth {
             // Copy the temp slice (original row i) to row j
             this.depthBuffer.set(this.depthBufferSlice, j_offset);
         }
+        this.depthCamera.projectionMatrix.toArray(this.projectionMatrixArray);
         const depthData = {
             width: this.depthWidth,
             height: this.depthHeight,
             data: this.depthBuffer.buffer,
             rawValueToMeters: 1.0,
+            projectionMatrix: this.projectionMatrixArray,
+            transform: new XRRigidTransform(this.depthCamera.position, this.depthCamera.quaternion),
         };
         this.depth.updateCPUDepthData(depthData, 0);
     }
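`updateCPUDepthData` now also accepts a `projectionMatrix` and an `XRRigidTransform` describing the depth camera, as the simulator supplies above. A minimal sketch of feeding synthetic CPU depth into the `Depth` module (the constant 2 m buffer and the `camera` pose are placeholders; float32 depth options and a WebXR-capable browser providing `XRRigidTransform` are assumed):

```js
// Sketch: hand a 160x160 metric depth buffer to the Depth singleton for view 0.
const width = 160, height = 160;
const buffer = new Float32Array(width * height).fill(2.0); // 2 meters everywhere
Depth.instance.updateCPUDepthData({
  width,
  height,
  data: buffer.buffer,
  rawValueToMeters: 1.0,
  projectionMatrix: camera.projectionMatrix.toArray(),
  transform: new XRRigidTransform(camera.position, camera.quaternion),
}, 0);
```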
@@ -17159,5 +17310,5 @@ class VideoFileStream extends VideoStream {
     }
 }
 
-export { AI, AIOptions, AVERAGE_IPD_METERS, ActiveControllers, Agent, AnimatableNumber, AudioListener, AudioPlayer, BACK, BackgroundMusic, CategoryVolumes, Col, Core, CoreSound, DEFAULT_DEVICE_CAMERA_HEIGHT, DEFAULT_DEVICE_CAMERA_WIDTH, DEFAULT_RGB_TO_DEPTH_PARAMS, DOWN, Depth, DepthMesh, DepthMeshOptions, DepthOptions, DepthTextures, DetectedObject, DetectedPlane, DeviceCameraOptions, DragManager, DragMode, ExitButton, FORWARD, FreestandingSlider, GazeController, Gemini, GeminiOptions, GenerateSkyboxTool, GestureRecognition, GestureRecognitionOptions, GetWeatherTool, Grid, HAND_BONE_IDX_CONNECTION_MAP, HAND_JOINT_COUNT, HAND_JOINT_IDX_CONNECTION_MAP, HAND_JOINT_NAMES, Handedness, Hands, HandsOptions, HorizontalPager, IconButton, IconView, ImageView, Input, InputOptions, Keycodes, LEFT, LEFT_VIEW_ONLY_LAYER, LabelView, Lighting, LightingOptions, LoadingSpinnerManager, MaterialSymbolsView, MeshScript, ModelLoader, ModelViewer, MouseController, NEXT_SIMULATOR_MODE, NUM_HANDS, OCCLUDABLE_ITEMS_LAYER, ObjectDetector, ObjectsOptions, OcclusionPass, OcclusionUtils, OpenAI, OpenAIOptions, Options, PageIndicator, Pager, PagerState, Panel, PanelMesh, Physics, PhysicsOptions, PinchOnButtonAction, PlaneDetector, PlanesOptions, RIGHT, RIGHT_VIEW_ONLY_LAYER, Registry, Reticle, ReticleOptions, RotationRaycastMesh, Row, SIMULATOR_HAND_POSE_NAMES, SIMULATOR_HAND_POSE_TO_JOINTS_LEFT, SIMULATOR_HAND_POSE_TO_JOINTS_RIGHT, SOUND_PRESETS, ScreenshotSynthesizer, Script, ScriptMixin, ScriptsManager, ScrollingTroikaTextView, SetSimulatorModeEvent, ShowHandsAction, Simulator, SimulatorCamera, SimulatorControlMode, SimulatorControllerState, SimulatorControls, SimulatorDepth, SimulatorDepthMaterial, SimulatorHandPose, SimulatorHandPoseChangeRequestEvent, SimulatorHands, SimulatorInterface, SimulatorMediaDeviceInfo, SimulatorMode, SimulatorOptions, SimulatorRenderMode, SimulatorScene, SimulatorUser, SimulatorUserAction, SketchPanel, SkyboxAgent, SoundOptions, SoundSynthesizer, SpatialAudio, SpatialPanel, SpeechRecognizer, SpeechRecognizerOptions, SpeechSynthesizer, SpeechSynthesizerOptions, SplatAnchor, StreamState, TextButton, TextScrollerState, TextView, Tool, UI, UI_OVERLAY_LAYER, UP, UX, User, VIEW_DEPTH_GAP, VerticalPager, VideoFileStream, VideoStream, VideoView, View, VolumeCategory, WaitFrame, WalkTowardsPanelAction, World, WorldOptions, XRButton, XRDeviceCamera, XREffects, XRPass, XRTransitionOptions, XR_BLOCKS_ASSETS_PATH, ZERO_VECTOR3, add, ai, aspectRatios, callInitWithDependencyInjection, clamp, clampRotationToAngle, core, cropImage, extractYaw, getColorHex, getDeltaTime, getUrlParamBool, getUrlParamFloat, getUrlParamInt, getUrlParameter, getVec4ByColorString, getXrCameraLeft, getXrCameraRight, init, initScript, lerp, loadStereoImageAsTextures, loadingSpinnerManager, lookAtRotation, objectIsDescendantOf, parseBase64DataURL, placeObjectAtIntersectionFacingTarget, print, scene, showOnlyInLeftEye, showOnlyInRightEye, showReticleOnDepthMesh, transformRgbToDepthUv, transformRgbUvToWorld, traverseUtil, uninitScript, urlParams, user, world, xrDepthMeshOptions, xrDepthMeshPhysicsOptions, xrDepthMeshVisualizationOptions, xrDeviceCameraEnvironmentContinuousOptions, xrDeviceCameraEnvironmentOptions, xrDeviceCameraUserContinuousOptions, xrDeviceCameraUserOptions };
+export { AI, AIOptions, AVERAGE_IPD_METERS, ActiveControllers, Agent, AnimatableNumber, AudioListener, AudioPlayer, BACK, BackgroundMusic, CategoryVolumes, Col, Core, CoreSound, DEFAULT_DEVICE_CAMERA_HEIGHT, DEFAULT_DEVICE_CAMERA_WIDTH, DEFAULT_RGB_TO_DEPTH_PARAMS, DOWN, Depth, DepthMesh, DepthMeshOptions, DepthOptions, DepthTextures, DetectedObject, DetectedPlane, DeviceCameraOptions, DragManager, DragMode, ExitButton, FORWARD, FreestandingSlider, GazeController, Gemini, GeminiOptions, GenerateSkyboxTool, GestureRecognition, GestureRecognitionOptions, GetWeatherTool, Grid, HAND_BONE_IDX_CONNECTION_MAP, HAND_JOINT_COUNT, HAND_JOINT_IDX_CONNECTION_MAP, HAND_JOINT_NAMES, Handedness, Hands, HandsOptions, HorizontalPager, IconButton, IconView, ImageView, Input, InputOptions, Keycodes, LEFT, LEFT_VIEW_ONLY_LAYER, LabelView, Lighting, LightingOptions, LoadingSpinnerManager, MaterialSymbolsView, MeshScript, ModelLoader, ModelViewer, MouseController, NEXT_SIMULATOR_MODE, NUM_HANDS, OCCLUDABLE_ITEMS_LAYER, ObjectDetector, ObjectsOptions, OcclusionPass, OcclusionUtils, OpenAI, OpenAIOptions, Options, PageIndicator, Pager, PagerState, Panel, PanelMesh, Physics, PhysicsOptions, PinchOnButtonAction, PlaneDetector, PlanesOptions, RIGHT, RIGHT_VIEW_ONLY_LAYER, Registry, Reticle, ReticleOptions, RotationRaycastMesh, Row, SIMULATOR_HAND_POSE_NAMES, SIMULATOR_HAND_POSE_TO_JOINTS_LEFT, SIMULATOR_HAND_POSE_TO_JOINTS_RIGHT, SOUND_PRESETS, ScreenshotSynthesizer, Script, ScriptMixin, ScriptsManager, ScrollingTroikaTextView, SetSimulatorModeEvent, ShowHandsAction, Simulator, SimulatorCamera, SimulatorControlMode, SimulatorControllerState, SimulatorControls, SimulatorDepth, SimulatorDepthMaterial, SimulatorHandPose, SimulatorHandPoseChangeRequestEvent, SimulatorHands, SimulatorInterface, SimulatorMediaDeviceInfo, SimulatorMode, SimulatorOptions, SimulatorRenderMode, SimulatorScene, SimulatorUser, SimulatorUserAction, SketchPanel, SkyboxAgent, SoundOptions, SoundSynthesizer, SpatialAudio, SpatialPanel, SpeechRecognizer, SpeechRecognizerOptions, SpeechSynthesizer, SpeechSynthesizerOptions, SplatAnchor, StreamState, TextButton, TextScrollerState, TextView, Tool, UI, UI_OVERLAY_LAYER, UP, UX, User, VIEW_DEPTH_GAP, VerticalPager, VideoFileStream, VideoStream, VideoView, View, VolumeCategory, WaitFrame, WalkTowardsPanelAction, World, WorldOptions, XRButton, XRDeviceCamera, XREffects, XRPass, XRTransitionOptions, XR_BLOCKS_ASSETS_PATH, ZERO_VECTOR3, add, ai, aspectRatios, callInitWithDependencyInjection, clamp, clampRotationToAngle, core, cropImage, extractYaw, getColorHex, getDeltaTime, getUrlParamBool, getUrlParamFloat, getUrlParamInt, getUrlParameter, getVec4ByColorString, getXrCameraLeft, getXrCameraRight, init, initScript, lerp, loadStereoImageAsTextures, loadingSpinnerManager, lookAtRotation, objectIsDescendantOf, parseBase64DataURL, placeObjectAtIntersectionFacingTarget, print, scene, showOnlyInLeftEye, showOnlyInRightEye, showReticleOnDepthMesh, transformRgbToDepthUv, transformRgbToRenderCameraClip, transformRgbUvToWorld, traverseUtil, uninitScript, urlParams, user, world, xrDepthMeshOptions, xrDepthMeshPhysicsOptions, xrDepthMeshVisualizationOptions, xrDeviceCameraEnvironmentContinuousOptions, xrDeviceCameraEnvironmentOptions, xrDeviceCameraUserContinuousOptions, xrDeviceCameraUserOptions };
 //# sourceMappingURL=xrblocks.js.map