sceneview-mcp 3.4.6 → 3.4.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2024 Thomas Gorisse
3
+ Copyright (c) 2024-2026 Thomas Gorisse
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
package/README.md CHANGED
@@ -3,9 +3,11 @@
3
3
  [![npm version](https://img.shields.io/npm/v/sceneview-mcp?color=6c35aa)](https://www.npmjs.com/package/sceneview-mcp)
4
4
  [![npm downloads](https://img.shields.io/npm/dm/sceneview-mcp?color=blue)](https://www.npmjs.com/package/sceneview-mcp)
5
5
  [![MCP](https://img.shields.io/badge/MCP-v1.12-blue)](https://modelcontextprotocol.io/)
6
- [![License](https://img.shields.io/badge/License-Apache%202.0-green)](https://www.apache.org/licenses/LICENSE-2.0)
6
+ [![License](https://img.shields.io/badge/License-MIT-green)](./LICENSE)
7
7
  [![Node](https://img.shields.io/badge/Node-%3E%3D18-brightgreen)](https://nodejs.org/)
8
8
 
9
+ > **Disclaimer:** This tool generates code suggestions for the SceneView SDK. Generated code is provided "as is" without warranty. Always review generated code before use in production. This is not a substitute for professional software engineering review. See [TERMS.md](./TERMS.md) and [PRIVACY.md](./PRIVACY.md).
10
+
9
11
  The official [Model Context Protocol](https://modelcontextprotocol.io/) server for **SceneView** — giving AI assistants deep knowledge of the SceneView 3D/AR SDK so they generate correct, compilable Kotlin code.
10
12
 
11
13
  ---
@@ -265,6 +267,12 @@ Published to npm on each SceneView release:
265
267
  npm publish --access public
266
268
  ```
267
269
 
270
+ ## Legal
271
+
272
+ - [LICENSE](./LICENSE) — MIT License
273
+ - [TERMS.md](./TERMS.md) — Terms of Service
274
+ - [PRIVACY.md](./PRIVACY.md) — Privacy Policy (no data collected)
275
+
268
276
  ## License
269
277
 
270
- Apache 2.0 same as SceneView.
278
+ MIT — see [LICENSE](./LICENSE).
package/dist/index.js CHANGED
@@ -11,7 +11,19 @@ import { MIGRATION_GUIDE } from "./migration.js";
11
11
  import { fetchKnownIssues } from "./issues.js";
12
12
  import { parseNodeSections, findNodeSection, listNodeTypes } from "./node-reference.js";
13
13
  import { PLATFORM_ROADMAP, BEST_PRACTICES, AR_SETUP_GUIDE, TROUBLESHOOTING_GUIDE } from "./guides.js";
14
+ import { buildPreviewUrl, validatePreviewInput, formatPreviewResponse } from "./preview.js";
14
15
  const __dirname = dirname(fileURLToPath(import.meta.url));
16
// ─── Legal disclaimer ─────────────────────────────────────────────────────────
const DISCLAIMER = '\n\n---\n*Generated code suggestion. Review before use in production. See [TERMS.md](https://github.com/SceneView/sceneview/blob/main/mcp/TERMS.md).*';
/**
 * Append the legal disclaimer to the last text item of an MCP content array.
 * Returns a fresh array (input is never mutated); an empty array passes
 * through untouched so error/empty responses stay clean.
 */
function withDisclaimer(content) {
    if (content.length === 0) {
        return content;
    }
    const head = content.slice(0, -1);
    const tail = content[content.length - 1];
    const stamped = { ...tail, text: `${tail.text}${DISCLAIMER}` };
    return [...head, stamped];
}
15
27
  let API_DOCS;
16
28
  try {
17
29
  API_DOCS = readFileSync(resolve(__dirname, "../llms.txt"), "utf-8");
@@ -205,24 +217,32 @@ server.setRequestHandler(ListToolsRequestSchema, async () => ({
205
217
  },
206
218
  {
207
219
  name: "render_3d_preview",
208
- description: "Generates an interactive 3D preview link for a glTF/GLB model. Returns a URL to sceneview.github.io/preview that renders the model in the browser with orbit controls, AR support, and sharing. Use this when you want to show a 3D model to the user — paste the link in your response and they can click to see it live. Also works with any publicly accessible .glb URL.",
220
+ description: "Generates an interactive 3D preview link. Accepts a model URL, a SceneView code snippet, or both. Returns a URL to sceneview.github.io/preview that renders the model in the browser with orbit controls, AR support, and sharing. For model URLs: embeds a model-viewer link directly. For code snippets: shows the 3D preview with the code in a companion panel. Use this when you want to show a 3D model to the user — paste the link in your response and they can click to see it live.",
209
221
  inputSchema: {
210
222
  type: "object",
211
223
  properties: {
212
224
  modelUrl: {
213
225
  type: "string",
214
- description: "Public URL to a .glb or .gltf model file. Must be HTTPS and CORS-enabled.",
226
+ description: "Public URL to a .glb or .gltf model file. Must be HTTPS and CORS-enabled. If omitted, a default model is used.",
227
+ },
228
+ codeSnippet: {
229
+ type: "string",
230
+ description: "SceneView code snippet (Kotlin or Swift) to display alongside the 3D preview in a companion panel. Useful when showing generated code together with a live preview.",
215
231
  },
216
232
  autoRotate: {
217
233
  type: "boolean",
218
- description: "Auto-rotate the model (default: true)",
234
+ description: "Auto-rotate the model (default: true).",
219
235
  },
220
236
  ar: {
221
237
  type: "boolean",
222
- description: "Enable AR mode on supported devices (default: true)",
238
+ description: "Enable AR mode on supported devices (default: true).",
239
+ },
240
+ title: {
241
+ type: "string",
242
+ description: "Custom title shown above the preview.",
223
243
  },
224
244
  },
225
- required: ["modelUrl"],
245
+ required: [],
226
246
  },
227
247
  },
228
248
  ],
@@ -262,7 +282,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
262
282
  const codeLang = isIos ? "swift" : "kotlin";
263
283
  const codeLabel = isIos ? "**Swift (SwiftUI):**" : "**Kotlin (Jetpack Compose):**";
264
284
  return {
265
- content: [
285
+ content: withDisclaimer([
266
286
  {
267
287
  type: "text",
268
288
  text: [
@@ -281,7 +301,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
281
301
  `> ${sample.prompt}`,
282
302
  ].join("\n"),
283
303
  },
284
- ],
304
+ ]),
285
305
  };
286
306
  }
287
307
  // ── list_samples ──────────────────────────────────────────────────────────
@@ -307,14 +327,14 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
307
327
  return `### \`${s.id}\`\n**${s.title}**${s.language === "swift" ? " (Swift/iOS)" : ""}\n${s.description}\n*Tags:* ${s.tags.join(", ")}\n${depLabel} \`${s.dependency}\`\n\nCall \`get_sample("${s.id}")\` for the full code.`;
308
328
  })
309
329
  .join("\n\n---\n\n");
310
- return { content: [{ type: "text", text: header + rows }] };
330
+ return { content: withDisclaimer([{ type: "text", text: header + rows }]) };
311
331
  }
312
332
  // ── get_setup ─────────────────────────────────────────────────────────────
313
333
  case "get_setup": {
314
334
  const type = request.params.arguments?.type;
315
335
  if (type === "3d") {
316
336
  return {
317
- content: [
337
+ content: withDisclaimer([
318
338
  {
319
339
  type: "text",
320
340
  text: [
@@ -330,12 +350,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
330
350
  `No manifest changes required for 3D-only scenes.`,
331
351
  ].join("\n"),
332
352
  },
333
- ],
353
+ ]),
334
354
  };
335
355
  }
336
356
  if (type === "ar") {
337
357
  return {
338
- content: [
358
+ content: withDisclaimer([
339
359
  {
340
360
  type: "text",
341
361
  text: [
@@ -358,7 +378,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
358
378
  `\`\`\``,
359
379
  ].join("\n"),
360
380
  },
361
- ],
381
+ ]),
362
382
  };
363
383
  }
364
384
  return {
@@ -377,11 +397,11 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
377
397
  }
378
398
  const issues = validateCode(code);
379
399
  const report = formatValidationReport(issues);
380
- return { content: [{ type: "text", text: report }] };
400
+ return { content: withDisclaimer([{ type: "text", text: report }]) };
381
401
  }
382
402
  // ── get_migration_guide ───────────────────────────────────────────────────
383
403
  case "get_migration_guide": {
384
- return { content: [{ type: "text", text: MIGRATION_GUIDE }] };
404
+ return { content: withDisclaimer([{ type: "text", text: MIGRATION_GUIDE }]) };
385
405
  }
386
406
  // ── get_node_reference ────────────────────────────────────────────────────
387
407
  case "get_node_reference": {
@@ -410,7 +430,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
410
430
  };
411
431
  }
412
432
  return {
413
- content: [
433
+ content: withDisclaimer([
414
434
  {
415
435
  type: "text",
416
436
  text: [
@@ -419,33 +439,33 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
419
439
  section.content,
420
440
  ].join("\n"),
421
441
  },
422
- ],
442
+ ]),
423
443
  };
424
444
  }
425
445
  // ── get_platform_roadmap ────────────────────────────────────────────────
426
446
  case "get_platform_roadmap": {
427
- return { content: [{ type: "text", text: PLATFORM_ROADMAP }] };
447
+ return { content: withDisclaimer([{ type: "text", text: PLATFORM_ROADMAP }]) };
428
448
  }
429
449
  // ── get_best_practices ───────────────────────────────────────────────────
430
450
  case "get_best_practices": {
431
451
  const category = request.params.arguments?.category || "all";
432
452
  const text = BEST_PRACTICES[category] ?? BEST_PRACTICES["all"];
433
- return { content: [{ type: "text", text }] };
453
+ return { content: withDisclaimer([{ type: "text", text }]) };
434
454
  }
435
455
  // ── get_ar_setup ─────────────────────────────────────────────────────────
436
456
  case "get_ar_setup": {
437
- return { content: [{ type: "text", text: AR_SETUP_GUIDE }] };
457
+ return { content: withDisclaimer([{ type: "text", text: AR_SETUP_GUIDE }]) };
438
458
  }
439
459
  // ── get_troubleshooting ──────────────────────────────────────────────────
440
460
  case "get_troubleshooting": {
441
- return { content: [{ type: "text", text: TROUBLESHOOTING_GUIDE }] };
461
+ return { content: withDisclaimer([{ type: "text", text: TROUBLESHOOTING_GUIDE }]) };
442
462
  }
443
463
  // ── get_ios_setup ─────────────────────────────────────────────────────────
444
464
  case "get_ios_setup": {
445
465
  const iosType = request.params.arguments?.type;
446
466
  if (iosType === "3d") {
447
467
  return {
448
- content: [
468
+ content: withDisclaimer([
449
469
  {
450
470
  type: "text",
451
471
  text: [
@@ -525,12 +545,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
525
545
  `No manifest or permission changes needed for 3D-only scenes.`,
526
546
  ].join("\n"),
527
547
  },
528
- ],
548
+ ]),
529
549
  };
530
550
  }
531
551
  if (iosType === "ar") {
532
552
  return {
533
- content: [
553
+ content: withDisclaimer([
534
554
  {
535
555
  type: "text",
536
556
  text: [
@@ -616,7 +636,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
616
636
  `\`\`\``,
617
637
  ].join("\n"),
618
638
  },
619
- ],
639
+ ]),
620
640
  };
621
641
  }
622
642
  return {
@@ -627,7 +647,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
627
647
  // ── get_web_setup ────────────────────────────────────────────────────────
628
648
  case "get_web_setup": {
629
649
  return {
630
- content: [
650
+ content: withDisclaimer([
631
651
  {
632
652
  type: "text",
633
653
  text: [
@@ -712,47 +732,26 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
712
732
  `- glTF/GLB format only (same as Android)`,
713
733
  ].join("\n"),
714
734
  },
715
- ],
735
+ ]),
716
736
  };
717
737
  }
718
738
  // ── render_3d_preview ──────────────────────────────────────────────────
719
739
  case "render_3d_preview": {
720
740
  const modelUrl = request.params.arguments?.modelUrl;
721
- const autoRotate = request.params.arguments?.autoRotate ?? true;
722
- const ar = request.params.arguments?.ar ?? true;
723
- if (!modelUrl) {
741
+ const codeSnippet = request.params.arguments?.codeSnippet;
742
+ const autoRotate = request.params.arguments?.autoRotate;
743
+ const ar = request.params.arguments?.ar;
744
+ const title = request.params.arguments?.title;
745
+ const validationError = validatePreviewInput(modelUrl, codeSnippet);
746
+ if (validationError) {
724
747
  return {
725
- content: [{ type: "text", text: "Error: modelUrl is required. Provide a public HTTPS URL to a .glb or .gltf file." }],
748
+ content: [{ type: "text", text: `Error: ${validationError}` }],
726
749
  isError: true,
727
750
  };
728
751
  }
729
- const params = new URLSearchParams();
730
- params.set("model", modelUrl);
731
- if (!autoRotate)
732
- params.set("rotate", "false");
733
- if (!ar)
734
- params.set("ar", "false");
735
- const previewUrl = `https://sceneview.github.io/preview?${params.toString()}`;
736
- return {
737
- content: [{
738
- type: "text",
739
- text: `## 3D Preview
740
-
741
- **[Click to view the 3D model interactively →](${previewUrl})**
742
-
743
- The link opens an interactive 3D viewer where you can:
744
- - 🖱️ Drag to orbit, scroll to zoom
745
- - 📱 "View in AR" on mobile devices (ARCore/ARKit)
746
- - 🔗 Share the link with anyone
747
-
748
- **Preview URL:** ${previewUrl}
749
-
750
- **Model:** ${modelUrl}
751
-
752
- ---
753
- *Powered by SceneView — 3D & AR for every platform*`,
754
- }],
755
- };
752
+ const result = buildPreviewUrl({ modelUrl, codeSnippet, autoRotate, ar, title });
753
+ const text = formatPreviewResponse(result);
754
+ return { content: withDisclaimer([{ type: "text", text }]) };
756
755
  }
757
756
  default:
758
757
  return {
@@ -0,0 +1,78 @@
1
// ─── 3D Preview Link Generator ───────────────────────────────────────────────
//
// Generates embeddable preview URLs for sceneview.github.io/preview.
// Two modes:
//   1. Model URL    → direct model-viewer embed link
//   2. Code snippet → embed link with default model + code panel
const PREVIEW_BASE = "https://sceneview.github.io/preview";
const DEFAULT_MODEL_URL = "https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/DamagedHelmet/glTF-Binary/DamagedHelmet.glb";
/**
 * Build a sceneview.github.io/preview URL from the given options.
 *
 * - If `modelUrl` is supplied it is embedded directly.
 * - If only `codeSnippet` is supplied a default model is used and the code is
 *   passed via the `code` query parameter so the preview page can show it in a
 *   companion panel.
 * - If both are supplied the model URL takes priority and the code is included.
 *
 * @param {{modelUrl?: string, codeSnippet?: string, autoRotate?: boolean, ar?: boolean, title?: string}} options
 * @returns {{previewUrl: string, modelUrl: string, hasCode: boolean, title: string}}
 */
export function buildPreviewUrl(options) {
    const { modelUrl, codeSnippet, autoRotate, ar, title } = options;
    const model = modelUrl || DEFAULT_MODEL_URL;
    const snippet = codeSnippet?.trim();
    const hasCode = Boolean(snippet);
    // Default title depends on the mode: code-only previews get a code-centric
    // heading, everything else is a plain model preview.
    const resolvedTitle = title || (hasCode && !modelUrl ? "SceneView Code Preview" : "3D Model Preview");
    const params = new URLSearchParams();
    params.set("model", model);
    // Viewer defaults are "on"; only emit the flags when explicitly disabled.
    if (autoRotate === false) {
        params.set("rotate", "false");
    }
    if (ar === false) {
        params.set("ar", "false");
    }
    if (title) {
        params.set("title", title);
    }
    if (hasCode) {
        params.set("code", snippet);
    }
    return {
        previewUrl: `${PREVIEW_BASE}?${params.toString()}`,
        modelUrl: model,
        hasCode,
        title: resolvedTitle,
    };
}
35
/**
 * Validate preview input — at least one of `modelUrl` or `codeSnippet` must be
 * provided. A whitespace-only snippet does not count, matching how
 * `buildPreviewUrl` decides `hasCode` (otherwise validation would pass but the
 * preview would silently contain neither a user model nor code).
 *
 * @param {string|undefined} modelUrl - Optional public URL to a .glb/.gltf file.
 * @param {string|undefined} codeSnippet - Optional SceneView code snippet.
 * @returns {string|null} Human-readable error message, or null when valid.
 */
export function validatePreviewInput(modelUrl, codeSnippet) {
    if (!modelUrl && !codeSnippet?.trim()) {
        return "At least one of `modelUrl` or `codeSnippet` is required.";
    }
    if (modelUrl) {
        if (!modelUrl.startsWith("https://") && !modelUrl.startsWith("http://")) {
            return "modelUrl must be an HTTP(S) URL.";
        }
        // Accept query strings and hash fragments after the extension
        // (e.g. model.glb?v=2 or model.glb#anchor), case-insensitively.
        if (!/\.(glb|gltf)([?#]|$)/i.test(modelUrl)) {
            return "modelUrl should point to a .glb or .gltf file.";
        }
    }
    return null;
}
52
/**
 * Format the preview result as a rich text response for the MCP tool.
 * Renders a markdown heading, a clickable viewer link, usage hints, the raw
 * preview/model URLs, an optional note when a code snippet is attached, and a
 * "Powered by SceneView" footer.
 *
 * @param {{title: string, previewUrl: string, modelUrl: string, hasCode: boolean}} result
 * @returns {string} Markdown text, lines joined with "\n".
 */
export function formatPreviewResponse(result) {
    const { title, previewUrl, modelUrl, hasCode } = result;
    const sections = [
        `## ${title}`,
        ``,
        `**[Click to view the 3D model interactively \u2192](${previewUrl})**`,
        ``,
        `The link opens an interactive 3D viewer where you can:`,
        `- Drag to orbit, scroll to zoom`,
        `- "View in AR" on mobile devices (ARCore/ARKit)`,
        `- Share the link with anyone`,
        ``,
        `**Preview URL:** ${previewUrl}`,
        ``,
        `**Model:** ${modelUrl}`,
        // Only mention the companion panel when a snippet was actually embedded.
        ...(hasCode
            ? [``, `*The code snippet is included in the preview page as a companion panel.*`]
            : []),
        ``,
        `---`,
        `*Powered by SceneView \u2014 3D & AR for every platform*`,
    ];
    return sections.join("\n");
}
package/dist/samples.js CHANGED
@@ -181,6 +181,50 @@ fun PointCloudScreen() {
181
181
  ) {
182
182
  // Render point cloud model instances at detected positions
183
183
  }
184
+ }`,
185
+ },
186
+ "ar-face-mesh": {
187
+ id: "ar-face-mesh",
188
+ title: "AR Face Mesh",
189
+ description: "AR face tracking with AugmentedFaceNode — applies a textured mesh overlay to detected faces using the front camera.",
190
+ tags: ["ar", "face-tracking", "model"],
191
+ dependency: "io.github.sceneview:arsceneview:3.3.0",
192
+ prompt: "Create an AR screen that uses the front camera to detect faces and overlay a 3D mesh on them. Use SceneView `io.github.sceneview:arsceneview:3.3.0`.",
193
+ code: `@Composable
194
+ fun ARFaceMeshScreen() {
195
+ val engine = rememberEngine()
196
+ val modelLoader = rememberModelLoader(engine)
197
+ val materialLoader = rememberMaterialLoader(engine)
198
+ var trackedFaces by remember { mutableStateOf(listOf<AugmentedFace>()) }
199
+
200
+ val faceMaterial = remember(materialLoader) {
201
+ materialLoader.createColorInstance(
202
+ color = Color(0.8f, 0.6f, 0.4f, 0.5f),
203
+ metallic = 0f,
204
+ roughness = 0.9f
205
+ )
206
+ }
207
+
208
+ ARScene(
209
+ modifier = Modifier.fillMaxSize(),
210
+ engine = engine,
211
+ modelLoader = modelLoader,
212
+ sessionFeatures = setOf(Session.Feature.FRONT_CAMERA),
213
+ sessionConfiguration = { _, config ->
214
+ config.augmentedFaceMode = Config.AugmentedFaceMode.MESH3D
215
+ },
216
+ onSessionUpdated = { session, _ ->
217
+ trackedFaces = session.getAllTrackables(AugmentedFace::class.java)
218
+ .filter { it.trackingState == TrackingState.TRACKING }
219
+ }
220
+ ) {
221
+ trackedFaces.forEach { face ->
222
+ AugmentedFaceNode(
223
+ augmentedFace = face,
224
+ meshMaterialInstance = faceMaterial
225
+ )
226
+ }
227
+ }
184
228
  }`,
185
229
  },
186
230
  "gltf-camera": {
@@ -242,6 +286,67 @@ fun CameraManipulatorScreen() {
242
286
  ModelNode(modelInstance = instance, scaleToUnits = 1.0f)
243
287
  }
244
288
  }
289
+ }`,
290
+ },
291
+ "camera-animation": {
292
+ id: "camera-animation",
293
+ title: "Camera Animation",
294
+ description: "Animated camera flythrough around a 3D model — smooth orbit using LaunchedEffect and trigonometric interpolation.",
295
+ tags: ["3d", "camera", "animation", "model"],
296
+ dependency: "io.github.sceneview:sceneview:3.3.0",
297
+ prompt: "Create a 3D scene with a camera that automatically orbits around a model in a smooth circle. Include a play/pause button. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
298
+ code: `@Composable
299
+ fun CameraAnimationScreen() {
300
+ val engine = rememberEngine()
301
+ val modelLoader = rememberModelLoader(engine)
302
+ val environmentLoader = rememberEnvironmentLoader(engine)
303
+ var isOrbiting by remember { mutableStateOf(true) }
304
+ var angle by remember { mutableFloatStateOf(0f) }
305
+
306
+ val cameraNode = rememberCameraNode(engine) {
307
+ position = Position(x = 0f, y = 1.5f, z = 4f)
308
+ lookAt(Position(0f, 0f, 0f))
309
+ }
310
+
311
+ // Animate camera orbit
312
+ LaunchedEffect(isOrbiting) {
313
+ while (isOrbiting) {
314
+ withFrameNanos { _ ->
315
+ angle += 0.5f
316
+ val radians = Math.toRadians(angle.toDouble())
317
+ cameraNode.position = Position(
318
+ x = (4f * sin(radians)).toFloat(),
319
+ y = 1.5f,
320
+ z = (4f * cos(radians)).toFloat()
321
+ )
322
+ cameraNode.lookAt(Position(0f, 0f, 0f))
323
+ }
324
+ }
325
+ }
326
+
327
+ Column {
328
+ Scene(
329
+ modifier = Modifier.weight(1f).fillMaxWidth(),
330
+ engine = engine,
331
+ modelLoader = modelLoader,
332
+ cameraNode = cameraNode,
333
+ environment = rememberEnvironment(environmentLoader) {
334
+ environmentLoader.createHDREnvironment("environments/sky_2k.hdr")
335
+ ?: createEnvironment(environmentLoader)
336
+ },
337
+ mainLightNode = rememberMainLightNode(engine) { intensity = 100_000f }
338
+ ) {
339
+ rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { instance ->
340
+ ModelNode(modelInstance = instance, scaleToUnits = 1.0f)
341
+ }
342
+ }
343
+ Button(
344
+ onClick = { isOrbiting = !isOrbiting },
345
+ modifier = Modifier.align(Alignment.CenterHorizontally).padding(16.dp)
346
+ ) {
347
+ Text(if (isOrbiting) "Stop Orbit" else "Start Orbit")
348
+ }
349
+ }
245
350
  }`,
246
351
  },
247
352
  "autopilot-demo": {
@@ -466,6 +571,465 @@ fun PostProcessingScreen() {
466
571
  }
467
572
  // Configure view.bloomOptions, view.vignetteOptions, etc.
468
573
  // See samples/post-processing for full interactive controls
574
+ }`,
575
+ },
576
+ "video-texture": {
577
+ id: "video-texture",
578
+ title: "Video Texture",
579
+ description: "Video playback on a 3D plane using VideoNode with MediaPlayer — supports looping, chroma-key, and auto-sizing.",
580
+ tags: ["3d", "video", "model"],
581
+ dependency: "io.github.sceneview:sceneview:3.3.0",
582
+ prompt: "Create a 3D scene with a video playing on a floating 3D plane. Include play/pause controls and chroma-key support. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
583
+ code: `@Composable
584
+ fun VideoTextureScreen() {
585
+ val context = LocalContext.current
586
+ val engine = rememberEngine()
587
+ var isPlaying by remember { mutableStateOf(true) }
588
+
589
+ val player = remember {
590
+ MediaPlayer().apply {
591
+ setDataSource(context, Uri.parse("android.resource://\${context.packageName}/raw/video"))
592
+ isLooping = true
593
+ prepare()
594
+ start()
595
+ }
596
+ }
597
+ DisposableEffect(Unit) { onDispose { player.release() } }
598
+
599
+ Column {
600
+ Scene(
601
+ modifier = Modifier.weight(1f).fillMaxWidth(),
602
+ engine = engine
603
+ ) {
604
+ VideoNode(
605
+ player = player,
606
+ // size = null auto-sizes from video aspect ratio (longer edge = 1 unit)
607
+ position = Position(z = -2f),
608
+ chromaKeyColor = null // set to android.graphics.Color.GREEN for green-screen
609
+ )
610
+ }
611
+ Row(
612
+ modifier = Modifier.fillMaxWidth().padding(16.dp),
613
+ horizontalArrangement = Arrangement.Center,
614
+ verticalAlignment = Alignment.CenterVertically
615
+ ) {
616
+ Button(onClick = {
617
+ if (isPlaying) player.pause() else player.start()
618
+ isPlaying = !isPlaying
619
+ }) {
620
+ Text(if (isPlaying) "Pause" else "Play")
621
+ }
622
+ }
623
+ }
624
+ }`,
625
+ },
626
+ "multi-model-scene": {
627
+ id: "multi-model-scene",
628
+ title: "Multi-Model Scene",
629
+ description: "Scene with multiple 3D models loaded independently, positioned and scaled to create a complete environment.",
630
+ tags: ["3d", "model", "multi-model", "environment"],
631
+ dependency: "io.github.sceneview:sceneview:3.3.0",
632
+ prompt: "Create a 3D scene that loads multiple GLB models (a car, a building, and trees) and positions them to form a street scene. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
633
+ code: `@Composable
634
+ fun MultiModelScreen() {
635
+ val engine = rememberEngine()
636
+ val modelLoader = rememberModelLoader(engine)
637
+ val materialLoader = rememberMaterialLoader(engine)
638
+ val environmentLoader = rememberEnvironmentLoader(engine)
639
+
640
+ Scene(
641
+ modifier = Modifier.fillMaxSize(),
642
+ engine = engine,
643
+ modelLoader = modelLoader,
644
+ cameraManipulator = rememberCameraManipulator(
645
+ orbitHomePosition = Position(x = 0f, y = 3f, z = 8f),
646
+ targetPosition = Position(0f, 0f, 0f)
647
+ ),
648
+ environment = rememberEnvironment(environmentLoader) {
649
+ environmentLoader.createHDREnvironment("environments/sky_2k.hdr")
650
+ ?: createEnvironment(environmentLoader)
651
+ },
652
+ mainLightNode = rememberMainLightNode(engine) { intensity = 100_000f }
653
+ ) {
654
+ // Ground plane
655
+ val groundMat = remember(materialLoader) {
656
+ materialLoader.createColorInstance(Color.DarkGray, roughness = 0.9f)
657
+ }
658
+ PlaneNode(size = Size(20f, 20f), materialInstance = groundMat)
659
+
660
+ // Car in the center
661
+ rememberModelInstance(modelLoader, "models/car.glb")?.let { car ->
662
+ ModelNode(
663
+ modelInstance = car,
664
+ scaleToUnits = 2.0f,
665
+ position = Position(x = 0f, y = 0f, z = 0f),
666
+ autoAnimate = true
667
+ )
668
+ }
669
+
670
+ // Building on the left
671
+ rememberModelInstance(modelLoader, "models/building.glb")?.let { building ->
672
+ ModelNode(
673
+ modelInstance = building,
674
+ scaleToUnits = 5.0f,
675
+ position = Position(x = -6f, y = 0f, z = -3f)
676
+ )
677
+ }
678
+
679
+ // Trees along the right side
680
+ for (i in 0..2) {
681
+ rememberModelInstance(modelLoader, "models/tree.glb")?.let { tree ->
682
+ ModelNode(
683
+ modelInstance = tree,
684
+ scaleToUnits = 3.0f,
685
+ position = Position(x = 5f, y = 0f, z = i * -3f)
686
+ )
687
+ }
688
+ }
689
+ }
690
+ }`,
691
+ },
692
+ "gesture-interaction": {
693
+ id: "gesture-interaction",
694
+ title: "Gesture Interaction",
695
+ description: "Full gesture handling — tap to select, double-tap to scale, long-press for info, pinch-to-scale, drag-to-move on editable nodes.",
696
+ tags: ["3d", "gestures", "model"],
697
+ dependency: "io.github.sceneview:sceneview:3.3.0",
698
+ prompt: "Create a 3D scene with a model that responds to tap (select), double-tap (scale up), long-press (show info), and supports pinch-to-scale and drag-to-move. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
699
+ code: `@Composable
700
+ fun GestureInteractionScreen() {
701
+ val engine = rememberEngine()
702
+ val modelLoader = rememberModelLoader(engine)
703
+ val environmentLoader = rememberEnvironmentLoader(engine)
704
+ var selectedNode by remember { mutableStateOf<String?>(null) }
705
+ var infoText by remember { mutableStateOf("Tap a model to select it") }
706
+
707
+ Box(modifier = Modifier.fillMaxSize()) {
708
+ Scene(
709
+ modifier = Modifier.fillMaxSize(),
710
+ engine = engine,
711
+ modelLoader = modelLoader,
712
+ cameraManipulator = rememberCameraManipulator(),
713
+ environment = rememberEnvironment(environmentLoader) {
714
+ environmentLoader.createHDREnvironment("environments/sky_2k.hdr")
715
+ ?: createEnvironment(environmentLoader)
716
+ },
717
+ onGestureListener = rememberOnGestureListener(
718
+ onSingleTapConfirmed = { event, node ->
719
+ selectedNode = node?.name
720
+ infoText = if (node != null) "Selected: \${node.name}" else "Tap a model to select it"
721
+ },
722
+ onDoubleTap = { event, node ->
723
+ node?.let {
724
+ it.scale = if (it.scale.x > 1.5f) Scale(1f) else Scale(2f)
725
+ infoText = "Double-tap: toggled scale"
726
+ }
727
+ },
728
+ onLongPress = { event, node ->
729
+ node?.let {
730
+ infoText = "Position: \${it.worldPosition}, Scale: \${it.scale}"
731
+ }
732
+ }
733
+ )
734
+ ) {
735
+ rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { instance ->
736
+ ModelNode(
737
+ modelInstance = instance,
738
+ scaleToUnits = 1.0f,
739
+ isEditable = true, // enables pinch-to-scale and drag-to-move
740
+ autoAnimate = true
741
+ )
742
+ }
743
+ }
744
+
745
+ // Info overlay
746
+ Text(
747
+ text = infoText,
748
+ modifier = Modifier.align(Alignment.TopCenter).padding(24.dp)
749
+ .background(MaterialTheme.colorScheme.surface.copy(alpha = 0.8f), RoundedCornerShape(8.dp))
750
+ .padding(12.dp),
751
+ style = MaterialTheme.typography.bodyMedium
752
+ )
753
+ }
754
+ }`,
755
+ },
756
+ "environment-lighting": {
757
+ id: "environment-lighting",
758
+ title: "Environment & Lighting",
759
+ description: "Complete lighting setup — HDR environment (IBL + skybox), main directional light, point light, and spot light with LightNode.",
760
+ tags: ["3d", "environment", "lighting", "model"],
761
+ dependency: "io.github.sceneview:sceneview:3.3.0",
762
+ prompt: "Create a 3D scene with full HDR environment lighting (IBL + skybox), a directional sun light, a red point light, and a blue spot light. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
763
+ code: `@Composable
764
+ fun EnvironmentLightingScreen() {
765
+ val engine = rememberEngine()
766
+ val modelLoader = rememberModelLoader(engine)
767
+ val materialLoader = rememberMaterialLoader(engine)
768
+ val environmentLoader = rememberEnvironmentLoader(engine)
769
+
770
+ Scene(
771
+ modifier = Modifier.fillMaxSize(),
772
+ engine = engine,
773
+ modelLoader = modelLoader,
774
+ cameraManipulator = rememberCameraManipulator(
775
+ orbitHomePosition = Position(x = 0f, y = 2f, z = 5f),
776
+ targetPosition = Position(0f, 0f, 0f)
777
+ ),
778
+ // HDR environment provides both IBL (indirect lighting) and skybox (background)
779
+ environment = rememberEnvironment(environmentLoader) {
780
+ environmentLoader.createHDREnvironment("environments/sky_2k.hdr")
781
+ ?: createEnvironment(environmentLoader)
782
+ },
783
+ // Main directional light (sun)
784
+ mainLightNode = rememberMainLightNode(engine) {
785
+ intensity = 100_000f
786
+ // castShadows is true by default for the main light
787
+ }
788
+ ) {
789
+ // Floor to receive shadows
790
+ val floorMat = remember(materialLoader) {
791
+ materialLoader.createColorInstance(Color.LightGray, roughness = 0.8f)
792
+ }
793
+ PlaneNode(size = Size(10f, 10f), materialInstance = floorMat)
794
+
795
+ // Model
796
+ rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { instance ->
797
+ ModelNode(modelInstance = instance, scaleToUnits = 1.0f, position = Position(y = 0.5f))
798
+ }
799
+
800
+ // Red point light on the left
801
+ LightNode(
802
+ type = LightManager.Type.POINT,
803
+ apply = {
804
+ color(1.0f, 0.2f, 0.2f)
805
+ intensity(200_000f)
806
+ falloff(5.0f)
807
+ },
808
+ position = Position(x = -2f, y = 2f, z = 1f)
809
+ )
810
+
811
+ // Blue spot light on the right
812
+ LightNode(
813
+ type = LightManager.Type.SPOT,
814
+ apply = {
815
+ color(0.2f, 0.4f, 1.0f)
816
+ intensity(300_000f)
817
+ falloff(8.0f)
818
+ castShadows(true)
819
+ },
820
+ position = Position(x = 2f, y = 3f, z = 1f)
821
+ )
822
+ }
823
+ }`,
824
+ },
825
+ "procedural-geometry": {
826
+ id: "procedural-geometry",
827
+ title: "Procedural Geometry",
828
+ description: "Procedural shapes — CubeNode, SphereNode, CylinderNode, PlaneNode — with PBR materials (metallic, roughness, color).",
829
+ tags: ["3d", "geometry", "model"],
830
+ dependency: "io.github.sceneview:sceneview:3.3.0",
831
+ prompt: "Create a 3D scene showing procedural geometry shapes (cube, sphere, cylinder, plane) with different PBR materials. No model files needed. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
832
+ code: `@Composable
833
+ fun ProceduralGeometryScreen() {
834
+ val engine = rememberEngine()
835
+ val materialLoader = rememberMaterialLoader(engine)
836
+ val environmentLoader = rememberEnvironmentLoader(engine)
837
+
838
+ Scene(
839
+ modifier = Modifier.fillMaxSize(),
840
+ engine = engine,
841
+ cameraManipulator = rememberCameraManipulator(
842
+ orbitHomePosition = Position(x = 0f, y = 2f, z = 6f),
843
+ targetPosition = Position(0f, 0.5f, 0f)
844
+ ),
845
+ environment = rememberEnvironment(environmentLoader) {
846
+ environmentLoader.createHDREnvironment("environments/sky_2k.hdr")
847
+ ?: createEnvironment(environmentLoader)
848
+ },
849
+ mainLightNode = rememberMainLightNode(engine) { intensity = 100_000f }
850
+ ) {
851
+ // Floor
852
+ val floorMat = remember(materialLoader) {
853
+ materialLoader.createColorInstance(Color.DarkGray, roughness = 0.9f)
854
+ }
855
+ PlaneNode(size = Size(8f, 8f), materialInstance = floorMat)
856
+
857
+ // Red matte cube
858
+ val redMat = remember(materialLoader) {
859
+ materialLoader.createColorInstance(Color.Red, metallic = 0f, roughness = 0.6f)
860
+ }
861
+ CubeNode(
862
+ size = Size(0.6f),
863
+ center = Position(0f, 0.3f, 0f),
864
+ materialInstance = redMat,
865
+ position = Position(x = -2f)
866
+ )
867
+
868
+ // Chrome sphere
869
+ val chromeMat = remember(materialLoader) {
870
+ materialLoader.createColorInstance(Color.Gray, metallic = 1f, roughness = 0.05f, reflectance = 0.9f)
871
+ }
872
+ SphereNode(
873
+ radius = 0.4f,
874
+ materialInstance = chromeMat,
875
+ position = Position(x = -0.7f, y = 0.4f)
876
+ )
877
+
878
+ // Green cylinder
879
+ val greenMat = remember(materialLoader) {
880
+ materialLoader.createColorInstance(Color.Green, metallic = 0.2f, roughness = 0.4f)
881
+ }
882
+ CylinderNode(
883
+ radius = 0.25f,
884
+ height = 0.8f,
885
+ materialInstance = greenMat,
886
+ position = Position(x = 0.7f, y = 0.4f)
887
+ )
888
+
889
+ // Gold sphere
890
+ val goldMat = remember(materialLoader) {
891
+ materialLoader.createColorInstance(
892
+ Color(1f, 0.84f, 0f),
893
+ metallic = 1f,
894
+ roughness = 0.3f
895
+ )
896
+ }
897
+ SphereNode(
898
+ radius = 0.35f,
899
+ materialInstance = goldMat,
900
+ position = Position(x = 2f, y = 0.35f)
901
+ )
902
+ }
903
+ }`,
904
+ },
905
+ "compose-ui-3d": {
906
+ id: "compose-ui-3d",
907
+ title: "Compose UI in 3D",
908
+ description: "Embed interactive Jetpack Compose UI (Cards, Buttons, Text) inside 3D space using ViewNode.",
909
+ tags: ["3d", "compose-ui", "text"],
910
+ dependency: "io.github.sceneview:sceneview:3.3.0",
911
+ prompt: "Create a 3D scene with interactive Compose UI elements (Card with text and a button) floating in 3D space using ViewNode. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
912
+ code: `@Composable
913
+ fun ComposeUI3DScreen() {
914
+ val engine = rememberEngine()
915
+ val modelLoader = rememberModelLoader(engine)
916
+ val windowManager = rememberViewNodeManager()
917
+ var clickCount by remember { mutableIntStateOf(0) }
918
+
919
+ Scene(
920
+ modifier = Modifier.fillMaxSize(),
921
+ engine = engine,
922
+ modelLoader = modelLoader,
923
+ cameraManipulator = rememberCameraManipulator(),
924
+ viewNodeWindowManager = windowManager
925
+ ) {
926
+ // 3D model behind the UI
927
+ rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { instance ->
928
+ ModelNode(modelInstance = instance, scaleToUnits = 1.0f, position = Position(z = -1f))
929
+ }
930
+
931
+ // Floating Compose Card in 3D space
932
+ ViewNode(
933
+ windowManager = windowManager,
934
+ position = Position(x = 0f, y = 1.2f, z = 0.5f)
935
+ ) {
936
+ Card(
937
+ modifier = Modifier.width(200.dp).padding(8.dp),
938
+ colors = CardDefaults.cardColors(
939
+ containerColor = MaterialTheme.colorScheme.surface.copy(alpha = 0.9f)
940
+ )
941
+ ) {
942
+ Column(modifier = Modifier.padding(16.dp)) {
943
+ Text("Hello 3D World!", style = MaterialTheme.typography.titleMedium)
944
+ Text("Clicks: \$clickCount", style = MaterialTheme.typography.bodySmall)
945
+ Spacer(Modifier.height(8.dp))
946
+ Button(onClick = { clickCount++ }) {
947
+ Text("Click Me")
948
+ }
949
+ }
950
+ }
951
+ }
952
+ }
953
+ }`,
954
+ },
955
+ "node-hierarchy": {
956
+ id: "node-hierarchy",
957
+ title: "Node Hierarchy",
958
+ description: "Parent-child node relationships — a spinning solar system with planet groups orbiting a central sun.",
959
+ tags: ["3d", "hierarchy", "geometry", "animation"],
960
+ dependency: "io.github.sceneview:sceneview:3.3.0",
961
+ prompt: "Create a 3D solar system where planets orbit a sun using parent-child node hierarchies. Each planet group rotates independently. Use SceneView `io.github.sceneview:sceneview:3.3.0`.",
962
+ code: `@Composable
963
+ fun NodeHierarchyScreen() {
964
+ val engine = rememberEngine()
965
+ val materialLoader = rememberMaterialLoader(engine)
966
+ val environmentLoader = rememberEnvironmentLoader(engine)
967
+ var earthAngle by remember { mutableFloatStateOf(0f) }
968
+ var marsAngle by remember { mutableFloatStateOf(0f) }
969
+
970
+ // Animate planet orbits
971
+ LaunchedEffect(Unit) {
972
+ while (true) {
973
+ withFrameNanos { _ ->
974
+ earthAngle += 0.3f
975
+ marsAngle += 0.18f
976
+ }
977
+ }
978
+ }
979
+
980
+ Scene(
981
+ modifier = Modifier.fillMaxSize(),
982
+ engine = engine,
983
+ cameraManipulator = rememberCameraManipulator(
984
+ orbitHomePosition = Position(x = 0f, y = 4f, z = 8f),
985
+ targetPosition = Position(0f, 0f, 0f)
986
+ ),
987
+ environment = rememberEnvironment(environmentLoader) {
988
+ environmentLoader.createHDREnvironment("environments/sky_2k.hdr")
989
+ ?: createEnvironment(environmentLoader)
990
+ }
991
+ ) {
992
+ // Sun (center)
993
+ val sunMat = remember(materialLoader) {
994
+ materialLoader.createColorInstance(Color.Yellow, metallic = 0f, roughness = 1f)
995
+ }
996
+ SphereNode(radius = 0.5f, materialInstance = sunMat)
997
+
998
+ // Earth orbit group — parent node rotates, child offset creates orbit
999
+ Node(rotation = Rotation(y = earthAngle)) {
1000
+ // Earth sphere
1001
+ val earthMat = remember(materialLoader) {
1002
+ materialLoader.createColorInstance(Color.Blue, metallic = 0f, roughness = 0.7f)
1003
+ }
1004
+ SphereNode(radius = 0.2f, materialInstance = earthMat, position = Position(x = 2.5f))
1005
+
1006
+ // Moon orbits Earth (nested hierarchy)
1007
+ Node(position = Position(x = 2.5f), rotation = Rotation(y = earthAngle * 3f)) {
1008
+ val moonMat = remember(materialLoader) {
1009
+ materialLoader.createColorInstance(Color.LightGray, metallic = 0f, roughness = 0.9f)
1010
+ }
1011
+ SphereNode(radius = 0.06f, materialInstance = moonMat, position = Position(x = 0.4f))
1012
+ }
1013
+ }
1014
+
1015
+ // Mars orbit group
1016
+ Node(rotation = Rotation(y = marsAngle)) {
1017
+ val marsMat = remember(materialLoader) {
1018
+ materialLoader.createColorInstance(Color.Red, metallic = 0f, roughness = 0.8f)
1019
+ }
1020
+ SphereNode(radius = 0.15f, materialInstance = marsMat, position = Position(x = 4f))
1021
+ }
1022
+
1023
+ // Sun light
1024
+ LightNode(
1025
+ type = LightManager.Type.POINT,
1026
+ apply = {
1027
+ color(1.0f, 0.95f, 0.8f)
1028
+ intensity(500_000f)
1029
+ falloff(15.0f)
1030
+ }
1031
+ )
1032
+ }
469
1033
  }`,
470
1034
  },
471
1035
  // ─── iOS Samples ────────────────────────────────────────────────────────────
package/llms.txt CHANGED
@@ -2247,12 +2247,123 @@ ARSceneView.checkSupport { supported ->
2247
2247
 
2248
2248
  ---
2249
2249
 
2250
+ ## Android XR (Planned)
2251
+
2252
+ SceneView can be used inside Android XR's spatial layout system via the Jetpack XR SDK
2253
+ (Developer Preview). The integration embeds SceneView's `Scene {}` composable inside a
2254
+ `SpatialPanel`, giving full Filament rendering inside an XR spatial panel.
2255
+
2256
+ **Dependencies (in addition to SceneView):**
2257
+ ```kotlin
2258
+ implementation("androidx.xr.scenecore:scenecore:1.0.0-alpha12")
2259
+ implementation("androidx.xr.compose:compose:1.0.0-alpha12")
2260
+ ```
2261
+
2262
+ **Basic pattern:**
2263
+ ```kotlin
2264
+ Subspace {
2265
+ SpatialPanel(SubspaceModifier.width(1200.dp).height(800.dp)) {
2266
+ val engine = rememberEngine()
2267
+ val modelLoader = rememberModelLoader(engine)
2268
+ Scene(modifier = Modifier.fillMaxSize(), engine = engine, modelLoader = modelLoader) {
2269
+ rememberModelInstance(modelLoader, "models/helmet.glb")?.let {
2270
+ ModelNode(modelInstance = it)
2271
+ }
2272
+ }
2273
+ }
2274
+ }
2275
+ ```
2276
+
2277
+ Key XR composables: `Subspace`, `SpatialPanel`, `SpatialRow`, `SpatialColumn`, `Orbiter`,
2278
+ `SceneCoreEntity`. SceneCore entities: `GltfModelEntity`, `PanelEntity`, `AnchorEntity`,
2279
+ `SpatialEnvironment`. Status: **experimental / planned** — Jetpack XR SDK is alpha.
2280
+
2281
+ ---
2282
+
2283
+ ## visionOS Spatial Computing (Planned)
2284
+
2285
+ SceneViewSwift targets visionOS 1+ via RealityKit. Spatial features are planned for future
2286
+ releases, building on the existing `SceneView` and node types.
2287
+
2288
+ ### Scene Types
2289
+
2290
+ | Type | Style | Description | SceneViewSwift |
2291
+ |---|---|---|---|
2292
+ | Window | 2D | Standard SwiftUI window in shared space | `SceneView { }` (existing) |
2293
+ | Volume | 3D bounded | Fixed-size 3D container in shared space | `VolumetricSceneView` (planned) |
2294
+ | Immersive Space | `.mixed` | Virtual content blends with passthrough | `ImmersiveSceneView` (planned) |
2295
+ | Immersive Space | `.progressive` | Partial passthrough replacement | `ImmersiveSceneView` (planned) |
2296
+ | Immersive Space | `.full` | Fully virtual, passthrough off | `ImmersiveSceneView` (planned) |
2297
+
2298
+ ### Hand Tracking (visionOS 1+, Full Space required)
2299
+
2300
+ ARKit `HandTrackingProvider` tracks 27 joints per hand at display refresh rate.
2301
+ Requires `NSHandsTrackingUsageDescription` in Info.plist and a `SpatialTrackingSession`.
2302
+
2303
+ **Planned API:**
2304
+ ```swift
2305
+ ImmersiveSceneView { root in /* content */ }
2306
+ .handTracking(enabled: true)
2307
+ .onHandUpdate { hands in
2308
+ if let d = hands.jointDistance(.thumbTip, .indexFingerTip, hand: .right), d < 0.02 {
2309
+ // Pinch detected
2310
+ }
2311
+ }
2312
+ ```
2313
+
2314
+ ### Spatial Anchors
2315
+
2316
+ `SpatialTrackingSession` (visionOS 2.0+) unlocks ARKit data in RealityKit: anchor
2317
+ geometry extents, real-world offset, and scene understanding mesh.
2318
+
2319
+ **Planned API:**
2320
+ ```swift
2321
+ // World anchor
2322
+ SpatialAnchorNode.world(position: SIMD3<Float>(0, 1, -2))
2323
+
2324
+ // Plane anchor (on detected surface)
2325
+ SpatialAnchorNode.plane(alignment: .horizontal)
2326
+
2327
+ // Hand anchor (attached to a joint)
2328
+ SpatialAnchorNode.hand(.right, joint: .indexFingerTip)
2329
+ ```
2330
+
2331
+ ### Scene Understanding
2332
+
2333
+ Real-time mesh of surroundings enabling collision, physics, surface classification
2334
+ (floor, wall, ceiling, table, seat, door, window), and environment occlusion.
2335
+
2336
+ ### Object Manipulation (visionOS 26)
2337
+
2338
+ `ManipulationComponent` enables look, tap, drag, rotate, scale on entities with a
2339
+ single call. `EnvironmentBlendingComponent` for real-world occlusion.
2340
+ `MeshInstancesComponent` for efficient GPU instanced rendering.
2341
+
2342
+ **Planned API:**
2343
+ ```swift
2344
+ let model = try await ModelNode.load("models/chair.usdz")
2345
+ model.enableManipulation() // look + grab + drag + rotate + scale via system gestures
2346
+ ```
2347
+
2348
+ ### Cross-Platform Mapping (visionOS <-> Android XR)
2349
+
2350
+ | Feature | visionOS | Android XR |
2351
+ |---|---|---|
2352
+ | Spatial container | Volume (`.volumetric`) | `SpatialPanel` |
2353
+ | Immersive mode | `ImmersiveSpace` | `SpatialEnvironment` |
2354
+ | Hand tracking | `HandTrackingProvider` | Jetpack XR hand tracking |
2355
+ | Spatial anchors | `WorldAnchor` | `AnchorEntity` (SceneCore) |
2356
+ | Scene understanding | Scene mesh + classification | Perception APIs |
2357
+
2358
+ ---
2359
+
2250
2360
  ## Platform Coverage Summary
2251
2361
 
2252
2362
  | Platform | Renderer | Framework | Sample | Status |
2253
2363
  |---|---|---|---|---|
2254
2364
  | Android | Filament | Jetpack Compose | `samples/android-demo` | Stable |
2255
2365
  | Android TV | Filament | Compose TV | `samples/android-tv-demo` | Alpha |
2366
+ | Android XR | Filament + SceneCore | Compose for XR | — | Planned |
2256
2367
  | iOS | RealityKit | SwiftUI | `samples/ios-demo` | Alpha |
2257
2368
  | macOS | RealityKit | SwiftUI | via SceneViewSwift | Alpha |
2258
2369
  | visionOS | RealityKit | SwiftUI | via SceneViewSwift | Alpha |
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "sceneview-mcp",
3
- "version": "3.4.6",
3
+ "version": "3.4.8",
4
4
  "mcpName": "io.github.sceneview/mcp",
5
5
  "description": "MCP server for SceneView — cross-platform 3D & AR SDK for Android and iOS. Give Claude the full SceneView SDK so it writes correct, compilable code.",
6
6
  "keywords": [
@@ -43,6 +43,7 @@
43
43
  "dist/migration.js",
44
44
  "dist/node-reference.js",
45
45
  "dist/samples.js",
46
+ "dist/preview.js",
46
47
  "dist/validator.js",
47
48
  "llms.txt"
48
49
  ],