@createlex/figma-swiftui-mcp 1.2.4 → 1.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,6 +13,7 @@ const {
13
13
  setSavedProjectPath,
14
14
  writeSwiftUIScreen,
15
15
  } = require('./xcode-writer.cjs');
16
+ const { buildGenerationPrompt } = require('./local-llm-generator.cjs');
16
17
 
17
18
  const DEFAULT_PORT = 7765;
18
19
  const DEFAULT_HOST = 'localhost';
@@ -278,21 +279,35 @@ function startBridgeServer(options = {}) {
278
279
 
279
280
  const data = response.data || {};
280
281
 
281
- // Attach explicit AI workflow instructions so any agent that fetches this URL
282
- // knows exactly how to generate and write correct SwiftUI code with real images.
282
+ // Build the full generation prompt so AI tools without MCP access
283
+ // can still generate proper native SwiftUI with individual elements.
284
+ let generationPrompt = null;
285
+ try {
286
+ generationPrompt = buildGenerationPrompt(data);
287
+ } catch (_) {
288
+ // Non-fatal — prompt is a bonus, not required
289
+ }
290
+
283
291
  const assetCount = data.assetExportPlan?.candidates?.length ?? 0;
284
292
  const aiWorkflow = {
285
- step1: 'You have the Figma design context. Do NOT generate SwiftUI code from scratch — that produces grey placeholder rectangles instead of real images.',
286
- step2: 'Call the MCP tool `write_selection_to_xcode`. It generates SwiftUI with correct Image("name") asset references AND writes the PNG files to Assets.xcassets automatically.',
287
- step3: 'After write_selection_to_xcode returns, ONLY refine the code it produced: add GeometryReader for responsiveness, wire interactions. DO NOT rewrite Image("name") calls or replace them with Rectangle()/shapes.',
288
- step4: 'If the caller has no MCP access, use the structName and assetExportPlan below to write SwiftUI that references Image("name") for every asset candidate listed.',
289
- mcpTool: 'write_selection_to_xcode',
290
- mcpServer: 'figma-swiftui',
293
+ IF_YOU_HAVE_MCP_ACCESS: {
294
+ step1: 'Call the MCP tool `write_selection_to_xcode` (server: figma-swiftui). It generates SwiftUI with correct Image("name") asset references AND writes the PNG files to Assets.xcassets automatically.',
295
+ step2: 'After write_selection_to_xcode returns, ONLY refine the code it produced: add GeometryReader for responsiveness, wire @State interactions. DO NOT rewrite Image("name") calls or replace them with Rectangle()/shapes.',
296
+ mcpTool: 'write_selection_to_xcode',
297
+ mcpServer: 'figma-swiftui',
298
+ },
299
+ IF_NO_MCP_ACCESS: {
300
+ instruction: 'Use _generationPrompt.systemPrompt as the system message and _generationPrompt.userMessage as the user message to generate native SwiftUI code. Output individual SwiftUI views — Text(), VStack, HStack, ZStack, Image("name") — NOT a single composite Image().',
301
+ rule1: 'Every asset in assetExportPlan.candidates MUST become Image("assetName").resizable() — never Rectangle() or Color.',
302
+ rule2: 'Text nodes become Text("content") with .font(.system(size: X, weight: .bold)) — NEVER custom font names.',
303
+ rule3: 'Output a full SwiftUI View struct with individual elements positioned using the node geometry in the design context.',
304
+ rule4: 'End with #Preview { StructName() }.',
305
+ },
291
306
  assetCandidateCount: assetCount,
292
- warning: 'Photos, 3D renders, illustrations and icons are pre-exported as PNG assets. Always use Image("name") — never reconstruct them with SwiftUI shapes or colors.',
307
+ warning: 'DO NOT generate a single Image() wrapping the whole frame. Generate individual SwiftUI elements for each node.',
293
308
  };
294
309
 
295
- res.json({ ok: true, ...data, _aiWorkflow: aiWorkflow });
310
+ res.json({ ok: true, ...data, _generationPrompt: generationPrompt, _aiWorkflow: aiWorkflow });
296
311
  });
297
312
 
298
313
  function sendBridgeMessage(ws, payload) {
@@ -35,6 +35,8 @@ RULES:
35
35
  9. End every file with a #Preview { StructName() } block.
36
36
  10. Emit best-effort code for anything complex — never emit TODO comments or placeholder stubs.
37
37
  11. If reusableComponents are present, output each as a separate <file name="ComponentName.swift"> tag.
38
+ 12. FONTS: Always use .font(.system(size: X, weight: .bold)) — NEVER reference custom font names like "Inter-Bold", "Roboto", or any fontName from Figma. Custom fonts are not bundled in the Xcode project and will cause runtime errors. System font only.
39
+ 13. IMAGES: Every node listed in assetExportPlan MUST be referenced as Image("assetName").resizable().scaledToFill() — NEVER replace with Rectangle(), Color, or shapes. These PNGs are pre-exported to Assets.xcassets.
38
40
 
39
41
  RESPONSIVE LAYOUT RULES:
40
42
  - Root frame with FILL sizing → .frame(maxWidth: .infinity)
@@ -420,7 +420,7 @@ server.registerTool('get_metadata', {
420
420
  });
421
421
 
422
422
  server.registerTool('get_design_context', {
423
- description: 'Return node metadata, asset export candidates, and generation hints for the current selection or an explicit node. AI-native workflow: call this tool, then generate SwiftUI yourself using your own model (burns zero CreateLex tokens), then call write_generated_swiftui_to_xcode to save the result. Alternatively call get_swiftui_generation_prompt to get a ready-to-use system prompt + user message.',
423
+ description: 'Return node metadata, asset export candidates, and generation hints for the current Figma selection. PREFERRED WORKFLOW: call write_selection_to_xcode instead — it generates correct SwiftUI with Image("name") asset refs AND writes PNGs to Assets.xcassets in one step, no planning needed. Only call get_design_context if you need to inspect the raw node tree before generating.',
424
424
  inputSchema: {
425
425
  nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
426
426
  nodeId: z.string().optional().describe('Optional single Figma node id'),
@@ -438,7 +438,7 @@ server.registerTool('get_design_context', {
438
438
  });
439
439
 
440
440
  server.registerTool('get_swiftui_generation_prompt', {
441
- description: 'Return a ready-to-use SwiftUI system prompt and user message for AI-native generation. Call this, then feed the returned systemPrompt + userMessage to your own AI model (Claude Code, Cursor, Windsurf, ChatGPT, etc.) to generate SwiftUI — burning zero CreateLex tokens. After generation, parse <file name="X.swift"> tags in the response and call write_generated_swiftui_to_xcode with the code and any additionalFiles.',
441
+ description: 'Return a ready-to-use SwiftUI system prompt and user message for AI-native generation. WARNING: prefer write_selection_to_xcode — it handles Image() asset refs and PNG export automatically. Only use get_swiftui_generation_prompt when you must generate code yourself. If you do use this: (a) use .font(.system(size:weight:)) never hardcode custom font names like Inter or Roboto, (b) reference every assetExportPlan entry as Image("name") not Rectangle().',
442
442
  inputSchema: {
443
443
  nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
444
444
  nodeId: z.string().optional().describe('Optional single Figma node id'),
@@ -765,11 +765,11 @@ server.registerTool('write_generated_swiftui_to_xcode', {
765
765
  });
766
766
 
767
767
  server.registerTool('write_selection_to_xcode', {
768
- description: 'Generate SwiftUI from the connected Figma selection, write it into the configured Xcode project, and return refinement hints. After writing, the AI assistant SHOULD automatically apply the returned refinementInstructions to make the generated code adaptive and interactive — replacing raster placeholders with native SwiftUI controls, adding GeometryReader for adaptive layout, and wiring basic interactions.',
768
+ description: 'Generate SwiftUI from the connected Figma selection and write it into the configured Xcode project. THIS IS THE CORRECT TOOL TO CALL — it exports real PNG assets to Assets.xcassets and generates Image("name") references automatically. Use generationMode="fidelity" for designs with blend modes, 3D renders, complex shadows, or photographic images — fidelity rasterizes the whole frame as one pixel-perfect PNG instead of trying to reconstruct it with shapes.',
769
769
  inputSchema: {
770
770
  nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
771
771
  includeOverflow: z.boolean().default(false).describe('Ignore Figma clipping when generating layout'),
772
- generationMode: z.enum(['editable', 'fidelity']).default('editable').describe('Editable keeps more native SwiftUI structure; fidelity rasterizes more complex layouts'),
772
+ generationMode: z.enum(['editable', 'fidelity']).default('editable').describe('Use fidelity for designs with blend modes, 3D renders, complex shadows, or photographic content — rasterizes the whole frame as one pixel-perfect PNG. Use editable for UI screens with native controls (buttons, lists, forms).'),
773
773
  projectPath: z.string().optional().describe('Optional Xcode source folder override'),
774
774
  },
775
775
  }, async ({ nodeIds, includeOverflow, generationMode, projectPath }) => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@createlex/figma-swiftui-mcp",
3
- "version": "1.2.4",
3
+ "version": "1.2.7",
4
4
  "description": "CreateLex MCP runtime for Figma-to-SwiftUI generation and Xcode export",
5
5
  "bin": {
6
6
  "figma-swiftui-mcp": "bin/figma-swiftui-mcp.js"