@createlex/figgen 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +164 -0
- package/bin/figgen.js +156 -0
- package/companion/bridge-server.cjs +786 -0
- package/companion/createlex-auth.cjs +364 -0
- package/companion/local-llm-generator.cjs +437 -0
- package/companion/login.mjs +189 -0
- package/companion/mcp-server.mjs +1365 -0
- package/companion/package.json +17 -0
- package/companion/server.js +65 -0
- package/companion/setup.cjs +309 -0
- package/companion/xcode-writer.cjs +516 -0
- package/package.json +50 -0
|
@@ -0,0 +1,1365 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import { createRequire } from 'node:module';
|
|
4
|
+
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
|
5
|
+
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
|
6
|
+
import WebSocket from 'ws';
|
|
7
|
+
import * as z from 'zod/v4';
|
|
8
|
+
|
|
9
|
+
const require = createRequire(import.meta.url);
|
|
10
|
+
const {
|
|
11
|
+
GENERATED_LAYOUT_VERSION,
|
|
12
|
+
getConfigPath,
|
|
13
|
+
getSavedProjectPath,
|
|
14
|
+
inferStructName,
|
|
15
|
+
loadProjectPath,
|
|
16
|
+
resolveWritableProjectPath,
|
|
17
|
+
setSavedProjectPath,
|
|
18
|
+
writeAssetCatalogEntries,
|
|
19
|
+
writeMultiScreenProject,
|
|
20
|
+
writeSwiftUIScreen,
|
|
21
|
+
} = require('./xcode-writer.cjs');
|
|
22
|
+
const { startBridgeServer } = require('./bridge-server.cjs');
|
|
23
|
+
const {
|
|
24
|
+
AUTH_FILE,
|
|
25
|
+
authorizeRuntimeStartup,
|
|
26
|
+
postAuthorizedApi,
|
|
27
|
+
validateRuntimeSession,
|
|
28
|
+
} = require('./createlex-auth.cjs');
|
|
29
|
+
const {
|
|
30
|
+
generateWithLocalKey,
|
|
31
|
+
buildGenerationPrompt,
|
|
32
|
+
parseClaudeResponse,
|
|
33
|
+
buildPromptContext,
|
|
34
|
+
} = require('./local-llm-generator.cjs');
|
|
35
|
+
|
|
36
|
+
// Bridge endpoints and tunables; every value can be overridden via environment.
const BRIDGE_HTTP_URL = process.env.FIGMA_SWIFTUI_BRIDGE_HTTP_URL || 'http://localhost:7765';
const BRIDGE_WS_URL = process.env.FIGMA_SWIFTUI_BRIDGE_WS_URL || 'ws://localhost:7765/bridge';
// Per-request timeout for bridge round-trips (ms).
const REQUEST_TIMEOUT_MS = Number(process.env.FIGMA_SWIFTUI_BRIDGE_TIMEOUT_MS || 30000);
// How often the auth session should be revalidated (ms). NOTE(review): not
// referenced in this chunk — presumably consumed near startup; verify.
const AUTH_REVALIDATION_INTERVAL_MS = Number(process.env.FIGMA_SWIFTUI_AUTH_REVALIDATION_MS || (10 * 60 * 1000));
// NOTE(review): RESPONSE_SIZE_CAP and MAX_DIAGNOSTICS are not used in this
// chunk — likely applied elsewhere in the file; confirm before removing.
const RESPONSE_SIZE_CAP = Number(process.env.FIGMA_SWIFTUI_RESPONSE_SIZE_CAP || 102400); // 100 KB default
const MAX_DIAGNOSTICS = 20;

// Mutable connection state shared by connectBridge/handleBridgeMessage/callBridge.
let bridgeSocket = null;          // the open agent WebSocket, or null
let connectPromise = null;        // in-flight connection attempt shared by callers
let reconnectTimer = null;        // pending setTimeout handle for reconnects
let authValidationTimer = null;   // NOTE(review): set elsewhere in the file
let bridgeRuntimeHandle = null;   // NOTE(review): set elsewhere in the file
// Last status snapshot received from the bridge (hello-ack/bridge-status/HTTP info).
let lastBridgeStatus = {
  protocolVersion: 1,
  pluginConnected: false,
  connectedAgents: 0,
  pendingRequests: 0,
  supportedActions: [],
};
// Runtime authorization snapshot; populated by the auth flow and read by
// ensureRuntimeAuthorized/getPublicAuthState.
let runtimeAuthState = {
  authorized: false,
  bypass: false,
  apiBaseUrl: null,
  validatedAt: null,
  userId: null,
  email: null,
  tokenSource: null,
  expiresAt: null,
  startupEndpoint: null,
};

// requestId -> { resolve, reject, timer } for in-flight bridge requests.
const pendingRequests = new Map();
|
|
68
|
+
|
|
69
|
+
// Extract the value following a `--project` flag from a CLI argv array.
// Returns null when the flag is absent or has no (truthy) value after it.
function readProjectPathArg(argv) {
  const flagPosition = argv.indexOf('--project');
  if (flagPosition === -1) {
    return null;
  }
  const value = argv[flagPosition + 1];
  return value ? value : null;
}
|
|
76
|
+
|
|
77
|
+
// Wrap arbitrary data as an MCP tool result: one pretty-printed JSON text item.
function jsonResult(data) {
  const text = JSON.stringify(data, null, 2);
  return { content: [{ type: 'text', text }] };
}
|
|
87
|
+
|
|
88
|
+
// Produce a caller-safe snapshot of the runtime auth state (no raw tokens),
// plus the auth file path for troubleshooting.
function getPublicAuthState() {
  const {
    authorized,
    bypass,
    apiBaseUrl,
    validatedAt,
    userId,
    email,
    tokenSource,
    expiresAt,
    startupEndpoint,
  } = runtimeAuthState;
  return {
    authorized,
    bypass,
    apiBaseUrl,
    validatedAt,
    userId,
    email,
    tokenSource,
    expiresAt,
    startupEndpoint,
    authFile: AUTH_FILE,
  };
}
|
|
102
|
+
|
|
103
|
+
// Guard: throw unless the runtime has been authorized for a CreateLex session.
function ensureRuntimeAuthorized() {
  if (runtimeAuthState.authorized) {
    return;
  }
  throw new Error('CreateLex authentication required. Run "npx @createlex/figma-swiftui-mcp login" and ensure your subscription is active before starting figma-swiftui MCP.');
}
|
|
108
|
+
|
|
109
|
+
// Attempt LLM-backed ("semantic") SwiftUI generation before the caller falls
// back to the plugin's deterministic export path.
// Tiers:
//   1. Only 'editable' mode is eligible — any other mode returns null.
//   2. BYOK: if the user supplies their own API key via env vars, generate
//      locally (generateWithLocalKey) and skip the hosted backend entirely.
//   3. Hosted: POST the design context to the CreateLex API — but only for
//      single-root selections.
// Returns null when this path does not apply; throws when the hosted request
// fails outright.
async function tryHostedSemanticGeneration({ nodeIds, generationMode, includeOverflow, analyze }) {
  if (generationMode !== 'editable') {
    return null;
  }

  // Pull a bounded-depth context for the target nodes (no screenshot needed).
  const context = await callBridge('get_design_context', {
    nodeIds,
    maxDepth: 4,
    includeScreenshot: false,
  });

  // Tier 2: BYOK — use the user's own API key (Anthropic, OpenAI, or Ollama)
  // Bypasses CreateLex backend entirely; no subscription token consumed.
  if (process.env.ANTHROPIC_API_KEY || process.env.HF_API_TOKEN || process.env.OPENAI_API_KEY) {
    try {
      const byokResult = await generateWithLocalKey(context, generationMode);
      if (byokResult?.handled) {
        const metadata = context?.metadata;
        // Multi-node contexts carry metadata.nodes; single-node contexts put
        // id/name at the top level.
        const selection = Array.isArray(metadata?.nodes)
          ? { ids: metadata.nodes.map((n) => n.id), names: metadata.nodes.map((n) => n.name) }
          : { ids: [metadata?.id], names: [metadata?.name] };

        if (analyze) {
          return {
            selection: metadata,
            generated: { ...byokResult, imageCount: 0, imageNames: [] },
            assetExportPlan: context?.assetExportPlan ?? null,
            reusableComponents: context?.reusableComponents ?? null,
            generationHints: context?.generationHints ?? null,
            manualRefinementHints: context?.generationHints?.manualRefinementHints ?? [],
            hosted: false,
          };
        }
        return { ...byokResult, selection, imageCount: 0, imageNames: [], hosted: false };
      }
      // byokResult not handled — fall through to the hosted tier.
    } catch (byokError) {
      // Best-effort: a BYOK failure degrades to the hosted path, not an error.
      console.error('[figma-swiftui-mcp] BYOK generation failed, falling back to hosted:', byokError.message);
    }
  }

  const metadata = context?.metadata;
  // Ternary binds as: (!!metadata && !Array.isArray(metadata?.nodes)) ? true : ...
  // i.e. a lone root object counts, or a nodes array of exactly one entry.
  const isSingleRootNode = !!metadata && !Array.isArray(metadata?.nodes) ? true : Array.isArray(metadata?.nodes) && metadata.nodes.length === 1;
  if (!isSingleRootNode) {
    // Hosted generation only supports a single root node.
    return null;
  }

  const endpoint = analyze ? '/mcp/figma-swiftui/analyze' : '/mcp/figma-swiftui/generate';
  const { response, data } = await postAuthorizedApi(runtimeAuthState, endpoint, {
    context,
    generationMode,
    includeOverflow,
  });

  if (!response.ok) {
    throw new Error(data?.error || `Hosted figma-swiftui request failed (${response.status})`);
  }

  if (!data?.handled) {
    // Backend declined — caller falls back to the bridge's export path.
    return null;
  }

  const selection = Array.isArray(metadata?.nodes)
    ? {
        ids: metadata.nodes.map((node) => node.id),
        names: metadata.nodes.map((node) => node.name),
      }
    : {
        ids: [metadata.id],
        names: [metadata.name],
      };

  if (!analyze) {
    // Generate shape: flat result with the produced code.
    return {
      code: data.code,
      selection,
      imageCount: 0,
      imageNames: [],
      diagnostics: data.diagnostics || [],
      provider: data.provider || 'createlex-hosted',
      screenType: data.screenType || null,
      assetRequests: data.assetRequests || [],
      hosted: true,
    };
  }

  // Analyze shape: code nested under `generated`, plus context summaries.
  return {
    selection: metadata,
    generated: {
      code: data.code,
      imageCount: 0,
      imageNames: [],
      diagnostics: data.diagnostics || [],
      provider: data.provider || 'createlex-hosted',
      screenType: data.screenType || null,
      assetRequests: data.assetRequests || [],
      hosted: true,
    },
    assetExportPlan: data.contextSummary?.assetExportPlan || context?.assetExportPlan || null,
    reusableComponents: data.contextSummary?.reusableComponents || context?.reusableComponents || null,
    generationHints: data.contextSummary?.generationHints || context?.generationHints || null,
    manualRefinementHints: context?.generationHints?.manualRefinementHints || [],
    hosted: true,
  };
}
|
|
213
|
+
|
|
214
|
+
// Resolve the Xcode source folder to write into: an explicit override wins,
// otherwise the saved configuration is used. Throws when neither is set.
function resolveTargetProjectPath(projectPath) {
  const target = loadProjectPath({ explicitPath: projectPath });
  if (target) {
    return target;
  }
  throw new Error('No Xcode project path is configured. Use set_project_path first or pass projectPath explicitly.');
}
|
|
221
|
+
|
|
222
|
+
// Arm a single delayed reconnect attempt (1.5 s); no-op when one is already
// pending. A failed reconnect is logged — the 'close' handler re-arms us.
function scheduleReconnect() {
  if (reconnectTimer) {
    return;
  }
  const attemptReconnect = () => {
    reconnectTimer = null;
    connectBridge().catch((error) => {
      console.error('[figma-swiftui-mcp] Bridge reconnect failed:', error.message);
    });
  };
  reconnectTimer = setTimeout(attemptReconnect, 1500);
}
|
|
234
|
+
|
|
235
|
+
// Fail every in-flight bridge request with the given message, cancelling each
// timeout timer and removing the entry from the pending map.
function rejectPendingRequests(message) {
  for (const [id, entry] of pendingRequests.entries()) {
    clearTimeout(entry.timer);
    entry.reject(new Error(message));
    pendingRequests.delete(id);
  }
}
|
|
242
|
+
|
|
243
|
+
// Dispatch one raw WebSocket frame from the bridge. Handles three shapes:
//   - 'hello-ack' / 'bridge-status': merged into the cached lastBridgeStatus.
//   - 'bridge-response': resolves/rejects the matching pendingRequests entry.
//   - 'bridge-event': logged concisely to stderr (selection/page changes only).
// Unparseable frames are logged and dropped.
function handleBridgeMessage(rawMessage) {
  let message;
  try {
    message = JSON.parse(rawMessage.toString());
  } catch (error) {
    console.error('[figma-swiftui-mcp] Failed to parse bridge message:', error);
    return;
  }

  if (message.type === 'hello-ack' || message.type === 'bridge-status') {
    // Shallow-merge so fields absent from this frame keep their last value.
    lastBridgeStatus = {
      ...lastBridgeStatus,
      ...message,
    };
    return;
  }

  if (message.type === 'bridge-response') {
    const pending = pendingRequests.get(message.requestId);
    if (!pending) {
      // Late or unknown response (e.g. the request already timed out) — drop.
      return;
    }

    clearTimeout(pending.timer);
    pendingRequests.delete(message.requestId);

    if (message.ok) {
      pending.resolve(message.data);
    } else {
      pending.reject(new Error(message.error || `Bridge action ${message.action} failed`));
    }
    return;
  }

  if (message.type === 'bridge-event') {
    // Log bridge events concisely — skip full JSON payloads for noisy events
    const summary = message.data && message.event === 'selectionchange'
      ? `${message.data.selection?.count ?? 0} node(s) selected`
      : message.data && message.event === 'currentpagechange'
      ? `page "${message.data.currentPage?.name || 'unknown'}"`
      : '';
    if (summary) {
      // stderr keeps stdout clean for the MCP stdio transport.
      console.error(`[figma-swiftui-mcp] ${message.event}: ${summary}`);
    }
  }
}
|
|
289
|
+
|
|
290
|
+
// Establish (or reuse) the agent WebSocket connection to the local bridge.
// Concurrent callers share one in-flight attempt via connectPromise; a closed
// socket fails all pending requests and schedules an automatic reconnect.
// Resolves with the open socket.
async function connectBridge() {
  // Fast path: the socket is already open.
  if (bridgeSocket && bridgeSocket.readyState === WebSocket.OPEN) {
    return bridgeSocket;
  }

  // A connection attempt is already in flight — piggyback on it.
  if (connectPromise) {
    return connectPromise;
  }

  connectPromise = new Promise((resolve, reject) => {
    const socket = new WebSocket(BRIDGE_WS_URL);
    let settled = false;

    socket.on('open', () => {
      bridgeSocket = socket;
      // Announce ourselves to the bridge as an agent-role client.
      socket.send(JSON.stringify({
        type: 'hello',
        role: 'agent',
        protocolVersion: 1,
      }));

      settled = true;
      resolve(socket);
    });

    socket.on('message', (message) => {
      handleBridgeMessage(message);
    });

    socket.on('close', () => {
      // Only clear the global if this socket is still the current one.
      if (bridgeSocket === socket) {
        bridgeSocket = null;
      }
      // Fail anything still awaiting a response on this socket, then retry.
      rejectPendingRequests('Bridge socket closed');
      scheduleReconnect();
    });

    socket.on('error', (error) => {
      // Pre-open errors reject this attempt; post-open errors surface via
      // the subsequent 'close' event instead.
      if (!settled) {
        reject(error);
      }
    });
  }).finally(() => {
    // Allow the next caller to start a fresh attempt.
    connectPromise = null;
  });

  return connectPromise;
}
|
|
338
|
+
|
|
339
|
+
// Query the bridge's HTTP info endpoint and merge the payload into the cached
// lastBridgeStatus. Never throws: on any failure it returns the last known
// status augmented with ok:false and an error message.
async function fetchBridgeStatus() {
  try {
    const response = await fetch(`${BRIDGE_HTTP_URL}/bridge/info`);
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }
    const data = await response.json();
    // Shallow-merge so fields absent from this response keep their last value.
    lastBridgeStatus = {
      ...lastBridgeStatus,
      ...data,
    };
    return data;
  } catch (error) {
    return {
      ...lastBridgeStatus,
      ok: false,
      error: error instanceof Error ? error.message : 'Unable to reach bridge info endpoint',
    };
  }
}
|
|
359
|
+
|
|
360
|
+
// Send one request to the Figma plugin through the bridge and await its
// response. Requires an authorized runtime and a connected plugin session.
//
// Fixes over the previous version:
//  - Uses the socket returned by connectBridge() instead of re-reading the
//    module-level `bridgeSocket`, which the 'close' handler can null between
//    the two awaits (previously a TypeError on `bridgeSocket.send`).
//  - A synchronous send() failure now clears the timer and pending entry
//    immediately instead of leaking them until the timeout fires.
//
// @param {string} action - bridge action name, e.g. 'get_design_context'.
// @param {object} [params] - action parameters forwarded to the plugin.
// @returns {Promise<*>} the bridge response payload.
// @throws when unauthorized, no plugin is connected, the request times out,
//         or the bridge reports an error.
async function callBridge(action, params = {}) {
  ensureRuntimeAuthorized();
  const socket = await connectBridge();
  const status = await fetchBridgeStatus();

  if (!status.pluginConnected) {
    throw new Error('No active Figma plugin session is connected to the bridge');
  }

  // Unique id correlates this request with its 'bridge-response' frame.
  const requestId = `mcp-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;

  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => {
      pendingRequests.delete(requestId);
      reject(new Error(`Timed out waiting for bridge action "${action}"`));
    }, REQUEST_TIMEOUT_MS);

    pendingRequests.set(requestId, {
      resolve,
      reject,
      timer,
    });

    try {
      socket.send(JSON.stringify({
        type: 'bridge-request',
        requestId,
        action,
        params,
      }));
    } catch (sendError) {
      // Send failed before the request ever left — clean up right away.
      clearTimeout(timer);
      pendingRequests.delete(requestId);
      reject(sendError);
    }
  });
}
|
|
391
|
+
|
|
392
|
+
// MCP server instance (served over stdio); tools are registered below.
const server = new McpServer({
  name: 'figma-swiftui-bridge',
  version: '1.0.0',
});

// bridge_status: bridge health/info plus the public (token-free) auth state.
server.registerTool('bridge_status', {
  description: 'Check the local Figma SwiftUI bridge status, including whether a live plugin session is connected.',
}, async () => {
  const data = await fetchBridgeStatus();
  return jsonResult({
    ...data,
    auth: getPublicAuthState(),
  });
});

// auth_status: report only the runtime authorization snapshot.
server.registerTool('auth_status', {
  description: 'Check whether the local figma-swiftui MCP runtime is authorized for a paid CreateLex subscription.',
}, async () => {
  return jsonResult(getPublicAuthState());
});
|
|
412
|
+
|
|
413
|
+
// get_metadata: selection/page/node metadata, proxied through the bridge.
server.registerTool('get_metadata', {
  description: 'Inspect selection, page, or node metadata in a format similar to Figma Dev MCP.',
  inputSchema: {
    scope: z.enum(['selection', 'page', 'node']).default('selection').describe('Metadata scope to inspect'),
    nodeId: z.string().optional().describe('Required when scope=node'),
    maxDepth: z.number().int().min(0).max(8).default(3).describe('Maximum child depth to include'),
  },
}, async ({ scope, nodeId, maxDepth }) => {
  const data = await callBridge('get_metadata', { scope, nodeId, maxDepth });
  return jsonResult(data);
});

// get_design_context: raw node tree + asset/generation hints. Inspection-only
// escape hatch; the description steers callers to figma_to_swiftui instead.
server.registerTool('get_design_context', {
  description: 'Return node metadata, asset export candidates, and generation hints for the current Figma selection. PREFERRED WORKFLOW: call figma_to_swiftui instead — it handles LLM generation, Image("name") asset refs, PNG export, DesignTokens, and reusable components in one shot. Only call get_design_context if you need to inspect the raw node tree before generating.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
    maxDepth: z.number().int().min(0).max(8).default(3).describe('Maximum child depth to include'),
    includeScreenshot: z.boolean().default(false).describe('Include a PNG screenshot when exactly one node is targeted'),
  },
}, async ({ nodeIds, nodeId, maxDepth, includeScreenshot }) => {
  const data = await callBridge('get_design_context', {
    nodeIds,
    nodeId,
    maxDepth,
    includeScreenshot,
  });
  return jsonResult(data);
});
|
|
442
|
+
|
|
443
|
+
// get_swiftui_generation_prompt: fetch the design context, then build a
// ready-to-use system prompt + user message pair for an external LLM.
server.registerTool('get_swiftui_generation_prompt', {
  description: 'Return a ready-to-use SwiftUI system prompt and user message for AI-native generation. PREFERRED: use figma_to_swiftui instead — it pre-writes assets and handles LLM generation automatically. Only use get_swiftui_generation_prompt when you need the raw prompt. If you do: (a) use .font(.system(size:weight:)) — never hardcode custom font names like Inter or Roboto, (b) reference every assetExportPlan entry as Image("name") not Rectangle().',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
    maxDepth: z.number().int().min(0).max(8).default(4).describe('Maximum child depth to include'),
  },
}, async ({ nodeIds, nodeId, maxDepth }) => {
  const context = await callBridge('get_design_context', {
    nodeIds,
    nodeId,
    maxDepth,
    includeScreenshot: false,
  });

  // buildGenerationPrompt returns a falsy value when no single root node
  // can be extracted from the context.
  const prompt = buildGenerationPrompt(context);
  if (!prompt) {
    throw new Error('Could not extract root node from design context. Ensure a single frame or component is selected.');
  }

  return jsonResult({
    ...prompt,
    instructions: [
      '1. Use systemPrompt as the system message and userMessage as the user message.',
      '2. Ask your AI to generate SwiftUI code.',
      '3. Parse <file name="X.swift"> tags from the response — each tag is one Swift file.',
      '4. Call write_generated_swiftui_to_xcode with: code (primary view), additionalFiles for DesignTokens.swift (dir:"shared") and components (dir:"components").',
    ],
  });
});
|
|
473
|
+
|
|
474
|
+
// get_screenshot: PNG export for one node (or the single current selection).
server.registerTool('get_screenshot', {
  description: 'Export a PNG screenshot for a single target node or the single current selection.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
  },
}, async ({ nodeIds, nodeId }) => {
  const data = await callBridge('get_screenshot', { nodeIds, nodeId });
  return jsonResult(data);
});

// export_svg: exact SVG export for a vector-friendly node.
server.registerTool('export_svg', {
  description: 'Export an exact SVG for a single vector-friendly node or the single current selection.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
  },
}, async ({ nodeIds, nodeId }) => {
  const data = await callBridge('export_svg', { nodeIds, nodeId });
  return jsonResult(data);
});
|
|
495
|
+
|
|
496
|
+
// write_svg_to_xcode: export an SVG from Figma and persist it into the
// project's Assets.xcassets via the xcode-writer helper.
server.registerTool('write_svg_to_xcode', {
  description: 'Export an exact SVG from Figma and write it directly into Assets.xcassets.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
    assetName: z.string().optional().describe('Optional asset name override'),
    projectPath: z.string().optional().describe('Optional Xcode source folder override'),
  },
}, async ({ nodeIds, nodeId, assetName, projectPath }) => {
  // Resolve the destination before exporting so a missing project path
  // fails fast, without a round-trip to the plugin.
  const targetDir = resolveTargetProjectPath(projectPath);
  const svgExport = await callBridge('export_svg', { nodeIds, nodeId });
  // Prefer the explicit name, then the plugin's suggestion, then a fallback.
  const effectiveAssetName = assetName || svgExport.suggestedAssetName || 'VectorAsset';
  const result = writeAssetCatalogEntries({
    targetDir,
    assets: [
      {
        name: effectiveAssetName,
        format: 'svg',
        svg: svgExport.svg,
      },
    ],
  });

  if (!result.ok) {
    throw new Error(result.errors.join(' | ') || 'Failed to write SVG asset to Xcode');
  }

  return jsonResult({
    ok: true,
    projectPath: targetDir,
    assetName: effectiveAssetName,
    xcassetsDir: result.xcassetsDir,
    files: result.files,
    sourceNode: {
      nodeId: svgExport.nodeId,
      nodeName: svgExport.nodeName,
      nodeType: svgExport.nodeType,
    },
  });
});
|
|
536
|
+
|
|
537
|
+
// get_project_path: read the persisted Xcode source folder configuration.
server.registerTool('get_project_path', {
  description: 'Read the saved Xcode project source folder used for local SwiftUI writes.',
}, async () => {
  return jsonResult({
    projectPath: getSavedProjectPath(),
    configPath: getConfigPath(),
    layoutVersion: GENERATED_LAYOUT_VERSION,
  });
});

// set_project_path: validate that the folder is writable, then persist it.
server.registerTool('set_project_path', {
  description: 'Persist the Xcode project source folder used for local SwiftUI writes.',
  inputSchema: {
    projectPath: z.string().describe('Path to the Xcode source folder containing Swift files and Assets.xcassets'),
  },
}, async ({ projectPath }) => {
  // resolveWritableProjectPath normalizes and validates before we save.
  const resolved = resolveWritableProjectPath(projectPath);
  const saved = setSavedProjectPath(resolved);
  return jsonResult({
    ok: true,
    projectPath: saved,
    configPath: getConfigPath(),
  });
});
|
|
561
|
+
|
|
562
|
+
// get_document_summary: high-level document/page/selection/viewport metadata.
server.registerTool('get_document_summary', {
  description: 'Read high-level Figma document, page, selection, and viewport metadata from the connected plugin session.',
}, async () => {
  const data = await callBridge('get_document_summary');
  return jsonResult(data);
});

// get_viewport_context: viewport center, zoom, and current page metadata.
server.registerTool('get_viewport_context', {
  description: 'Read the active Figma viewport center, zoom, and current page metadata.',
}, async () => {
  const data = await callBridge('get_viewport_context');
  return jsonResult(data);
});

// get_selection_snapshot: structural snapshot of the current selection.
server.registerTool('get_selection_snapshot', {
  description: 'Read a rich structural snapshot of the current Figma selection.',
  inputSchema: {
    maxDepth: z.number().int().min(0).max(8).default(3).describe('Maximum child depth to include in the snapshot'),
  },
}, async ({ maxDepth }) => {
  const data = await callBridge('get_selection_snapshot', { maxDepth });
  return jsonResult(data);
});
|
|
585
|
+
|
|
586
|
+
// get_page_snapshot: current page plus its child hierarchy (shallower default
// depth than node snapshots — pages can be very large).
server.registerTool('get_page_snapshot', {
  description: 'Read the current Figma page and its child hierarchy.',
  inputSchema: {
    maxDepth: z.number().int().min(0).max(6).default(2).describe('Maximum child depth to include for the page snapshot'),
  },
}, async ({ maxDepth }) => {
  const data = await callBridge('get_page_snapshot', { maxDepth });
  return jsonResult(data);
});

// get_node_snapshot: structural snapshot for one explicit node id.
server.registerTool('get_node_snapshot', {
  description: 'Read a rich structural snapshot for a specific Figma node id.',
  inputSchema: {
    nodeId: z.string().describe('Figma node id, for example "123:456"'),
    maxDepth: z.number().int().min(0).max(8).default(3).describe('Maximum child depth to include for the node snapshot'),
  },
}, async ({ nodeId, maxDepth }) => {
  const data = await callBridge('get_node_snapshot', { nodeId, maxDepth });
  return jsonResult(data);
});

// find_nodes: name-substring (and optional type) search over the current page.
server.registerTool('find_nodes', {
  description: 'Search the current Figma page for nodes by name substring and optional node type.',
  inputSchema: {
    query: z.string().default('').describe('Case-insensitive name substring to search for'),
    type: z.string().optional().describe('Optional Figma node type to filter, for example FRAME or TEXT'),
    limit: z.number().int().min(1).max(200).default(25).describe('Maximum number of matching nodes to return'),
    maxDepth: z.number().int().min(0).max(2).default(0).describe('Maximum child depth to include for each match'),
  },
}, async ({ query, type, limit, maxDepth }) => {
  const data = await callBridge('find_nodes', { query, type, limit, maxDepth });
  return jsonResult(data);
});
|
|
619
|
+
|
|
620
|
+
// get_asset_export_plan: list exportable vector/icon/image candidates.
server.registerTool('get_asset_export_plan', {
  description: 'List vector/icon/image export candidates for the current selection or explicit node ids.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
  },
}, async ({ nodeIds, nodeId }) => {
  const data = await callBridge('get_asset_export_plan', { nodeIds, nodeId });
  return jsonResult(data);
});

// extract_reusable_components: detect repeated structures worth extracting
// into reusable SwiftUI components (selection- or page-scoped).
server.registerTool('extract_reusable_components', {
  description: 'Identify repeated Figma components and repeated sibling structures that should become reusable SwiftUI components.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection or current page when scope=page'),
    nodeId: z.string().optional().describe('Optional single Figma node id'),
    scope: z.enum(['selection', 'page']).default('selection').describe('Analyze the current selection or scan the full current page'),
  },
}, async ({ nodeIds, nodeId, scope }) => {
  const data = await callBridge('extract_reusable_components', { nodeIds, nodeId, scope });
  return jsonResult(data);
});

// dump_tree: human-readable text tree of the selection or given nodes.
server.registerTool('dump_tree', {
  description: 'Return a readable text tree for the current selection or explicit node ids.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
  },
}, async ({ nodeIds }) => {
  const data = await callBridge('dump_tree', { nodeIds });
  return jsonResult(data);
});
|
|
652
|
+
|
|
653
|
+
// generate_swiftui: first tries the hosted/BYOK semantic path (only when image
// payloads are not requested), then falls back to the plugin's deterministic
// export pipeline over the bridge.
server.registerTool('generate_swiftui', {
  description: 'Run the same SwiftUI export path used by the plugin UI, optionally including image payloads.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    includeOverflow: z.boolean().default(false).describe('Ignore Figma clipping when generating layout'),
    generationMode: z.enum(['editable', 'fidelity']).default('editable').describe('Editable keeps more native SwiftUI structure; fidelity rasterizes more complex layouts'),
    includeImages: z.boolean().default(false).describe('Include base64 image payloads in the tool result'),
  },
}, async ({ nodeIds, includeOverflow, generationMode, includeImages }) => {
  if (!includeImages) {
    // Semantic generation never returns image payloads, so it is skipped
    // whenever the caller asked for them.
    const hosted = await tryHostedSemanticGeneration({
      nodeIds,
      generationMode,
      includeOverflow,
      analyze: false,
    });
    if (hosted) {
      return jsonResult(hosted);
    }
  }

  // Fallback: the plugin's own export path.
  const data = await callBridge('generate_swiftui', {
    nodeIds,
    includeOverflow,
    generationMode,
    includeImages,
  });
  return jsonResult(data);
});
|
|
682
|
+
|
|
683
|
+
// analyze_generation: like generate_swiftui but returns diagnostics and
// rasterization reasons; tries the semantic path first (analyze variant).
server.registerTool('analyze_generation', {
  description: 'Return the generated SwiftUI plus per-node diagnostics and rasterization reasons for the current selection or explicit node ids.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    includeOverflow: z.boolean().default(false).describe('Ignore Figma clipping when generating layout'),
    generationMode: z.enum(['editable', 'fidelity']).default('editable').describe('Editable keeps more native SwiftUI structure; fidelity rasterizes more complex layouts'),
    maxDepth: z.number().int().min(0).max(8).default(3).describe('Maximum child depth to include in the diagnostic node snapshot'),
  },
}, async ({ nodeIds, includeOverflow, generationMode, maxDepth }) => {
  const hosted = await tryHostedSemanticGeneration({
    nodeIds,
    generationMode,
    includeOverflow,
    analyze: true,
  });
  if (hosted) {
    return jsonResult(hosted);
  }

  // Fallback: the plugin's diagnostic export path over the bridge.
  const data = await callBridge('analyze_generation', {
    nodeIds,
    includeOverflow,
    generationMode,
    maxDepth,
  });
  return jsonResult(data);
});
|
|
710
|
+
|
|
711
|
+
// Tool: write_generated_swiftui_to_xcode — persist externally generated SwiftUI
// into the configured Xcode project, auto-exporting image assets when the
// caller did not supply any.
server.registerTool('write_generated_swiftui_to_xcode', {
  description: 'Write generated SwiftUI code into the configured Xcode project. Images are automatically exported from the current Figma selection and written to Assets.xcassets — you do not need to provide them manually. Supports multiple files: use additionalFiles for DesignTokens.swift (dir: "shared"), reusable components (dir: "components"), or extra screen files (dir: "screens"). AI tools generating code with their own model should call this after generating SwiftUI from get_design_context or the /design-context URL.',
  inputSchema: {
    code: z.string().describe('SwiftUI source code for the primary view'),
    structName: z.string().optional().describe('Optional Swift struct name; if omitted it will be inferred from the code'),
    projectPath: z.string().optional().describe('Optional Xcode source folder override'),
    images: z.array(z.object({
      name: z.string(),
      base64: z.string(),
    })).default([]).describe('Optional exported image payloads'),
    selectionNames: z.array(z.string()).default([]).describe('Optional original Figma selection names for struct inference'),
    additionalFiles: z.array(z.object({
      name: z.string().describe('Swift filename, e.g. DesignTokens.swift'),
      code: z.string().describe('Full Swift source code for this file'),
      dir: z.enum(['shared', 'components', 'screens']).optional().describe('shared=project root, components=Components/ subdir, screens=same dir as primary (default)'),
    })).optional().describe('Additional Swift files to write: DesignTokens.swift, reusable components, etc.'),
  },
}, async ({ code, structName, projectPath, images, selectionNames, additionalFiles }) => {
  const targetDir = resolveTargetProjectPath(projectPath);
  const effectiveStructName = inferStructName({ structName, code, selectionNames });

  // Auto-fetch images from the current Figma selection when none were provided.
  // This ensures Assets.xcassets is always populated even when the AI supplied
  // its own SwiftUI code (e.g. from get_design_context / the /design-context URL).
  let effectiveImages = Array.isArray(images) && images.length > 0 ? images : [];
  if (effectiveImages.length === 0) {
    try {
      const generated = await callBridge('generate_swiftui', {
        includeImages: true,
        generationMode: 'editable',
        includeOverflow: false,
      });
      if (Array.isArray(generated.images) && generated.images.length > 0) {
        effectiveImages = generated.images;
      }
    } catch {
      // Non-fatal: bridge may not be connected or selection may have no images.
    }
  }

  const result = writeSwiftUIScreen({
    targetDir,
    code,
    structName: effectiveStructName,
    images: effectiveImages,
    additionalFiles,
  });

  if (!result.ok) {
    // FIX: guard with optional chaining — a failed write may not populate
    // `results.errors`, and the old `result.results.errors.join(...)` would
    // throw a TypeError that masked the real failure message.
    throw new Error(result.results?.errors?.join(' | ') || 'Failed to write generated SwiftUI to Xcode');
  }

  // Strip base64/svg payloads from the echo: the files are already on disk.
  return jsonResult({
    ...stripImageData(result),
    structName: effectiveStructName,
    imagesWritten: effectiveImages.length,
  });
});
|
|
769
|
+
|
|
770
|
+
// Tool: write_selection_to_xcode — generate SwiftUI from the live Figma
// selection (preferring hosted/LLM output over the template generator) and
// write code + PNG assets into the Xcode project.
server.registerTool('write_selection_to_xcode', {
  description: 'Generate SwiftUI from the connected Figma selection and write it into the configured Xcode project. THIS IS THE CORRECT TOOL TO CALL — it exports real PNG assets to Assets.xcassets and generates Image("name") references automatically. Always use the default generationMode="editable" — individual assets include blendModeSwiftUI values so blend modes are applied correctly on each Image(). Only use generationMode="fidelity" when the ENTIRE frame is a single photographic image or illustration with no text, no interactive elements, and no distinct layers (e.g. a full-bleed photo background).',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    includeOverflow: z.boolean().default(false).describe('Ignore Figma clipping when generating layout'),
    generationMode: z.enum(['editable', 'fidelity']).default('editable').describe('Default: editable — generates individual SwiftUI elements with correct .blendMode() on each Image(). Use fidelity ONLY when the whole frame is a single photo/illustration with no text or interactive elements.'),
    projectPath: z.string().optional().describe('Optional Xcode source folder override'),
  },
}, async ({ nodeIds, includeOverflow, generationMode, projectPath }) => {
  const targetDir = resolveTargetProjectPath(projectPath);

  // Try LLM generation (Tier 2/3) to get better code than the template generator
  let llmCode = null;
  const llmAdditionalFiles = []; // only pushed to, never reassigned
  let analysisHints = null;
  try {
    const semanticResult = await tryHostedSemanticGeneration({
      nodeIds,
      generationMode,
      includeOverflow,
      analyze: false,
    });
    if (semanticResult?.code) {
      const parsed = parseClaudeResponse(semanticResult.code);
      llmCode = parsed.code || semanticResult.code;
      if (parsed.designTokensCode) {
        llmAdditionalFiles.push({ name: 'DesignTokens.swift', code: parsed.designTokensCode, dir: 'shared' });
      }
      for (const comp of (parsed.componentFiles || [])) {
        llmAdditionalFiles.push({ name: comp.name, code: comp.code, dir: 'components' });
      }
    }
    // Also try to get analysis hints (only when code generation yielded nothing).
    if (!semanticResult) {
      const analysis = await tryHostedSemanticGeneration({
        nodeIds,
        generationMode,
        includeOverflow,
        analyze: true,
      });
      if (analysis) {
        analysisHints = {
          generationHints: analysis.generationHints,
          manualRefinementHints: analysis.manualRefinementHints,
          reusableComponents: analysis.reusableComponents,
          assetExportPlan: analysis.assetExportPlan,
        };
      }
    }
  } catch (err) {
    // Non-fatal — continue with plugin generation
    console.error('[figma-swiftui-mcp] Semantic generation failed (non-fatal):', err?.message ?? err);
  }

  // Always run the plugin generator: it supplies images, selection metadata,
  // diagnostics, and a template-code fallback.
  const generated = await callBridge('generate_swiftui', {
    nodeIds,
    includeOverflow,
    generationMode,
    includeImages: true,
  });

  // Prefer LLM-generated code over template-generated code
  const finalCode = llmCode || generated.code;

  const effectiveStructName = inferStructName({
    code: finalCode,
    selectionNames: generated.selection?.names ?? [],
  });

  const result = writeSwiftUIScreen({
    targetDir,
    code: finalCode,
    structName: effectiveStructName,
    images: Array.isArray(generated.images) ? generated.images : [],
    additionalFiles: llmAdditionalFiles,
  });

  if (!result.ok) {
    // FIX: optional chaining — `results.errors` may be absent on failure; the
    // unguarded access used to throw a TypeError that hid the real error text.
    throw new Error(result.results?.errors?.join(' | ') || 'Failed to write generated selection to Xcode');
  }

  // Build refinement instructions from diagnostics and hints
  const rasterNodes = (generated.diagnostics ?? []).filter(
    (d) => d.rasterized || d.reason?.toLowerCase().includes('raster')
  );
  const refinementInstructions = buildRefinementInstructions({
    diagnostics: generated.diagnostics ?? [],
    rasterNodes,
    analysisHints,
    structName: effectiveStructName,
    filePath: result.results?.swiftFile ?? null,
  });

  const imageNames = (generated.images ?? []).map((img) => img.name);

  return jsonResult(buildCompactResponse({
    result,
    structName: effectiveStructName,
    selection: generated.selection ?? null,
    diagnostics: generated.diagnostics ?? [],
    refinementInstructions,
    analysisHints,
    _critical: [
      `✅ SwiftUI written to ${result.results?.swiftFile ?? 'Xcode'}. Images written to Assets.xcassets.`,
      imageNames.length > 0
        ? `🖼 ${imageNames.length} PNG asset(s) written: ${imageNames.join(', ')}. The generated code already references them with Image("name") calls.`
        : null,
      '🚫 DO NOT rewrite or overwrite the generated Swift file. DO NOT replace Image("name") calls with Rectangle(), shapes, or colors.',
      '✏️ You may ONLY: add GeometryReader for responsiveness, wire @State interactions, extract reusable sub-views. Nothing else.',
    ].filter(Boolean),
  }));
});
|
|
882
|
+
|
|
883
|
+
// ---------------------------------------------------------------------------
|
|
884
|
+
// figma_to_swiftui — true one-shot Figma-to-SwiftUI generation
|
|
885
|
+
// ---------------------------------------------------------------------------
|
|
886
|
+
|
|
887
|
+
// ---------------------------------------------------------------------------
// figma_to_swiftui — true one-shot Figma-to-SwiftUI generation
// ---------------------------------------------------------------------------
server.registerTool('figma_to_swiftui', {
  description:
    'One-shot Figma-to-SwiftUI: generates production-ready SwiftUI code from the current Figma selection (or all page frames) and writes it to the Xcode project — including PNG assets, DesignTokens.swift, and reusable components. ' +
    'With BYOK keys (ANTHROPIC_API_KEY / OPENAI_API_KEY) or a CreateLex subscription this is a TRUE single-call operation. ' +
    'Without those, it pre-writes assets and returns a structured prompt so the calling AI can generate code and finish with one more write_generated_swiftui_to_xcode call. ' +
    'Use scope="page" to batch-generate every top-level frame on the current page.',
  inputSchema: {
    nodeIds: z.array(z.string()).optional().describe('Optional list of Figma node ids. If omitted, uses the current selection'),
    scope: z.enum(['selection', 'page']).default('selection').describe('selection = current selection; page = every top-level frame on the active page'),
    generationMode: z.enum(['editable', 'fidelity']).default('editable').describe('editable (default) keeps native SwiftUI elements; fidelity rasterizes the whole frame'),
    includeOverflow: z.boolean().default(false).describe('Ignore Figma clipping when generating layout'),
    projectPath: z.string().optional().describe('Optional Xcode source folder override'),
  },
}, async ({ nodeIds, scope, generationMode, includeOverflow, projectPath }) => {
  const targetDir = resolveTargetProjectPath(projectPath);

  // -------------------------------------------------------------------------
  // 1. Resolve target node ids
  // -------------------------------------------------------------------------
  let effectiveNodeIds = nodeIds;

  if (scope === 'page') {
    const pageSnapshot = await callBridge('get_page_snapshot', { maxDepth: 1 });
    const topFrames = (pageSnapshot?.children ?? []).filter(
      (c) => c.type === 'FRAME' || c.type === 'COMPONENT' || c.type === 'COMPONENT_SET'
    );
    if (topFrames.length === 0) {
      throw new Error('No top-level frames found on the current page');
    }
    effectiveNodeIds = topFrames.map((f) => f.id);
  }

  // -------------------------------------------------------------------------
  // 2. Get design context (node tree, tokens, asset plan, components)
  // -------------------------------------------------------------------------
  const context = await callBridge('get_design_context', {
    nodeIds: effectiveNodeIds,
    maxDepth: 4,
    includeScreenshot: false,
  });

  // -------------------------------------------------------------------------
  // 3. Get images from plugin generator
  // -------------------------------------------------------------------------
  const generated = await callBridge('generate_swiftui', {
    nodeIds: effectiveNodeIds,
    includeOverflow,
    generationMode,
    includeImages: true,
  });

  const images = Array.isArray(generated.images) ? generated.images : [];
  const imageNames = images.map((img) => img.name);

  // -------------------------------------------------------------------------
  // 4. Attempt LLM generation (Tier 2 BYOK → Tier 3 Hosted)
  // -------------------------------------------------------------------------
  let llmResult = null;

  // Tier 2: BYOK
  if (process.env.ANTHROPIC_API_KEY || process.env.HF_API_TOKEN || process.env.OPENAI_API_KEY) {
    try {
      const byokResult = await generateWithLocalKey(context, generationMode);
      if (byokResult?.handled) {
        llmResult = byokResult;
      }
    } catch (err) {
      console.error('[figma-swiftui-mcp] BYOK generation failed:', err?.message ?? err);
    }
  }

  // Tier 3: Hosted (only for editable single-root)
  if (!llmResult && generationMode === 'editable') {
    try {
      const metadata = context?.metadata;
      const isSingleRoot = !!metadata && (!Array.isArray(metadata?.nodes) || metadata.nodes.length === 1);
      if (isSingleRoot && runtimeAuthState.authorized) {
        const { response, data } = await postAuthorizedApi(runtimeAuthState, '/mcp/figma-swiftui/generate', {
          context,
          generationMode,
          includeOverflow,
        });
        if (response.ok && data?.handled) {
          llmResult = {
            handled: true,
            provider: data.provider || 'createlex-hosted',
            code: data.code,
            designTokensCode: null,
            componentFiles: [],
            diagnostics: data.diagnostics || [],
          };
        }
      }
    } catch (err) {
      console.error('[figma-swiftui-mcp] Hosted generation failed (non-fatal):', err?.message ?? err);
    }
  }

  // -------------------------------------------------------------------------
  // 5a. Tier 2/3 succeeded — parse, write everything, return success
  // -------------------------------------------------------------------------
  if (llmResult) {
    const parsed = parseClaudeResponse(llmResult.code || '');
    const finalCode = parsed.code || llmResult.code || generated.code;
    const effectiveStructName = inferStructName({
      code: finalCode,
      selectionNames: generated.selection?.names ?? [],
    });

    // Build additional files (DesignTokens + reusable components)
    const additionalFiles = [];
    if (parsed.designTokensCode || llmResult.designTokensCode) {
      additionalFiles.push({
        name: 'DesignTokens.swift',
        code: parsed.designTokensCode || llmResult.designTokensCode,
        dir: 'shared',
      });
    }
    for (const comp of (parsed.componentFiles || llmResult.componentFiles || [])) {
      additionalFiles.push({ name: comp.name, code: comp.code, dir: 'components' });
    }

    const result = writeSwiftUIScreen({
      targetDir,
      code: finalCode,
      structName: effectiveStructName,
      images,
      additionalFiles,
    });

    if (!result.ok) {
      // FIX: optional chaining — `results.errors` may be absent on failure;
      // the unguarded access used to throw a TypeError hiding the real error.
      throw new Error(result.results?.errors?.join(' | ') || 'Failed to write generated SwiftUI to Xcode');
    }

    return jsonResult({
      tier: llmResult.provider?.startsWith('local-') || llmResult.provider?.startsWith('huggingface:') ? 'byok' : 'hosted',
      provider: llmResult.provider,
      oneShot: true,
      structName: effectiveStructName,
      swiftFile: result.results?.swiftFile ?? null,
      additionalFiles: result.results?.additionalSwiftFiles ?? [],
      imagesWritten: imageNames.length,
      imageNames,
      generatedRoot: result.generatedRoot,
      projectPath: result.projectPath,
      _critical: [
        `✅ SwiftUI generated via ${llmResult.provider} and written to ${result.results?.swiftFile ?? 'Xcode'}.`,
        imageNames.length > 0
          ? `🖼 ${imageNames.length} PNG asset(s) written: ${imageNames.join(', ')}.`
          : null,
        additionalFiles.length > 0
          ? `📦 ${additionalFiles.length} additional file(s): ${additionalFiles.map((f) => f.name).join(', ')}.`
          : null,
        '🚫 DO NOT rewrite the generated Swift file. DO NOT replace Image("name") calls with shapes or colors.',
        '✏️ You may ONLY: add GeometryReader for responsiveness, wire @State interactions, extract reusable sub-views.',
      ].filter(Boolean),
    });
  }

  // -------------------------------------------------------------------------
  // 5b. Tier 1 — AI-native: pre-write assets, return structured prompt
  // -------------------------------------------------------------------------

  // Pre-write images to Assets.xcassets so Claude only needs to generate code
  let assetsPreWritten = false;
  if (images.length > 0) {
    const assetResult = writeAssetCatalogEntries({
      targetDir,
      assets: images.map((img) => ({
        name: img.name,
        format: img.format || 'png',
        base64: img.base64,
        svg: img.svg,
      })),
    });
    // FIX: guard `files` — a failure result may not carry a files array.
    assetsPreWritten = assetResult.ok || (assetResult.files?.length ?? 0) > 0;
  }

  // Build the generation prompt
  const prompt = buildGenerationPrompt(context);
  const effectiveStructName = inferStructName({
    code: generated.code,
    selectionNames: generated.selection?.names ?? [],
  });

  // Build asset manifest for the prompt so the AI knows exactly which Image() names are valid
  const assetManifest = (context?.assetExportPlan?.candidates ?? []).map((c) => ({
    name: c.assetName || c.name,
    blendMode: c.blendModeSwiftUI || null,
    type: c.suggestedType || c.type || 'image',
  }));

  return jsonResult({
    tier: 'ai-native',
    oneShot: false,
    assetsPreWritten,
    imagesWritten: assetsPreWritten ? imageNames.length : 0,
    imageNames,
    structName: effectiveStructName,
    projectPath: targetDir,
    generationPrompt: prompt
      ? {
          systemPrompt: prompt.systemPrompt,
          userMessage: prompt.userMessage + (assetManifest.length > 0
            ? `\n\nASSET MANIFEST (already written to Assets.xcassets — use these exact names):\n${JSON.stringify(assetManifest, null, 2)}`
            : ''),
          outputStructName: prompt.outputStructName || effectiveStructName,
        }
      : null,
    designContext: prompt ? null : context,
    assetExportPlan: context?.assetExportPlan ?? null,
    reusableComponents: context?.reusableComponents ?? null,
    _critical: [
      assetsPreWritten
        ? `🖼 ${imageNames.length} PNG asset(s) pre-written to Assets.xcassets: ${imageNames.join(', ')}.`
        : null,
      'Generate SwiftUI code using the generationPrompt above (systemPrompt + userMessage).',
      'Output code using <file name="StructName.swift"> XML tags.',
      'Then call write_generated_swiftui_to_xcode with the generated code and images:[] (assets are already on disk).',
      '⚠️ Use .font(.system(size:weight:)) ONLY — never custom font names.',
      '⚠️ Reference every asset as Image("name") — never Rectangle() or Color() placeholders.',
    ].filter(Boolean),
  });
});
|
|
1111
|
+
|
|
1112
|
+
// ---------------------------------------------------------------------------
|
|
1113
|
+
// Response-size mitigation helpers
|
|
1114
|
+
// ---------------------------------------------------------------------------
|
|
1115
|
+
|
|
1116
|
+
/**
 * Return a deep copy of `obj` with every `base64`, `svg`, and `data` property
 * removed at any depth. The original object is never mutated. Non-object
 * inputs (and null) are returned unchanged.
 *
 * Rationale: exported files are already on disk — the model only needs names
 * and paths, not the binary payloads.
 */
function stripImageData(obj) {
  if (!obj || typeof obj !== 'object') return obj;

  // JSON round-trip gives an isolated deep copy we can mutate freely.
  const clone = JSON.parse(JSON.stringify(obj));

  const scrub = (value) => {
    if (Array.isArray(value)) {
      for (const item of value) scrub(item);
    } else if (value && typeof value === 'object') {
      delete value.base64;
      delete value.svg;
      delete value.data;
      Object.values(value).forEach(scrub);
    }
  };

  scrub(clone);
  return clone;
}
|
|
1144
|
+
|
|
1145
|
+
/**
 * Cap the diagnostics array at MAX_DIAGNOSTICS entries, keeping rasterized
 * nodes first (they are the most useful for refinement). When truncation
 * happens, a trailing marker entry records how many diagnostics were omitted.
 * Arrays at or under the cap (and non-array inputs) are returned untouched.
 */
function capDiagnostics(diagnostics) {
  if (!Array.isArray(diagnostics) || diagnostics.length <= MAX_DIAGNOSTICS) {
    return diagnostics;
  }

  // Score 1 for rasterized nodes so the sort floats them to the front.
  const rasterScore = (d) => (d.rasterized || d.reason?.toLowerCase().includes('raster') ? 1 : 0);
  const prioritised = [...diagnostics].sort((a, b) => rasterScore(b) - rasterScore(a));

  const capped = prioritised.slice(0, MAX_DIAGNOSTICS);
  capped.push({
    _truncated: true,
    totalCount: diagnostics.length,
    shownCount: MAX_DIAGNOSTICS,
    message: `${diagnostics.length - MAX_DIAGNOSTICS} additional diagnostic(s) omitted to reduce response size.`,
  });
  return capped;
}
|
|
1169
|
+
|
|
1170
|
+
/**
 * Build a compact MCP response for write_selection_to_xcode.
 *
 * Strategy:
 * 1. Strip all binary (base64/svg) data from image results.
 * 2. Cap diagnostics to MAX_DIAGNOSTICS most relevant entries.
 * 3. Estimate serialised size; if still over RESPONSE_SIZE_CAP,
 *    compress analysis hints to summary-only, then drop the Swift code echo
 *    (the code already lives on disk at `results.swiftFile`).
 *
 * FIX: callers pass a `_critical` array of must-read guidance strings, but the
 * previous parameter destructuring silently discarded it, so the guidance never
 * reached the response. `_critical` is now accepted (optional — existing
 * callers that omit it are unaffected) and forwarded verbatim.
 */
function buildCompactResponse({ result, structName, selection, diagnostics, refinementInstructions, analysisHints, _critical }) {
  const compact = {
    ...stripImageData(result),
    structName,
    selection,
    diagnostics: capDiagnostics(diagnostics),
    refinementInstructions,
    ...(analysisHints ? { analysisHints } : {}),
    ...(_critical ? { _critical } : {}),
  };

  // First size check
  let serialised = JSON.stringify(compact);
  if (serialised.length <= RESPONSE_SIZE_CAP) {
    return compact;
  }

  // Compress: remove verbose analysis hints, keep only summary-level data
  if (compact.analysisHints) {
    const { generationHints, manualRefinementHints, ...rest } = compact.analysisHints;
    compact.analysisHints = {
      ...rest,
      _compressed: true,
      generationHintCount: generationHints?.interactiveElements?.length ?? 0,
      manualRefinementHintCount: manualRefinementHints?.length ?? 0,
    };
  }

  // Second size check — drop the full Swift code echo if still too large
  serialised = JSON.stringify(compact);
  if (serialised.length > RESPONSE_SIZE_CAP && compact.results?.swiftFile) {
    // The code is already on disk at swiftFile; no need to echo it
    if (compact.results) {
      compact.results = { ...compact.results };
      delete compact.results.code;
    }
    compact._responseCapped = true;
    compact._responseNote = `Response was compressed to stay under ${Math.round(RESPONSE_SIZE_CAP / 1024)}KB. All files were written successfully to disk.`;
  }

  return compact;
}
|
|
1220
|
+
|
|
1221
|
+
/**
 * Assemble the ordered list of post-write refinement steps for a generated
 * screen: an always-present adaptive-layout step, a raster-asset warning when
 * any nodes were rasterized, plus any hints the hosted analysis provided
 * (manual refinements, interactive elements, reusable components).
 *
 * Returns { summary, steps, filePath }.
 */
function buildRefinementInstructions({ diagnostics, rasterNodes, analysisHints, structName, filePath }) {
  const steps = [];

  // Always ask for adaptive layout on the root view.
  steps.push(
    `Make ${structName} adaptive: replace any fixed .frame(width:height:) on the root body with GeometryReader or .frame(maxWidth: .infinity, maxHeight: .infinity) so the screen fills any device.`
  );

  // Warn about rasterized assets so they are not "reconstructed" with shapes.
  if (rasterNodes.length > 0) {
    const rasterLabel = rasterNodes
      .map((node) => node.assetName || node.nodeName || node.name)
      .filter(Boolean)
      .slice(0, 8)
      .join(', ');
    steps.push(
      `IMPORTANT: ${rasterNodes.length} element(s) were rasterized and written to Assets.xcassets as PNG files${rasterLabel ? ` (${rasterLabel})` : ''}. ` +
      `The generated code already references them with Image("name") calls — DO NOT remove or replace these Image() calls. ` +
      `Only replace Image() calls if the asset name clearly identifies a native UI control (button, toggle, tab bar, text field). ` +
      `Photos, illustrations, 3D renders, icons, and decorative graphics MUST stay as Image("name") — do not attempt to reconstruct them with SwiftUI shapes or colors.`
    );
  }

  // Hosted analysis hints may be strings or objects with several text fields.
  for (const hint of analysisHints?.manualRefinementHints ?? []) {
    const text = typeof hint === 'string' ? hint : hint.description || hint.message || hint.text || hint.title || null;
    if (text) {
      steps.push(text);
    }
  }

  const interactive = analysisHints?.generationHints?.interactiveElements;
  if (interactive?.length) {
    const interactiveLabel = interactive
      .map((element) => element.name || element.nodeId || element.id || '(unnamed)')
      .filter(Boolean)
      .join(', ');
    if (interactiveLabel) {
      steps.push(`These elements likely need interaction: ${interactiveLabel}`);
    }
  }

  // Suggest extracting reusable components the analysis identified.
  const reusable = analysisHints?.reusableComponents;
  if (reusable?.length) {
    const reusableLabel = reusable
      .map((component) => component.name || component.componentName || component.id || '(unnamed)')
      .filter(Boolean)
      .join(', ');
    if (reusableLabel) {
      steps.push(`Consider extracting ${reusable.length} reusable component(s): ${reusableLabel}`);
    }
  }

  return {
    summary: `After writing ${structName}.swift, apply these refinements to make it production-ready:`,
    steps,
    filePath,
  };
}
|
|
1283
|
+
|
|
1284
|
+
/**
 * Log a final message, stop the auth-revalidation timer, close the bridge
 * server (only if this process started it — not when it was already running),
 * and terminate the process with `exitCode` (default 1).
 */
async function shutdownBridgeAndExit(message, exitCode = 1) {
  console.error(message);

  // Stop the periodic auth revalidation so it cannot fire mid-shutdown.
  if (authValidationTimer) {
    clearInterval(authValidationTimer);
    authValidationTimer = null;
  }

  const shouldCloseBridge =
    bridgeRuntimeHandle &&
    typeof bridgeRuntimeHandle.close === 'function' &&
    !bridgeRuntimeHandle.alreadyRunning;
  if (shouldCloseBridge) {
    try {
      await bridgeRuntimeHandle.close();
    } catch (error) {
      // Best-effort close: still exit even if the bridge refuses to shut down.
      console.error('[figma-swiftui-mcp] Failed to close bridge cleanly:', error.message);
    }
  }

  process.exit(exitCode);
}
|
|
1299
|
+
|
|
1300
|
+
/**
 * Start (or restart) the periodic CreateLex session revalidation loop.
 *
 * Every AUTH_REVALIDATION_INTERVAL_MS the current `runtimeAuthState` is
 * revalidated; an invalid session or a thrown error shuts the whole process
 * down via shutdownBridgeAndExit. A successful check merges the (possibly
 * refreshed) session back into the module-level state.
 */
function startAuthValidationLoop() {
  // Clear any previous timer so at most one validation loop runs at a time.
  if (authValidationTimer) {
    clearInterval(authValidationTimer);
  }

  authValidationTimer = setInterval(async () => {
    try {
      const validation = await validateRuntimeSession(runtimeAuthState);
      if (!validation.valid) {
        // Session is no longer valid — fatal: stop the server entirely.
        await shutdownBridgeAndExit(`[figma-swiftui-mcp] Authorization lost: ${validation.error}`);
        return;
      }

      // Merge the validated/refreshed session into the module-level auth state.
      runtimeAuthState = {
        ...runtimeAuthState,
        ...validation.session,
        authorized: true,
      };

      if (validation.refreshed) {
        console.error('[figma-swiftui-mcp] Refreshed CreateLex MCP authorization');
      }
    } catch (error) {
      // Any validation failure (network, server, parsing) is treated as fatal.
      await shutdownBridgeAndExit(`[figma-swiftui-mcp] Authorization revalidation failed: ${error.message}`);
    }
  }, AUTH_REVALIDATION_INTERVAL_MS);

  // unref so this timer alone never keeps the Node process alive;
  // optional-chained because unref may be absent on timer-like shims.
  authValidationTimer.unref?.();
}
|
|
1329
|
+
|
|
1330
|
+
/**
 * Server boot sequence:
 *   1. Authorize against CreateLex (or run in bypass mode).
 *   2. Start the local bridge server on the host/port from BRIDGE_HTTP_URL.
 *   3. Attempt an initial bridge connection (non-fatal if it fails).
 *   4. Attach the MCP stdio transport and start periodic auth revalidation.
 *
 * All logging uses console.error: stdout is reserved for the MCP stdio
 * protocol carried by StdioServerTransport.
 */
async function main() {
  runtimeAuthState = await authorizeRuntimeStartup();
  console.error(
    runtimeAuthState.bypass
      ? '[figma-swiftui-mcp] Authorization bypass enabled'
      : `[figma-swiftui-mcp] Authorized CreateLex user ${runtimeAuthState.email || runtimeAuthState.userId || 'unknown-user'}`
  );

  const bridgeHttpUrl = new URL(BRIDGE_HTTP_URL);
  bridgeRuntimeHandle = await startBridgeServer({
    host: bridgeHttpUrl.hostname,
    // Fall back to the scheme's default port when the URL specifies none.
    port: Number(bridgeHttpUrl.port || (bridgeHttpUrl.protocol === 'https:' ? 443 : 80)),
    projectPath: readProjectPathArg(process.argv),
    // Route bridge logs to stderr to keep stdout clean for the MCP transport.
    logger: {
      info: console.error,
      warn: console.error,
      error: console.error,
    },
  });

  try {
    await connectBridge();
  } catch (error) {
    // Non-fatal: startup continues without a live bridge connection.
    console.error('[figma-swiftui-mcp] Bridge connect failed on startup:', error.message);
  }

  const transport = new StdioServerTransport();
  await server.connect(transport);
  startAuthValidationLoop();
  console.error('[figma-swiftui-mcp] MCP server running on stdio');
}
|
|
1361
|
+
|
|
1362
|
+
// Top-level entry point: any unhandled startup or runtime failure is fatal.
main().catch((fatalError) => {
  console.error('[figma-swiftui-mcp] Server error:', fatalError);
  process.exit(1);
});
|