unity-agent-tools 0.5.0 → 0.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/server.mjs +200 -3
- package/package.json +1 -1
package/dist/server.mjs
CHANGED
|
@@ -31188,6 +31188,13 @@ async function handleAIQuery(request) {
|
|
|
31188
31188
|
let resultText = "";
|
|
31189
31189
|
request.onStatus?.("\u23F3 Claude Code \uC791\uC5C5 \uC2DC\uC791...");
|
|
31190
31190
|
const serverPath = join(dirname(fileURLToPath(import.meta.url)), "server.mjs");
|
|
31191
|
+
const mcpEnv = { ...process.env };
|
|
31192
|
+
if (request.geminiApiKey)
|
|
31193
|
+
mcpEnv.GEMINI_API_KEY = request.geminiApiKey;
|
|
31194
|
+
if (request.geminiModel)
|
|
31195
|
+
mcpEnv.GEMINI_MODEL = request.geminiModel;
|
|
31196
|
+
if (request.referenceImage)
|
|
31197
|
+
mcpEnv.GEMINI_REFERENCE_IMAGE = request.referenceImage;
|
|
31191
31198
|
for await (const msg of query({
|
|
31192
31199
|
prompt: fullPrompt,
|
|
31193
31200
|
options: {
|
|
@@ -31197,7 +31204,8 @@ async function handleAIQuery(request) {
|
|
|
31197
31204
|
mcpServers: {
|
|
31198
31205
|
"unity-agent-tools": {
|
|
31199
31206
|
command: "node",
|
|
31200
|
-
args: [serverPath]
|
|
31207
|
+
args: [serverPath],
|
|
31208
|
+
env: mcpEnv
|
|
31201
31209
|
}
|
|
31202
31210
|
}
|
|
31203
31211
|
}
|
|
@@ -31237,7 +31245,7 @@ async function handleAIQuery(request) {
|
|
|
31237
31245
|
}
|
|
31238
31246
|
}
|
|
31239
31247
|
function buildFullPrompt(userPrompt, context, language, projectPath) {
|
|
31240
|
-
let prompt = "You are a Unity development expert assistant embedded in a Unity Editor plugin. You can read, create, modify, and delete files in the Unity project. You have expertise in shaders (HLSL/ShaderLab), C# scripts, materials, textures, and all Unity workflows. You can also diagnose and fix Unity errors that prevent the project from compiling or running. Do NOT ask the user for file paths or project paths \u2014 the working directory is already set to the Unity project root. When fixing errors: read the relevant source files, understand the root cause, apply the fix, and explain what you changed. Answer clearly and concisely. When the user asks you to modify or create files, do it directly.\n";
|
|
31248
|
+
let prompt = "You are a Unity development expert assistant embedded in a Unity Editor plugin. You can read, create, modify, and delete files in the Unity project. You have expertise in shaders (HLSL/ShaderLab), C# scripts, materials, textures, and all Unity workflows. You can also diagnose and fix Unity errors that prevent the project from compiling or running. You can generate images using the generate_image tool (powered by Google Nano Banana / Gemini Image). When the user asks to create a texture, sprite, icon, or any visual asset, use the generate_image tool with a detailed prompt. The generated image will appear in the Unity Editor where the user can save it to their project. Do NOT ask the user for file paths or project paths \u2014 the working directory is already set to the Unity project root. When fixing errors: read the relevant source files, understand the root cause, apply the fix, and explain what you changed. Answer clearly and concisely. When the user asks you to modify or create files, do it directly.\n";
|
|
31241
31249
|
if (projectPath) {
|
|
31242
31250
|
prompt += `Unity project path: ${projectPath}
|
|
31243
31251
|
`;
|
|
@@ -31792,6 +31800,185 @@ function registerListProjectFilesTool(server, bridge) {
|
|
|
31792
31800
|
});
|
|
31793
31801
|
}
|
|
31794
31802
|
|
|
31803
|
+
// build/gemini-handler.js
/**
 * Generate an image via the Gemini ("Nano Banana") generateContent REST API.
 *
 * @param {{ apiKey: string, model: string, prompt: string, referenceImage?: string }} request
 *   - apiKey: Gemini API key (required; failure result if missing).
 *   - model: Gemini model id, e.g. "gemini-2.5-flash-preview-image-generation".
 *   - prompt: text prompt describing the desired image.
 *   - referenceImage: optional base64-encoded PNG included as an inline reference.
 * @returns {Promise<{ success: boolean, imageBase64?: string, description?: string, error?: string }>}
 *   Never throws: every failure path is reported as { success: false, error }.
 */
async function generateImage(request) {
  const { apiKey, model, prompt, referenceImage } = request;
  if (!apiKey) {
    return {
      success: false,
      error: "Gemini API key not configured. Please set it in AI Chat > Settings."
    };
  }
  // Security: send the key via the x-goog-api-key header instead of a ?key=
  // query parameter so it cannot leak into URL/proxy/server logs. Encode the
  // model segment in case it ever contains URL-significant characters.
  const url = `https://generativelanguage.googleapis.com/v1beta/models/${encodeURIComponent(model)}:generateContent`;
  const parts = [];
  if (referenceImage) {
    // Reference image goes first so the text prompt is applied to it.
    parts.push({
      inlineData: {
        mimeType: "image/png",
        data: referenceImage
      }
    });
  }
  parts.push({
    text: prompt
  });
  const body = {
    contents: [
      {
        parts
      }
    ],
    generationConfig: {
      // Ask the model for both a textual description and image bytes.
      responseModalities: ["TEXT", "IMAGE"]
    }
  };
  try {
    // stderr is used for logging so stdout stays clean for MCP traffic.
    console.error(`[NanoBanana] Generating image with ${model}: "${prompt.substring(0, 60)}..."`);
    const response = await fetch(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-goog-api-key": apiKey
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      const errorText = await response.text();
      let errorMsg = `Gemini API error (${response.status})`;
      try {
        // Prefer the structured API error message when the body is JSON.
        const errorJson = JSON.parse(errorText);
        errorMsg = errorJson.error?.message || errorMsg;
      } catch {
        errorMsg += `: ${errorText.substring(0, 200)}`;
      }
      return { success: false, error: errorMsg };
    }
    const data = await response.json();
    let imageBase64;
    let description;
    // The response interleaves text and inlineData parts; keep the last of each.
    const candidates = data.candidates;
    if (candidates && candidates.length > 0) {
      const responseParts = candidates[0].content?.parts;
      if (responseParts) {
        for (const part of responseParts) {
          if (part.inlineData?.data) {
            imageBase64 = part.inlineData.data;
          }
          if (part.text) {
            description = part.text;
          }
        }
      }
    }
    if (!imageBase64) {
      // Happens with text-only models or when the prompt is safety-filtered;
      // pass along any textual explanation the model returned.
      return {
        success: false,
        error: "Gemini returned no image data. The model may not support image generation, or the prompt was filtered.",
        ...description ? { description } : {}
      };
    }
    console.error("[NanoBanana] Image generated successfully.");
    return {
      success: true,
      imageBase64,
      description: description || "Generated image"
    };
  } catch (err) {
    // Network/runtime failures also surface as a result object, not a throw.
    const msg = err instanceof Error ? err.message : String(err);
    console.error(`[NanoBanana] Error: ${msg}`);
    return { success: false, error: `Gemini request failed: ${msg}` };
  }
}
|
|
31889
|
+
|
|
31890
|
+
// build/tools/generate-image.js
// Shared, mutable Gemini settings consumed by the generate_image tool.
// Environment variables always take precedence over values pushed in through
// the setters (the env path is used when this server runs as a spawned MCP
// child process with GEMINI_* in its environment).
var geminiConfig = (() => {
  let storedApiKey = "";
  let storedModel = "gemini-2.5-flash-preview-image-generation";
  let storedReferenceImage;
  return {
    get apiKey() {
      return process.env.GEMINI_API_KEY || storedApiKey;
    },
    set apiKey(value) {
      storedApiKey = value;
    },
    get model() {
      return process.env.GEMINI_MODEL || storedModel;
    },
    set model(value) {
      storedModel = value;
    },
    get referenceImage() {
      return process.env.GEMINI_REFERENCE_IMAGE || storedReferenceImage;
    },
    set referenceImage(value) {
      storedReferenceImage = value;
    }
  };
})();
|
|
31914
|
+
// Register the generate_image MCP tool. Generated images are pushed to the
// Unity Editor over the bridge via an "image/generated" notification; the
// tool result itself only carries a textual summary for the model.
function registerGenerateImageTool(server, bridge) {
  // Wrap an error message in the MCP tool-result shape.
  const errorResult = (text) => ({
    content: [
      {
        type: "text",
        text
      }
    ],
    isError: true
  });
  server.tool(
    "generate_image",
    "Generate an image using Google's Nano Banana (Gemini Image Generation). Use this when the user asks to create, generate, or make an image, texture, sprite, icon, or visual asset. The generated image will be displayed in the Unity Editor's AI Chat window where the user can save it to their project. You should describe what you're generating and call this tool with a detailed prompt.",
    {
      prompt: external_exports.string().describe("Detailed image generation prompt. Be specific about style, colors, composition, and content. For game textures, include terms like 'seamless', 'tileable', 'PBR', etc."),
      useReferenceImage: external_exports.boolean().optional().describe("Whether to include the user's reference image (if one is set in the UI). Default: true if available.")
    },
    async ({ prompt, useReferenceImage }) => {
      // Guard: without a key the handler reports the problem to the model
      // instead of attempting a request.
      if (!geminiConfig.apiKey) {
        return errorResult("Error: Gemini API key is not configured. The user needs to set it in AI Chat > Settings panel.");
      }
      try {
        // Reference image defaults to "on" when one is configured; only an
        // explicit useReferenceImage === false opts out.
        const includeReference = useReferenceImage !== false;
        const reference = includeReference && geminiConfig.referenceImage ? geminiConfig.referenceImage : void 0;
        const outcome = await generateImage({
          apiKey: geminiConfig.apiKey,
          model: geminiConfig.model,
          prompt,
          referenceImage: reference
        });
        if (!(outcome.success && outcome.imageBase64)) {
          return errorResult(`Image generation failed: ${outcome.error || "Unknown error"}`);
        }
        // Ship the raw image to the Editor first; the tool result only
        // summarizes what happened.
        bridge.sendRaw({
          method: "image/generated",
          imageData: outcome.imageBase64,
          description: outcome.description || `Generated: ${prompt.substring(0, 60)}`
        });
        const summary = `Image generated successfully and sent to the Unity Editor.
Model: ${geminiConfig.model}
Prompt: "${prompt}"
` + (outcome.description ? `Description: ${outcome.description}
` : "") + `The user can now preview and save it to their project from the AI Chat window.`;
        return {
          content: [
            {
              type: "text",
              text: summary
            }
          ]
        };
      } catch (err) {
        return errorResult(`Image generation error: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
  );
}
|
|
31981
|
+
|
|
31795
31982
|
// build/resources/pipeline-info.js
|
|
31796
31983
|
function registerPipelineInfoResource(server, bridge) {
|
|
31797
31984
|
server.resource("pipeline-info", "unity://pipeline/info", {
|
|
@@ -31920,7 +32107,7 @@ function registerEditorPlatformResource(server, bridge) {
|
|
|
31920
32107
|
async function main() {
|
|
31921
32108
|
const server = new McpServer({
|
|
31922
32109
|
name: "unity-agent-tools",
|
|
31923
|
-
version: "0.
|
|
32110
|
+
version: "0.7.1"
|
|
31924
32111
|
});
|
|
31925
32112
|
const bridge = new UnityBridge("ws://localhost:8090");
|
|
31926
32113
|
const lspClient = new ShaderLspClient();
|
|
@@ -31937,6 +32124,7 @@ async function main() {
|
|
|
31937
32124
|
registerReadProjectFileTool(server, bridge);
|
|
31938
32125
|
registerWriteProjectFileTool(server, bridge);
|
|
31939
32126
|
registerListProjectFilesTool(server, bridge);
|
|
32127
|
+
registerGenerateImageTool(server, bridge);
|
|
31940
32128
|
registerPipelineInfoResource(server, bridge);
|
|
31941
32129
|
registerShaderIncludesResource(server, bridge);
|
|
31942
32130
|
registerShaderKeywordsResource(server, bridge);
|
|
@@ -31950,6 +32138,12 @@ async function main() {
|
|
|
31950
32138
|
console.error("[UnityAgent] Invalid AI query: missing id or prompt");
|
|
31951
32139
|
return;
|
|
31952
32140
|
}
|
|
32141
|
+
if (params.geminiApiKey) {
|
|
32142
|
+
geminiConfig.apiKey = params.geminiApiKey;
|
|
32143
|
+
geminiConfig.model = params.geminiModel || geminiConfig.model;
|
|
32144
|
+
geminiConfig.referenceImage = params.referenceImage || void 0;
|
|
32145
|
+
console.error(`[NanoBanana] Config updated: model=${geminiConfig.model}, hasRef=${!!geminiConfig.referenceImage}`);
|
|
32146
|
+
}
|
|
31953
32147
|
console.error(`[UnityAgent] AI query received (id=${id}): ${params.prompt.substring(0, 80)}...`);
|
|
31954
32148
|
try {
|
|
31955
32149
|
const result = await handleAIQuery({
|
|
@@ -31957,6 +32151,9 @@ async function main() {
|
|
|
31957
32151
|
context: params.context ?? params.shaderContext,
|
|
31958
32152
|
language: params.language,
|
|
31959
32153
|
projectPath: params.projectPath,
|
|
32154
|
+
geminiApiKey: params.geminiApiKey,
|
|
32155
|
+
geminiModel: params.geminiModel,
|
|
32156
|
+
referenceImage: params.referenceImage,
|
|
31960
32157
|
onChunk: (chunk) => {
|
|
31961
32158
|
bridge.sendRaw({ method: "ai/chunk", id, chunk });
|
|
31962
32159
|
},
|
package/package.json
CHANGED