@gemdoq/codi 0.1.8 → 0.1.9

This diff shows the content changes between package versions that have been publicly released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -952,9 +952,20 @@ var Repl = class {
952
952
  }
953
953
  return;
954
954
  }
955
- let message = input3;
956
955
  const IMAGE_EXTS = /* @__PURE__ */ new Set([".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp", ".svg"]);
956
+ const MIME_MAP = {
957
+ ".png": "image/png",
958
+ ".jpg": "image/jpeg",
959
+ ".jpeg": "image/jpeg",
960
+ ".gif": "image/gif",
961
+ ".webp": "image/webp",
962
+ ".bmp": "image/bmp",
963
+ ".svg": "image/svg+xml"
964
+ };
957
965
  const atMatches = input3.match(/@([\w.\/\\:~-]+)/g);
966
+ let hasImages = false;
967
+ const imageBlocks = [];
968
+ let message = input3;
958
969
  if (atMatches) {
959
970
  for (const match of atMatches) {
960
971
  const filePath = match.slice(1);
@@ -963,19 +974,13 @@ var Repl = class {
963
974
  if (IMAGE_EXTS.has(ext)) {
964
975
  const data = readFileSync3(filePath);
965
976
  const base64 = data.toString("base64");
966
- const mimeMap = {
967
- ".png": "image/png",
968
- ".jpg": "image/jpeg",
969
- ".jpeg": "image/jpeg",
970
- ".gif": "image/gif",
971
- ".webp": "image/webp",
972
- ".bmp": "image/bmp",
973
- ".svg": "image/svg+xml"
974
- };
975
- const mime = mimeMap[ext] || "image/png";
976
- message = message.replace(match, `
977
- [Image: ${filePath}](data:${mime};base64,${base64})
978
- `);
977
+ const mime = MIME_MAP[ext] || "image/png";
978
+ imageBlocks.push({
979
+ type: "image",
980
+ source: { type: "base64", media_type: mime, data: base64 }
981
+ });
982
+ message = message.replace(match, `[\uC774\uBBF8\uC9C0: ${path5.basename(filePath)}]`);
983
+ hasImages = true;
979
984
  } else {
980
985
  const content = readFileSync3(filePath, "utf-8");
981
986
  message = message.replace(match, `
@@ -989,7 +994,15 @@ ${content}
989
994
  }
990
995
  }
991
996
  }
992
- await this.options.onMessage(message);
997
+ if (hasImages) {
998
+ const blocks = [
999
+ { type: "text", text: message.trim() },
1000
+ ...imageBlocks
1001
+ ];
1002
+ await this.options.onMessage(blocks);
1003
+ } else {
1004
+ await this.options.onMessage(message);
1005
+ }
993
1006
  }
994
1007
  openEditor() {
995
1008
  try {
@@ -1442,11 +1455,31 @@ LLM Error: ${errMsg}`));
1442
1455
  console.log("");
1443
1456
  }
1444
1457
  const results = await executor.executeMany(response.toolCalls);
1445
- const toolResults = results.map((r) => ({
1446
- tool_use_id: r.toolUseId,
1447
- content: r.result.output,
1448
- is_error: !r.result.success
1449
- }));
1458
+ const toolResults = results.map((r) => {
1459
+ if (r.result.metadata?.isImage && r.result.metadata.imageData) {
1460
+ const blocks = [
1461
+ { type: "text", text: r.result.output },
1462
+ {
1463
+ type: "image",
1464
+ source: {
1465
+ type: "base64",
1466
+ media_type: r.result.metadata.imageMimeType || "image/png",
1467
+ data: r.result.metadata.imageData
1468
+ }
1469
+ }
1470
+ ];
1471
+ return {
1472
+ tool_use_id: r.toolUseId,
1473
+ content: blocks,
1474
+ is_error: !r.result.success
1475
+ };
1476
+ }
1477
+ return {
1478
+ tool_use_id: r.toolUseId,
1479
+ content: r.result.output,
1480
+ is_error: !r.result.success
1481
+ };
1482
+ });
1450
1483
  conversation.addToolResults(toolResults);
1451
1484
  }
1452
1485
  }
@@ -4310,13 +4343,38 @@ var AnthropicProvider = class {
4310
4343
  name: block.name,
4311
4344
  input: block.input
4312
4345
  };
4313
- case "tool_result":
4346
+ case "tool_result": {
4347
+ let trContent;
4348
+ if (typeof block.content === "string") {
4349
+ trContent = block.content;
4350
+ } else if (Array.isArray(block.content)) {
4351
+ trContent = [];
4352
+ for (const cb of block.content) {
4353
+ if (cb.type === "text") {
4354
+ trContent.push({ type: "text", text: cb.text });
4355
+ } else if (cb.type === "image") {
4356
+ trContent.push({
4357
+ type: "image",
4358
+ source: {
4359
+ type: "base64",
4360
+ media_type: cb.source.media_type,
4361
+ data: cb.source.data
4362
+ }
4363
+ });
4364
+ } else {
4365
+ trContent.push({ type: "text", text: JSON.stringify(cb) });
4366
+ }
4367
+ }
4368
+ } else {
4369
+ trContent = JSON.stringify(block.content);
4370
+ }
4314
4371
  return {
4315
4372
  type: "tool_result",
4316
4373
  tool_use_id: block.tool_use_id,
4317
- content: typeof block.content === "string" ? block.content : JSON.stringify(block.content),
4374
+ content: trContent,
4318
4375
  ...block.is_error ? { is_error: true } : {}
4319
4376
  };
4377
+ }
4320
4378
  default:
4321
4379
  return { type: "text", text: JSON.stringify(block) };
4322
4380
  }
@@ -4520,15 +4578,43 @@ var OpenAIProvider = class {
4520
4578
  }
4521
4579
  const hasToolResults = m.content.some((b) => b.type === "tool_result");
4522
4580
  if (hasToolResults) {
4581
+ const pendingImages = [];
4523
4582
  for (const block of m.content) {
4524
4583
  if (block.type === "tool_result") {
4525
- result.push({
4526
- role: "tool",
4527
- tool_call_id: block.tool_use_id,
4528
- content: typeof block.content === "string" ? block.content : JSON.stringify(block.content)
4529
- });
4584
+ if (Array.isArray(block.content)) {
4585
+ const textParts = [];
4586
+ for (const cb of block.content) {
4587
+ if (cb.type === "text") textParts.push(cb.text);
4588
+ else if (cb.type === "image") {
4589
+ pendingImages.push({
4590
+ type: "image_url",
4591
+ image_url: { url: `data:${cb.source.media_type};base64,${cb.source.data}` }
4592
+ });
4593
+ }
4594
+ }
4595
+ result.push({
4596
+ role: "tool",
4597
+ tool_call_id: block.tool_use_id,
4598
+ content: textParts.join("\n") || "(image)"
4599
+ });
4600
+ } else {
4601
+ result.push({
4602
+ role: "tool",
4603
+ tool_call_id: block.tool_use_id,
4604
+ content: block.content
4605
+ });
4606
+ }
4530
4607
  }
4531
4608
  }
4609
+ if (pendingImages.length > 0) {
4610
+ result.push({
4611
+ role: "user",
4612
+ content: [
4613
+ { type: "text", text: "\uC704 \uB3C4\uAD6C\uAC00 \uBC18\uD658\uD55C \uC774\uBBF8\uC9C0\uC785\uB2C8\uB2E4. \uC774 \uC774\uBBF8\uC9C0\uB97C \uBD84\uC11D\uC5D0 \uD65C\uC6A9\uD558\uC138\uC694." },
4614
+ ...pendingImages
4615
+ ]
4616
+ });
4617
+ }
4532
4618
  continue;
4533
4619
  }
4534
4620
  const hasToolUse = m.content.some((b) => b.type === "tool_use");
@@ -4719,7 +4805,14 @@ var OllamaProvider = class {
4719
4805
  } else if (block.type === "image") {
4720
4806
  images.push(block.source.data);
4721
4807
  } else if (block.type === "tool_result") {
4722
- textParts.push(`[Tool Result: ${typeof block.content === "string" ? block.content : JSON.stringify(block.content)}]`);
4808
+ if (Array.isArray(block.content)) {
4809
+ for (const cb of block.content) {
4810
+ if (cb.type === "text") textParts.push(cb.text);
4811
+ else if (cb.type === "image") images.push(cb.source.data);
4812
+ }
4813
+ } else {
4814
+ textParts.push(`[Tool Result: ${block.content}]`);
4815
+ }
4723
4816
  } else if (block.type === "tool_use") {
4724
4817
  textParts.push(`[Tool Call: ${block.name}(${JSON.stringify(block.input)})]`);
4725
4818
  }
@@ -5018,7 +5111,8 @@ async function main() {
5018
5111
  };
5019
5112
  const repl = new Repl({
5020
5113
  onMessage: async (message) => {
5021
- checkpointManager.create(conversation, message.slice(0, 50));
5114
+ const preview = typeof message === "string" ? message.slice(0, 50) : message.find((b) => b.type === "text")?.text?.slice(0, 50) || "image";
5115
+ checkpointManager.create(conversation, preview);
5022
5116
  if (compressor.shouldCompress(conversation)) {
5023
5117
  console.log(chalk13.dim("Auto-compacting conversation..."));
5024
5118
  await compressor.compress(conversation, provider);