@llumiverse/drivers 0.24.0 → 1.0.0-dev.20260202.145450Z

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (227)
  1. package/package.json +3 -3
  2. package/src/bedrock/index.ts +109 -2
  3. package/src/vertexai/index.ts +37 -18
  4. package/src/vertexai/models/claude.ts +146 -2
  5. package/lib/cjs/adobe/firefly.js +0 -120
  6. package/lib/cjs/adobe/firefly.js.map +0 -1
  7. package/lib/cjs/azure/azure_foundry.js +0 -432
  8. package/lib/cjs/azure/azure_foundry.js.map +0 -1
  9. package/lib/cjs/bedrock/converse.js +0 -285
  10. package/lib/cjs/bedrock/converse.js.map +0 -1
  11. package/lib/cjs/bedrock/index.js +0 -1091
  12. package/lib/cjs/bedrock/index.js.map +0 -1
  13. package/lib/cjs/bedrock/nova-image-payload.js +0 -207
  14. package/lib/cjs/bedrock/nova-image-payload.js.map +0 -1
  15. package/lib/cjs/bedrock/payloads.js +0 -3
  16. package/lib/cjs/bedrock/payloads.js.map +0 -1
  17. package/lib/cjs/bedrock/s3.js +0 -107
  18. package/lib/cjs/bedrock/s3.js.map +0 -1
  19. package/lib/cjs/bedrock/twelvelabs.js +0 -87
  20. package/lib/cjs/bedrock/twelvelabs.js.map +0 -1
  21. package/lib/cjs/groq/index.js +0 -323
  22. package/lib/cjs/groq/index.js.map +0 -1
  23. package/lib/cjs/huggingface_ie.js +0 -201
  24. package/lib/cjs/huggingface_ie.js.map +0 -1
  25. package/lib/cjs/index.js +0 -31
  26. package/lib/cjs/index.js.map +0 -1
  27. package/lib/cjs/mistral/index.js +0 -173
  28. package/lib/cjs/mistral/index.js.map +0 -1
  29. package/lib/cjs/mistral/types.js +0 -83
  30. package/lib/cjs/mistral/types.js.map +0 -1
  31. package/lib/cjs/openai/azure_openai.js +0 -72
  32. package/lib/cjs/openai/azure_openai.js.map +0 -1
  33. package/lib/cjs/openai/index.js +0 -665
  34. package/lib/cjs/openai/index.js.map +0 -1
  35. package/lib/cjs/openai/openai.js +0 -21
  36. package/lib/cjs/openai/openai.js.map +0 -1
  37. package/lib/cjs/openai/openai_compatible.js +0 -62
  38. package/lib/cjs/openai/openai_compatible.js.map +0 -1
  39. package/lib/cjs/openai/openai_format.js +0 -131
  40. package/lib/cjs/openai/openai_format.js.map +0 -1
  41. package/lib/cjs/package.json +0 -3
  42. package/lib/cjs/replicate.js +0 -275
  43. package/lib/cjs/replicate.js.map +0 -1
  44. package/lib/cjs/test-driver/TestErrorCompletionStream.js +0 -20
  45. package/lib/cjs/test-driver/TestErrorCompletionStream.js.map +0 -1
  46. package/lib/cjs/test-driver/TestValidationErrorCompletionStream.js +0 -24
  47. package/lib/cjs/test-driver/TestValidationErrorCompletionStream.js.map +0 -1
  48. package/lib/cjs/test-driver/index.js +0 -109
  49. package/lib/cjs/test-driver/index.js.map +0 -1
  50. package/lib/cjs/test-driver/utils.js +0 -30
  51. package/lib/cjs/test-driver/utils.js.map +0 -1
  52. package/lib/cjs/togetherai/index.js +0 -126
  53. package/lib/cjs/togetherai/index.js.map +0 -1
  54. package/lib/cjs/togetherai/interfaces.js +0 -3
  55. package/lib/cjs/togetherai/interfaces.js.map +0 -1
  56. package/lib/cjs/vertexai/debug.js +0 -12
  57. package/lib/cjs/vertexai/debug.js.map +0 -1
  58. package/lib/cjs/vertexai/embeddings/embeddings-image.js +0 -27
  59. package/lib/cjs/vertexai/embeddings/embeddings-image.js.map +0 -1
  60. package/lib/cjs/vertexai/embeddings/embeddings-text.js +0 -23
  61. package/lib/cjs/vertexai/embeddings/embeddings-text.js.map +0 -1
  62. package/lib/cjs/vertexai/index.js +0 -576
  63. package/lib/cjs/vertexai/index.js.map +0 -1
  64. package/lib/cjs/vertexai/models/claude.js +0 -485
  65. package/lib/cjs/vertexai/models/claude.js.map +0 -1
  66. package/lib/cjs/vertexai/models/gemini.js +0 -871
  67. package/lib/cjs/vertexai/models/gemini.js.map +0 -1
  68. package/lib/cjs/vertexai/models/imagen.js +0 -303
  69. package/lib/cjs/vertexai/models/imagen.js.map +0 -1
  70. package/lib/cjs/vertexai/models/llama.js +0 -183
  71. package/lib/cjs/vertexai/models/llama.js.map +0 -1
  72. package/lib/cjs/vertexai/models.js +0 -35
  73. package/lib/cjs/vertexai/models.js.map +0 -1
  74. package/lib/cjs/watsonx/index.js +0 -161
  75. package/lib/cjs/watsonx/index.js.map +0 -1
  76. package/lib/cjs/watsonx/interfaces.js +0 -3
  77. package/lib/cjs/watsonx/interfaces.js.map +0 -1
  78. package/lib/cjs/xai/index.js +0 -65
  79. package/lib/cjs/xai/index.js.map +0 -1
  80. package/lib/esm/adobe/firefly.js +0 -116
  81. package/lib/esm/adobe/firefly.js.map +0 -1
  82. package/lib/esm/azure/azure_foundry.js +0 -426
  83. package/lib/esm/azure/azure_foundry.js.map +0 -1
  84. package/lib/esm/bedrock/converse.js +0 -278
  85. package/lib/esm/bedrock/converse.js.map +0 -1
  86. package/lib/esm/bedrock/index.js +0 -1087
  87. package/lib/esm/bedrock/index.js.map +0 -1
  88. package/lib/esm/bedrock/nova-image-payload.js +0 -203
  89. package/lib/esm/bedrock/nova-image-payload.js.map +0 -1
  90. package/lib/esm/bedrock/payloads.js +0 -2
  91. package/lib/esm/bedrock/payloads.js.map +0 -1
  92. package/lib/esm/bedrock/s3.js +0 -99
  93. package/lib/esm/bedrock/s3.js.map +0 -1
  94. package/lib/esm/bedrock/twelvelabs.js +0 -84
  95. package/lib/esm/bedrock/twelvelabs.js.map +0 -1
  96. package/lib/esm/groq/index.js +0 -316
  97. package/lib/esm/groq/index.js.map +0 -1
  98. package/lib/esm/huggingface_ie.js +0 -197
  99. package/lib/esm/huggingface_ie.js.map +0 -1
  100. package/lib/esm/index.js +0 -15
  101. package/lib/esm/index.js.map +0 -1
  102. package/lib/esm/mistral/index.js +0 -169
  103. package/lib/esm/mistral/index.js.map +0 -1
  104. package/lib/esm/mistral/types.js +0 -80
  105. package/lib/esm/mistral/types.js.map +0 -1
  106. package/lib/esm/openai/azure_openai.js +0 -68
  107. package/lib/esm/openai/azure_openai.js.map +0 -1
  108. package/lib/esm/openai/index.js +0 -660
  109. package/lib/esm/openai/index.js.map +0 -1
  110. package/lib/esm/openai/openai.js +0 -14
  111. package/lib/esm/openai/openai.js.map +0 -1
  112. package/lib/esm/openai/openai_compatible.js +0 -55
  113. package/lib/esm/openai/openai_compatible.js.map +0 -1
  114. package/lib/esm/openai/openai_format.js +0 -127
  115. package/lib/esm/openai/openai_format.js.map +0 -1
  116. package/lib/esm/replicate.js +0 -268
  117. package/lib/esm/replicate.js.map +0 -1
  118. package/lib/esm/test-driver/TestErrorCompletionStream.js +0 -16
  119. package/lib/esm/test-driver/TestErrorCompletionStream.js.map +0 -1
  120. package/lib/esm/test-driver/TestValidationErrorCompletionStream.js +0 -20
  121. package/lib/esm/test-driver/TestValidationErrorCompletionStream.js.map +0 -1
  122. package/lib/esm/test-driver/index.js +0 -91
  123. package/lib/esm/test-driver/index.js.map +0 -1
  124. package/lib/esm/test-driver/utils.js +0 -25
  125. package/lib/esm/test-driver/utils.js.map +0 -1
  126. package/lib/esm/togetherai/index.js +0 -122
  127. package/lib/esm/togetherai/index.js.map +0 -1
  128. package/lib/esm/togetherai/interfaces.js +0 -2
  129. package/lib/esm/togetherai/interfaces.js.map +0 -1
  130. package/lib/esm/vertexai/debug.js +0 -6
  131. package/lib/esm/vertexai/debug.js.map +0 -1
  132. package/lib/esm/vertexai/embeddings/embeddings-image.js +0 -24
  133. package/lib/esm/vertexai/embeddings/embeddings-image.js.map +0 -1
  134. package/lib/esm/vertexai/embeddings/embeddings-text.js +0 -20
  135. package/lib/esm/vertexai/embeddings/embeddings-text.js.map +0 -1
  136. package/lib/esm/vertexai/index.js +0 -571
  137. package/lib/esm/vertexai/index.js.map +0 -1
  138. package/lib/esm/vertexai/models/claude.js +0 -479
  139. package/lib/esm/vertexai/models/claude.js.map +0 -1
  140. package/lib/esm/vertexai/models/gemini.js +0 -866
  141. package/lib/esm/vertexai/models/gemini.js.map +0 -1
  142. package/lib/esm/vertexai/models/imagen.js +0 -299
  143. package/lib/esm/vertexai/models/imagen.js.map +0 -1
  144. package/lib/esm/vertexai/models/llama.js +0 -179
  145. package/lib/esm/vertexai/models/llama.js.map +0 -1
  146. package/lib/esm/vertexai/models.js +0 -32
  147. package/lib/esm/vertexai/models.js.map +0 -1
  148. package/lib/esm/watsonx/index.js +0 -157
  149. package/lib/esm/watsonx/index.js.map +0 -1
  150. package/lib/esm/watsonx/interfaces.js +0 -2
  151. package/lib/esm/watsonx/interfaces.js.map +0 -1
  152. package/lib/esm/xai/index.js +0 -58
  153. package/lib/esm/xai/index.js.map +0 -1
  154. package/lib/types/adobe/firefly.d.ts +0 -30
  155. package/lib/types/adobe/firefly.d.ts.map +0 -1
  156. package/lib/types/azure/azure_foundry.d.ts +0 -52
  157. package/lib/types/azure/azure_foundry.d.ts.map +0 -1
  158. package/lib/types/bedrock/converse.d.ts +0 -9
  159. package/lib/types/bedrock/converse.d.ts.map +0 -1
  160. package/lib/types/bedrock/index.d.ts +0 -68
  161. package/lib/types/bedrock/index.d.ts.map +0 -1
  162. package/lib/types/bedrock/nova-image-payload.d.ts +0 -74
  163. package/lib/types/bedrock/nova-image-payload.d.ts.map +0 -1
  164. package/lib/types/bedrock/payloads.d.ts +0 -12
  165. package/lib/types/bedrock/payloads.d.ts.map +0 -1
  166. package/lib/types/bedrock/s3.d.ts +0 -23
  167. package/lib/types/bedrock/s3.d.ts.map +0 -1
  168. package/lib/types/bedrock/twelvelabs.d.ts +0 -50
  169. package/lib/types/bedrock/twelvelabs.d.ts.map +0 -1
  170. package/lib/types/groq/index.d.ts +0 -27
  171. package/lib/types/groq/index.d.ts.map +0 -1
  172. package/lib/types/huggingface_ie.d.ts +0 -35
  173. package/lib/types/huggingface_ie.d.ts.map +0 -1
  174. package/lib/types/index.d.ts +0 -15
  175. package/lib/types/index.d.ts.map +0 -1
  176. package/lib/types/mistral/index.d.ts +0 -25
  177. package/lib/types/mistral/index.d.ts.map +0 -1
  178. package/lib/types/mistral/types.d.ts +0 -132
  179. package/lib/types/mistral/types.d.ts.map +0 -1
  180. package/lib/types/openai/azure_openai.d.ts +0 -25
  181. package/lib/types/openai/azure_openai.d.ts.map +0 -1
  182. package/lib/types/openai/index.d.ts +0 -31
  183. package/lib/types/openai/index.d.ts.map +0 -1
  184. package/lib/types/openai/openai.d.ts +0 -15
  185. package/lib/types/openai/openai.d.ts.map +0 -1
  186. package/lib/types/openai/openai_compatible.d.ts +0 -26
  187. package/lib/types/openai/openai_compatible.d.ts.map +0 -1
  188. package/lib/types/openai/openai_format.d.ts +0 -21
  189. package/lib/types/openai/openai_format.d.ts.map +0 -1
  190. package/lib/types/replicate.d.ts +0 -48
  191. package/lib/types/replicate.d.ts.map +0 -1
  192. package/lib/types/test-driver/TestErrorCompletionStream.d.ts +0 -9
  193. package/lib/types/test-driver/TestErrorCompletionStream.d.ts.map +0 -1
  194. package/lib/types/test-driver/TestValidationErrorCompletionStream.d.ts +0 -9
  195. package/lib/types/test-driver/TestValidationErrorCompletionStream.d.ts.map +0 -1
  196. package/lib/types/test-driver/index.d.ts +0 -24
  197. package/lib/types/test-driver/index.d.ts.map +0 -1
  198. package/lib/types/test-driver/utils.d.ts +0 -5
  199. package/lib/types/test-driver/utils.d.ts.map +0 -1
  200. package/lib/types/togetherai/index.d.ts +0 -23
  201. package/lib/types/togetherai/index.d.ts.map +0 -1
  202. package/lib/types/togetherai/interfaces.d.ts +0 -96
  203. package/lib/types/togetherai/interfaces.d.ts.map +0 -1
  204. package/lib/types/vertexai/debug.d.ts +0 -2
  205. package/lib/types/vertexai/debug.d.ts.map +0 -1
  206. package/lib/types/vertexai/embeddings/embeddings-image.d.ts +0 -11
  207. package/lib/types/vertexai/embeddings/embeddings-image.d.ts.map +0 -1
  208. package/lib/types/vertexai/embeddings/embeddings-text.d.ts +0 -10
  209. package/lib/types/vertexai/embeddings/embeddings-text.d.ts.map +0 -1
  210. package/lib/types/vertexai/index.d.ts +0 -65
  211. package/lib/types/vertexai/index.d.ts.map +0 -1
  212. package/lib/types/vertexai/models/claude.d.ts +0 -28
  213. package/lib/types/vertexai/models/claude.d.ts.map +0 -1
  214. package/lib/types/vertexai/models/gemini.d.ts +0 -18
  215. package/lib/types/vertexai/models/gemini.d.ts.map +0 -1
  216. package/lib/types/vertexai/models/imagen.d.ts +0 -75
  217. package/lib/types/vertexai/models/imagen.d.ts.map +0 -1
  218. package/lib/types/vertexai/models/llama.d.ts +0 -20
  219. package/lib/types/vertexai/models/llama.d.ts.map +0 -1
  220. package/lib/types/vertexai/models.d.ts +0 -15
  221. package/lib/types/vertexai/models.d.ts.map +0 -1
  222. package/lib/types/watsonx/index.d.ts +0 -27
  223. package/lib/types/watsonx/index.d.ts.map +0 -1
  224. package/lib/types/watsonx/interfaces.d.ts +0 -65
  225. package/lib/types/watsonx/interfaces.d.ts.map +0 -1
  226. package/lib/types/xai/index.d.ts +0 -18
  227. package/lib/types/xai/index.d.ts.map +0 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@llumiverse/drivers",
3
- "version": "0.24.0",
3
+ "version": "1.0.0-dev.20260202.145450Z",
4
4
  "type": "module",
5
5
  "description": "LLM driver implementations. Currently supported are: openai, huggingface, bedrock, replicate.",
6
6
  "files": [
@@ -73,8 +73,8 @@
73
73
  "node-web-stream-adapters": "^0.2.1",
74
74
  "openai": "^6.10.0",
75
75
  "replicate": "^1.3.1",
76
- "@llumiverse/common": "0.24.0",
77
- "@llumiverse/core": "0.24.0"
76
+ "@llumiverse/common": "1.0.0-dev.20260202.145450Z",
77
+ "@llumiverse/core": "1.0.0-dev.20260202.145450Z"
78
78
  },
79
79
  "ts_dual_module": {
80
80
  "outDir": "lib"
@@ -2,7 +2,7 @@ import {
2
2
  Bedrock, CreateModelCustomizationJobCommand, FoundationModelSummary, GetModelCustomizationJobCommand,
3
3
  GetModelCustomizationJobCommandOutput, ModelCustomizationJobStatus, ModelModality, StopModelCustomizationJobCommand
4
4
  } from "@aws-sdk/client-bedrock";
5
- import { BedrockRuntime, ConverseRequest, ConverseResponse, ConverseStreamOutput, InferenceConfiguration, Tool } from "@aws-sdk/client-bedrock-runtime";
5
+ import { BedrockRuntime, ContentBlock, ConverseRequest, ConverseResponse, ConverseStreamOutput, InferenceConfiguration, Message, Tool } from "@aws-sdk/client-bedrock-runtime";
6
6
  import { S3Client } from "@aws-sdk/client-s3";
7
7
  import { AwsCredentialIdentity, Provider } from "@aws-sdk/types";
8
8
  import {
@@ -1182,6 +1182,14 @@ export class BedrockDriver extends AbstractDriver<BedrockDriverOptions, BedrockP
1182
1182
  token_count: undefined
1183
1183
  };
1184
1184
  }
1185
+
1186
+ /**
1187
+ * Cleanup AWS SDK clients when the driver is evicted from the cache.
1188
+ */
1189
+ destroy(): void {
1190
+ this._executor?.destroy();
1191
+ this._service?.destroy();
1192
+ }
1185
1193
  }
1186
1194
 
1187
1195
  function jobInfo(job: GetModelCustomizationJobCommandOutput, jobId: string): TrainingJob {
@@ -1261,13 +1269,112 @@ function updateConversation(conversation: ConverseRequest, prompt: ConverseReque
1261
1269
  const combinedMessages = [...(conversation?.messages || []), ...(prompt.messages || [])];
1262
1270
  const combinedSystem = prompt.system || conversation?.system;
1263
1271
 
1272
+ // Fix orphaned toolUse blocks before returning
1273
+ const fixedMessages = fixOrphanedToolUse(combinedMessages);
1274
+
1264
1275
  return {
1265
1276
  modelId: prompt?.modelId || conversation?.modelId,
1266
- messages: combinedMessages.length > 0 ? combinedMessages : [],
1277
+ messages: fixedMessages.length > 0 ? fixedMessages : [],
1267
1278
  system: combinedSystem && combinedSystem.length > 0 ? combinedSystem : undefined,
1268
1279
  };
1269
1280
  }
1270
1281
 
1282
+ /**
1283
+ * Fix orphaned toolUse blocks in the conversation.
1284
+ *
1285
+ * When an agent is stopped mid-tool-execution, the assistant message contains toolUse blocks
1286
+ * but no corresponding toolResult was added. The AWS Converse API requires that every toolUse
1287
+ * must be followed by a toolResult in the next user message.
1288
+ *
1289
+ * This function detects such cases and injects synthetic toolResult blocks indicating
1290
+ * the tools were interrupted, allowing the conversation to continue.
1291
+ */
1292
+ export function fixOrphanedToolUse(messages: Message[]): Message[] {
1293
+ if (messages.length < 2) return messages;
1294
+
1295
+ const result: Message[] = [];
1296
+
1297
+ for (let i = 0; i < messages.length; i++) {
1298
+ const current = messages[i];
1299
+ result.push(current);
1300
+
1301
+ // Check if this is an assistant message with toolUse blocks
1302
+ if (current.role === 'assistant' && current.content) {
1303
+ // Extract toolUse blocks using simple property check (same pattern as existing Bedrock code)
1304
+ const toolUseBlocks: Array<{ toolUseId: string; name: string }> = [];
1305
+ for (const block of current.content) {
1306
+ if (block.toolUse?.toolUseId) {
1307
+ toolUseBlocks.push({
1308
+ toolUseId: block.toolUse.toolUseId,
1309
+ name: block.toolUse.name ?? 'unknown'
1310
+ });
1311
+ }
1312
+ }
1313
+
1314
+ if (toolUseBlocks.length > 0) {
1315
+ // Check if the next message is a user message with matching toolResults
1316
+ const nextMessage = messages[i + 1];
1317
+
1318
+ if (nextMessage && nextMessage.role === 'user' && nextMessage.content) {
1319
+ // Get toolResult IDs from the next message using simple property check
1320
+ const toolResultIds = new Set<string>();
1321
+ for (const block of nextMessage.content) {
1322
+ if (block.toolResult?.toolUseId) {
1323
+ toolResultIds.add(block.toolResult.toolUseId);
1324
+ }
1325
+ }
1326
+
1327
+ // Find orphaned toolUse blocks (no matching toolResult)
1328
+ const orphanedToolUse = toolUseBlocks.filter(tu => !toolResultIds.has(tu.toolUseId));
1329
+
1330
+ if (orphanedToolUse.length > 0) {
1331
+ // Inject synthetic toolResults for orphaned toolUse
1332
+ const syntheticResults: ContentBlock[] = orphanedToolUse.map(tu => ({
1333
+ toolResult: {
1334
+ toolUseId: tu.toolUseId,
1335
+ content: [{
1336
+ text: `[Tool interrupted: The user stopped the operation before "${tu.name}" could execute.]`
1337
+ }]
1338
+ }
1339
+ }));
1340
+
1341
+ // Prepend synthetic results to the next user message
1342
+ const updatedNextMessage: Message = {
1343
+ ...nextMessage,
1344
+ content: [...syntheticResults, ...nextMessage.content]
1345
+ };
1346
+
1347
+ // Replace the next message in our iteration
1348
+ messages[i + 1] = updatedNextMessage;
1349
+ }
1350
+ } else if (nextMessage && nextMessage.role === 'user' && !nextMessage.content) {
1351
+ // Next message is a user message but has no content
1352
+ // We need to add toolResults
1353
+ const syntheticResults: ContentBlock[] = toolUseBlocks.map(tu => ({
1354
+ toolResult: {
1355
+ toolUseId: tu.toolUseId,
1356
+ content: [{
1357
+ text: `[Tool interrupted: The user stopped the operation before "${tu.name}" could execute.]`
1358
+ }]
1359
+ }
1360
+ }));
1361
+
1362
+ const updatedNextMessage: Message = {
1363
+ role: 'user',
1364
+ content: syntheticResults
1365
+ };
1366
+
1367
+ messages[i + 1] = updatedNextMessage;
1368
+ }
1369
+ // Note: If there's no nextMessage, we leave the conversation as-is.
1370
+ // The toolUse blocks are expected to be there - the next turn will provide toolResults.
1371
+ }
1372
+ }
1373
+ }
1374
+
1375
+ return result;
1376
+ }
1377
+
1271
1378
  function formatAmazonModalities(modalities: ModelModality[]): string[] {
1272
1379
  const standardizedModalities: string[] = [];
1273
1380
  for (const modality of modalities) {
@@ -305,32 +305,40 @@ export class VertexAIDriver extends AbstractDriver<VertexAIDriverOptions, Vertex
305
305
  // Add function calls if present (Gemini format)
306
306
  if (toolUse && toolUse.length > 0) {
307
307
  for (const tool of toolUse as any[]) {
308
- parts.push({
308
+ const functionCallPart: any = {
309
309
  functionCall: {
310
310
  name: tool.tool_name,
311
311
  args: tool.tool_input,
312
312
  }
313
- });
313
+ };
314
+ // Include thought_signature for Gemini thinking models (2.5+/3.0+)
315
+ // This must be preserved in the conversation for subsequent API calls
316
+ if (tool.thought_signature) {
317
+ functionCallPart.thoughtSignature = tool.thought_signature;
318
+ }
319
+ parts.push(functionCallPart);
314
320
  }
315
321
  }
316
322
 
317
- // Build assistant message in Gemini Content format
318
- const assistantContent: Content = {
319
- role: 'model',
320
- parts: parts.length > 0 ? parts : [{ text: '' }]
321
- };
322
-
323
323
  // Unwrap array if wrapped, otherwise treat as array
324
324
  const unwrapped = unwrapConversationArray<Content>(options.conversation);
325
325
  const existingConversation = unwrapped ?? (options.conversation as Content[] || []);
326
326
 
327
- // Combine existing conversation + prompt contents + assistant response
327
+ // Combine existing conversation + prompt contents
328
328
  let conversation: Content[] = [
329
329
  ...existingConversation,
330
330
  ...prompt.contents,
331
- assistantContent
332
331
  ];
333
332
 
333
+ // Only add assistant message if there's actual content
334
+ // (Empty text parts can cause API errors)
335
+ if (parts.length > 0) {
336
+ conversation.push({
337
+ role: 'model',
338
+ parts: parts
339
+ });
340
+ }
341
+
334
342
  // Increment turn counter
335
343
  conversation = incrementConversationTurn(conversation) as Content[];
336
344
 
@@ -398,23 +406,25 @@ export class VertexAIDriver extends AbstractDriver<VertexAIDriverOptions, Vertex
398
406
  }
399
407
  }
400
408
 
401
- // Build assistant message
402
- const assistantMessage = {
403
- role: 'assistant',
404
- content: content.length > 0 ? content : [{ type: 'text', text: '' }]
405
- };
406
-
407
409
  // Get existing conversation or start fresh
408
410
  const existingMessages = (options.conversation as any)?.messages ?? [];
409
411
  const existingSystem = (options.conversation as any)?.system ?? prompt.system;
410
412
 
411
- // Combine: existing conversation + new prompt messages + assistant response
413
+ // Build the new messages array
412
414
  const newMessages = [
413
415
  ...existingMessages,
414
416
  ...prompt.messages,
415
- assistantMessage
416
417
  ];
417
418
 
419
+ // Only add assistant message if there's actual content
420
+ // (Claude API rejects empty text content blocks)
421
+ if (content.length > 0) {
422
+ newMessages.push({
423
+ role: 'assistant',
424
+ content: content
425
+ });
426
+ }
427
+
418
428
  // Build the new conversation in ClaudePrompt format
419
429
  const conversation = {
420
430
  messages: newMessages,
@@ -682,6 +692,15 @@ export class VertexAIDriver extends AbstractDriver<VertexAIDriverOptions, Vertex
682
692
  };
683
693
  return getEmbeddingsForText(this, text_options);
684
694
  }
695
+
696
+ /**
697
+ * Cleanup Google Cloud clients when the driver is evicted from the cache.
698
+ */
699
+ destroy(): void {
700
+ this.aiplatform?.close();
701
+ this.modelGarden?.close();
702
+ this.imagenClient?.close();
703
+ }
685
704
  }
686
705
 
687
706
  //'us-central1-aiplatform.googleapis.com',
@@ -516,6 +516,136 @@ function updateConversation(conversation: ClaudePrompt | undefined | null, promp
516
516
  system: system.length > 0 ? system : undefined // If system is empty, set to undefined
517
517
  };
518
518
  }
519
+
520
+ /**
521
+ * Sanitize messages by removing empty text blocks.
522
+ * Claude API rejects messages with empty text content blocks ("text content blocks must be non-empty").
523
+ * This handles cases where streaming was interrupted and left empty text blocks.
524
+ *
525
+ * - Filters out empty text blocks from each message's content
526
+ * - Removes messages entirely if they have no content after filtering
527
+ */
528
+ function sanitizeMessages(messages: MessageParam[]): MessageParam[] {
529
+ const result: MessageParam[] = [];
530
+
531
+ for (const message of messages) {
532
+ if (typeof message.content === 'string') {
533
+ // String content - keep only if non-empty
534
+ if (message.content.trim()) {
535
+ result.push(message);
536
+ }
537
+ continue;
538
+ }
539
+
540
+ // Array content - filter out empty text blocks
541
+ const filteredContent = message.content.filter(block => {
542
+ if (block.type === 'text') {
543
+ return block.text && block.text.trim().length > 0;
544
+ }
545
+ // Keep all non-text blocks (tool_use, tool_result, image, etc.)
546
+ return true;
547
+ });
548
+
549
+ // Only include message if it has content after filtering
550
+ if (filteredContent.length > 0) {
551
+ result.push({
552
+ ...message,
553
+ content: filteredContent
554
+ });
555
+ }
556
+ }
557
+
558
+ return result;
559
+ }
560
+
561
+ /**
562
+ * Fix orphaned tool_use blocks in the conversation.
563
+ * @exported for testing
564
+ *
565
+ * When an agent is stopped mid-tool-execution, the assistant message contains tool_use blocks
566
+ * but no corresponding tool_result was added. The Anthropic API requires that every tool_use
567
+ * must be followed by a tool_result in the next user message.
568
+ *
569
+ * This function detects such cases and injects synthetic tool_result blocks indicating
570
+ * the tools were interrupted, allowing the conversation to continue.
571
+ */
572
+ export function fixOrphanedToolUse(messages: MessageParam[]): MessageParam[] {
573
+ if (messages.length < 2) return messages;
574
+
575
+ const result: MessageParam[] = [];
576
+
577
+ for (let i = 0; i < messages.length; i++) {
578
+ const current = messages[i];
579
+ result.push(current);
580
+
581
+ // Check if this is an assistant message with tool_use blocks
582
+ if (current.role === 'assistant' && Array.isArray(current.content)) {
583
+ const toolUseBlocks = current.content.filter(
584
+ (block): block is ContentBlockParam & { type: 'tool_use'; id: string; name: string } =>
585
+ block.type === 'tool_use'
586
+ );
587
+
588
+ if (toolUseBlocks.length > 0) {
589
+ // Check if the next message is a user message with matching tool_results
590
+ const nextMessage = messages[i + 1];
591
+
592
+ if (nextMessage && nextMessage.role === 'user' && Array.isArray(nextMessage.content)) {
593
+ // Get tool_result IDs from the next message
594
+ const toolResultIds = new Set(
595
+ nextMessage.content
596
+ .filter((block): block is ToolResultBlockParam => block.type === 'tool_result')
597
+ .map(block => block.tool_use_id)
598
+ );
599
+
600
+ // Find orphaned tool_use blocks (no matching tool_result)
601
+ const orphanedToolUse = toolUseBlocks.filter(block => !toolResultIds.has(block.id));
602
+
603
+ if (orphanedToolUse.length > 0) {
604
+ // Inject synthetic tool_results for orphaned tool_use
605
+ const syntheticResults: ToolResultBlockParam[] = orphanedToolUse.map(block => ({
606
+ type: 'tool_result',
607
+ tool_use_id: block.id,
608
+ content: `[Tool interrupted: The user stopped the operation before "${block.name}" could execute.]`
609
+ }));
610
+
611
+ // Prepend synthetic results to the next user message
612
+ const updatedNextMessage: MessageParam = {
613
+ ...nextMessage,
614
+ content: [...syntheticResults, ...nextMessage.content]
615
+ };
616
+
617
+ // Replace the next message in our iteration
618
+ messages[i + 1] = updatedNextMessage;
619
+ }
620
+ } else if (nextMessage && nextMessage.role === 'user') {
621
+ // Next message is a user message but not array content (plain text)
622
+ // We need to convert it and add tool_results
623
+ const syntheticResults: ToolResultBlockParam[] = toolUseBlocks.map(block => ({
624
+ type: 'tool_result',
625
+ tool_use_id: block.id,
626
+ content: `[Tool interrupted: The user stopped the operation before "${block.name}" could execute.]`
627
+ }));
628
+
629
+ const textContent: TextBlockParam = typeof nextMessage.content === 'string'
630
+ ? { type: 'text', text: nextMessage.content }
631
+ : { type: 'text', text: '' };
632
+
633
+ const updatedNextMessage: MessageParam = {
634
+ role: 'user',
635
+ content: [...syntheticResults, textContent]
636
+ };
637
+
638
+ messages[i + 1] = updatedNextMessage;
639
+ }
640
+ // Note: If there's no nextMessage, we leave the conversation as-is.
641
+ // The tool_use blocks are expected to be there - the next turn will provide tool_results.
642
+ }
643
+ }
644
+ }
645
+
646
+ return result;
647
+ }
648
+
519
649
  interface RequestOptions {
520
650
  headers?: Record<string, string>;
521
651
  }
@@ -535,10 +665,24 @@ function getClaudePayload(options: ExecutionOptions, prompt: ClaudePrompt): { pa
535
665
  };
536
666
  }
537
667
 
668
+ // Fix orphaned tool_use blocks (can occur when agent is stopped mid-tool-execution)
669
+ const fixedMessages = fixOrphanedToolUse(prompt.messages);
670
+ // Sanitize messages to remove empty text blocks (can occur from interrupted streaming)
671
+ const sanitizedMessages = sanitizeMessages(fixedMessages);
672
+
673
+ // Validate tools have input_schema.type set to 'object' as required by the Anthropic SDK
674
+ if (options.tools) {
675
+ for (const tool of options.tools) {
676
+ if (tool.input_schema.type !== 'object') {
677
+ throw new Error(`Tool "${tool.name}" has invalid input_schema.type: expected "object", got "${tool.input_schema.type}"`);
678
+ }
679
+ }
680
+ }
681
+
538
682
  const payload = {
539
- messages: prompt.messages,
683
+ messages: sanitizedMessages,
540
684
  system: prompt.system,
541
- tools: options.tools, // we are using the same shape as claude for tools
685
+ tools: options.tools as MessageCreateParamsBase['tools'],
542
686
  temperature: model_options?.temperature,
543
687
  model: modelName,
544
688
  max_tokens: maxToken(options),
@@ -1,120 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.FireflyDriver = void 0;
4
- const core_1 = require("@llumiverse/core");
5
- class FireflyDriver extends core_1.AbstractDriver {
6
- static PROVIDER = "firefly";
7
- provider = FireflyDriver.PROVIDER;
8
- endpoint;
9
- constructor(options) {
10
- super(options);
11
- if (!options.apiKey) {
12
- throw new Error("No API key provided for Firefly driver");
13
- }
14
- this.endpoint = options.endpoint || "https://firefly-api.adobe.io/v3";
15
- }
16
- async requestTextCompletion(_prompt, _options) {
17
- throw new Error("Text completion not supported by Firefly");
18
- }
19
- async requestTextCompletionStream(_prompt, _options) {
20
- throw new Error("Text completion streaming not supported by Firefly");
21
- }
22
- async requestImageGeneration(segments, options) {
23
- this.logger.debug(`[${this.provider}] Generating image with model ${options.model}`);
24
- const prompt = segments.map(s => s.content).join("\n\n");
25
- try {
26
- const payload = {
27
- prompt: prompt,
28
- };
29
- const response = await fetch(`${this.endpoint}/images/generate`, {
30
- method: 'POST',
31
- headers: {
32
- 'Content-Type': 'application/json',
33
- 'x-api-key': this.options.apiKey,
34
- },
35
- body: JSON.stringify(payload)
36
- });
37
- if (!response.ok) {
38
- const error = await response.json();
39
- throw new Error(`Firefly API error: ${error.message || response.statusText}`);
40
- }
41
- const result = await response.json();
42
- if (result.promptHasDeniedWords || result.promptHasBlockedArtists) {
43
- return {
44
- result: [],
45
- error: {
46
- message: "Prompt contains denied words or blocked artists",
47
- code: "content_policy_violation"
48
- }
49
- };
50
- }
51
- return {
52
- result: result.outputs.map(output => ({
53
- type: "image",
54
- value: output.image.url
55
- }))
56
- };
57
- }
58
- catch (error) {
59
- this.logger.error({ error }, "[Firefly] Image generation failed");
60
- return {
61
- result: [],
62
- error: {
63
- message: error.message,
64
- code: error.code || 'GENERATION_FAILED'
65
- }
66
- };
67
- }
68
- }
69
- mapSize(size) {
70
- // Default to 1024x1024 if no size specified
71
- if (!size)
72
- return { width: 1024, height: 1024 };
73
- const [width, height] = size.split('x').map(Number);
74
- return { width, height };
75
- }
76
- async listModels(_params) {
77
- return [
78
- {
79
- id: "firefly-v3-text-to-image",
80
- name: "Firefly v3 Text to Image",
81
- provider: this.provider,
82
- description: "Adobe Firefly v3 text to image generation model",
83
- tags: ["image-generation"]
84
- },
85
- {
86
- id: "firefly-v3-image-to-image",
87
- name: "Firefly v3 Image to Image",
88
- provider: this.provider,
89
- description: "Adobe Firefly v3 image to image generation model",
90
- tags: ["image-generation"]
91
- },
92
- {
93
- id: "firefly-v3-inpainting",
94
- name: "Firefly v3 Inpainting",
95
- provider: this.provider,
96
- description: "Adobe Firefly v3 inpainting model",
97
- tags: ["image-generation"]
98
- }
99
- ];
100
- }
101
- async validateConnection() {
102
- try {
103
- const response = await fetch(`${this.endpoint}/auth/validate`, {
104
- headers: {
105
- 'x-api-key': this.options.apiKey
106
- }
107
- });
108
- return response.ok;
109
- }
110
- catch (error) {
111
- this.logger.error({ error }, "[Firefly] Connection validation failed");
112
- return false;
113
- }
114
- }
115
- async generateEmbeddings(_options) {
116
- throw new Error("Embeddings not supported by Firefly");
117
- }
118
- }
119
- exports.FireflyDriver = FireflyDriver;
120
- //# sourceMappingURL=firefly.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"firefly.js","sourceRoot":"","sources":["../../../src/adobe/firefly.ts"],"names":[],"mappings":";;;AAAA,2CAAuM;AAoEvM,MAAa,aAAc,SAAQ,qBAAoC;IACnE,MAAM,CAAC,QAAQ,GAAG,SAAS,CAAC;IAC5B,QAAQ,GAAG,aAAa,CAAC,QAAQ,CAAC;IAEjB,QAAQ,CAAS;IAElC,YAAY,OAA6B;QACrC,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;YAClB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;QAC9D,CAAC;QAED,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,IAAI,iCAAiC,CAAC;IAC1E,CAAC;IAED,KAAK,CAAC,qBAAqB,CAAC,OAAe,EAAE,QAA0B;QACnE,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;IAChE,CAAC;IAED,KAAK,CAAC,2BAA2B,CAAC,OAAe,EAAE,QAA0B;QACzE,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;IAC1E,CAAC;IAED,KAAK,CAAC,sBAAsB,CAAC,QAAyB,EAAE,OAAyB;QAC7E,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,QAAQ,iCAAiC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;QACrF,MAAM,MAAM,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAGzD,IAAI,CAAC;YACD,MAAM,OAAO,GAA2B;gBACpC,MAAM,EAAE,MAAgB;aAC3B,CAAC;YAEF,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,QAAQ,kBAAkB,EAAE;gBAC7D,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACL,cAAc,EAAE,kBAAkB;oBAClC,WAAW,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM;iBACnC;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC;aAChC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACf,MAAM,KAAK,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;gBACpC,MAAM,IAAI,KAAK,CAAC,sBAAsB,KAAK,CAAC,OAAO,IAAI,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC;YAClF,CAAC;YAED,MAAM,MAAM,GAA4B,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YAE9D,IAAI,MAAM,CAAC,oBAAoB,IAAI,MAAM,CAAC,uBAAuB,EAAE,CAAC;gBAChE,OAAO;oBACH,MAAM,EAAE,EAAE;oBACV,KAAK,EAAE;wBACH,OAAO,EAAE,iDAAiD;wBAC1D,IAAI,EAAE,0BAA0B;qBACnC;iBACJ,CAAC;YACN,CAAC;YAED,OAAO;gBACH,MAAM,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;oBAClC,IAAI,EAAE,OAAgB;oBACtB,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,GAAG;iBAC1B,CAAC,CAAC;aACN,CAAC;QAEN,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YAClB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,EAAE,mCAAmC,CAAC,CAAC;YAClE,OAAO;gBACH,MAAM,EAAE,EAAE;gBACV,KAAK,EAAE;oBACH,OAAO,EAAE,KAAK,CAAC,OAAO;oBACtB,
IAAI,EAAE,KAAK,CAAC,IAAI,IAAI,mBAAmB;iBAC1C;aACJ,CAAC;QACN,CAAC;IACL,CAAC;IAED,OAAO,CAAC,IAAa;QACjB,4CAA4C;QAC5C,IAAI,CAAC,IAAI;YAAE,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QAEhD,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;QACpD,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC;IAC7B,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,OAA4B;QACzC,OAAO;YACH;gBACI,EAAE,EAAE,0BAA0B;gBAC9B,IAAI,EAAE,0BAA0B;gBAChC,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,WAAW,EAAE,iDAAiD;gBAC9D,IAAI,EAAE,CAAC,kBAAkB,CAAC;aAC7B;YACD;gBACI,EAAE,EAAE,2BAA2B;gBAC/B,IAAI,EAAE,2BAA2B;gBACjC,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,WAAW,EAAE,kDAAkD;gBAC/D,IAAI,EAAE,CAAC,kBAAkB,CAAC;aAC7B;YACD;gBACI,EAAE,EAAE,uBAAuB;gBAC3B,IAAI,EAAE,uBAAuB;gBAC7B,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,WAAW,EAAE,mCAAmC;gBAChD,IAAI,EAAE,CAAC,kBAAkB,CAAC;aAC7B;SACJ,CAAC;IACN,CAAC;IAED,KAAK,CAAC,kBAAkB;QACpB,IAAI,CAAC;YACD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,QAAQ,gBAAgB,EAAE;gBAC3D,OAAO,EAAE;oBACL,WAAW,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM;iBACnC;aACJ,CAAC,CAAC;YACH,OAAO,QAAQ,CAAC,EAAE,CAAC;QACvB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,EAAE,wCAAwC,CAAC,CAAC;YACvE,OAAO,KAAK,CAAC;QACjB,CAAC;IACL,CAAC;IAED,KAAK,CAAC,kBAAkB,CAAC,QAA2B;QAChD,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;IAC3D,CAAC;;AAjIL,sCAkIC"}