@lobehub/lobehub 2.0.0-next.101 → 2.0.0-next.102

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/CHANGELOG.md +25 -0
  2. package/changelog/v1.json +9 -0
  3. package/package.json +1 -1
  4. package/packages/model-bank/package.json +1 -0
  5. package/packages/model-bank/src/aiModels/aihubmix.ts +27 -0
  6. package/packages/model-bank/src/aiModels/google.ts +69 -10
  7. package/packages/model-bank/src/aiModels/index.ts +3 -0
  8. package/packages/model-bank/src/aiModels/infiniai.ts +5 -22
  9. package/packages/model-bank/src/aiModels/ollamacloud.ts +12 -0
  10. package/packages/model-bank/src/aiModels/siliconcloud.ts +0 -61
  11. package/packages/model-bank/src/aiModels/vertexai.ts +88 -1
  12. package/packages/model-bank/src/aiModels/zenmux.ts +1423 -0
  13. package/packages/model-bank/src/const/modelProvider.ts +1 -0
  14. package/packages/model-bank/src/standard-parameters/index.ts +9 -0
  15. package/packages/model-runtime/src/core/openaiCompatibleFactory/index.test.ts +2 -2
  16. package/packages/model-runtime/src/core/streams/google/index.ts +7 -2
  17. package/packages/model-runtime/src/core/streams/openai/__snapshots__/responsesStream.test.ts.snap +166 -166
  18. package/packages/model-runtime/src/index.ts +1 -1
  19. package/packages/model-runtime/src/providers/google/createImage.ts +1 -0
  20. package/packages/model-runtime/src/providers/google/index.ts +11 -1
  21. package/packages/model-runtime/src/providers/zenmux/index.test.ts +320 -0
  22. package/packages/model-runtime/src/providers/zenmux/index.ts +84 -0
  23. package/packages/model-runtime/src/runtimeMap.ts +2 -0
  24. package/packages/types/src/user/settings/keyVaults.ts +1 -0
  25. package/src/app/[variants]/(main)/image/@menu/features/ConfigPanel/components/ResolutionSelect.tsx +88 -0
  26. package/src/app/[variants]/(main)/image/@menu/features/ConfigPanel/index.tsx +9 -0
  27. package/src/config/modelProviders/index.ts +3 -0
  28. package/src/config/modelProviders/zenmux.ts +21 -0
  29. package/src/envs/llm.ts +6 -0
  30. package/src/locales/default/image.ts +8 -0
  31. package/src/store/chat/slices/aiChat/actions/__tests__/conversationLifecycle.test.ts +3 -0
  32. package/src/store/chat/slices/aiChat/actions/streamingExecutor.ts +11 -0
@@ -858,10 +858,10 @@ exports[`OpenAIResponsesStream Reasoning summary 1`] = `
  ]
  `;
 
- exports[`OpenAIResponsesStream should transform OpenAI stream to protocol stream 1`] = `
+ exports[`OpenAIResponsesStream should handle chunks with undefined values gracefully 1`] = `
  [
 
- "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
+ "id: resp_undefined_vals
  "
  ,
 
@@ -874,74 +874,58 @@ exports[`OpenAIResponsesStream should transform OpenAI stream to protocol stream
  "
  ,
 
- "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
+ "id: undefined
  "
  ,
 
- "event: data
+ "event: reasoning
  "
  ,
 
- "data: {"type":"response.in_progress","response":{"id":"resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58","object":"response","created_at":1748925324,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"o4-mini","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":"medium","summary":null},"service_tier":"auto","store":false,"temperature":1,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"a search service. Useful for when you need to answer questions about current events. Input should be a search query. Output is a JSON array of the query results","name":"lobe-web-browsing____search____builtin","parameters":{"properties":{"query":{"description":"The search query","type":"string"},"searchCategories":{"description":"The search categories you can set:","items":{"enum":["general","images","news","science","videos"],"type":"string"},"type":"array"},"searchEngines":{"description":"The search engines you can use:","items":{"enum":["google","bilibili","bing","duckduckgo","npm","pypi","github","arxiv","google scholar","z-library","reddit","imdb","brave","wikipedia","pinterest","unsplash","vimeo","youtube"],"type":"string"},"type":"array"},"searchTimeRange":{"description":"The time range you can set:","enum":["anytime","day","week","month","year"],"type":"string"}},"required":["query"],"type":"object"},"strict":true},{"type":"function","description":"A crawler can visit page content. Output is a JSON object of title, content, url and website","name":"lobe-web-browsing____crawlSinglePage____builtin","parameters":{"properties":{"url":{"description":"The url need to be crawled","type":"string"}},"required":["url"],"type":"object"},"strict":true},{"type":"function","description":"A crawler can visit multi pages. If need to visit multi website, use this one. Output is an array of JSON object of title, content, url and website","name":"lobe-web-browsing____crawlMultiPages____builtin","parameters":{"properties":{"urls":{"items":{"description":"The urls need to be crawled","type":"string"},"type":"array"}},"required":["urls"],"type":"object"},"strict":true}],"top_p":1,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
+ "data: undefined
 
  "
  ,
-
- "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
- "
- ,
-
- "event: data
- "
- ,
-
- "data: {"id":"rs_683e7bc80a9c81908f6e3d61ad63cc1e0cf93af363cdcf58","type":"reasoning","summary":[]}
+ ]
+ `;
 
- "
- ,
+ exports[`OpenAIResponsesStream should handle first chunk error with FIRST_CHUNK_ERROR_KEY 1`] = `
+ [
 
- "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
+ "id: first_chunk_error
  "
  ,
 
- "event: data
+ "event: error
  "
  ,
 
- "data: {"id":"msg_683e7bde8b0c8190970ab8c719c0fc1c0cf93af363cdcf58","type":"message","status":"in_progress","content":[],"role":"assistant"}
+ "data: {"body":{"message":"Invalid API key","errorType":"InvalidProviderAPIKey"},"message":"Invalid API key","type":"InvalidProviderAPIKey"}
 
  "
  ,
-
- "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
- "
- ,
-
- "event: data
- "
- ,
-
- "data: {"type":"response.content_part.added","item_id":"msg_683e7bde8b0c8190970ab8c719c0fc1c0cf93af363cdcf58","output_index":1,"content_index":0,"part":{"type":"output_text","annotations":[],"text":"Hello"}}
+ ]
+ `;
 
- "
- ,
+ exports[`OpenAIResponsesStream should handle first chunk error with message object 1`] = `
+ [
 
- "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
+ "id: first_chunk_error
  "
  ,
 
- "event: data
+ "event: error
  "
  ,
 
- "data: {"type":"response.content_part.added","item_id":"msg_683e7bde8b0c8190970ab8c719c0fc1c0cf93af363cdcf58","output_index":1,"content_index":0,"part":{"type":"output_text","annotations":[],"text":" world"}}
+ "data: {"body":{"message":{"error":"API quota exceeded","code":429}},"message":"{\\"message\\":{\\"error\\":\\"API quota exceeded\\",\\"code\\":429}}","type":"ProviderBizError"}
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle first chunk error with FIRST_CHUNK_ERROR_KEY 1`] = `
+ exports[`OpenAIResponsesStream should handle first chunk error without message 1`] = `
  [
 
  "id: first_chunk_error
@@ -952,53 +936,48 @@ exports[`OpenAIResponsesStream should handle first chunk error with FIRST_CHUNK_
  "
  ,
 
- "data: {"body":{"message":"Invalid API key","errorType":"InvalidProviderAPIKey"},"message":"Invalid API key","type":"InvalidProviderAPIKey"}
+ "data: {"body":{"code":"rate_limit_exceeded","status":429},"message":"{\\"code\\":\\"rate_limit_exceeded\\",\\"status\\":429}","type":"ProviderBizError"}
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle first chunk error with message object 1`] = `
+ exports[`OpenAIResponsesStream should handle function_call in response.output_item.added 1`] = `
  [
 
- "id: first_chunk_error
+ "id: resp_test_456
  "
  ,
 
- "event: error
+ "event: data
  "
  ,
 
- "data: {"body":{"message":{"error":"API quota exceeded","code":429}},"message":"{\\"message\\":{\\"error\\":\\"API quota exceeded\\",\\"code\\":429}}","type":"ProviderBizError"}
+ "data: "in_progress"
 
  "
  ,
- ]
- `;
-
- exports[`OpenAIResponsesStream should handle first chunk error without message 1`] = `
- [
 
- "id: first_chunk_error
+ "id: resp_test_456
  "
  ,
 
- "event: error
+ "event: tool_calls
  "
  ,
 
- "data: {"body":{"code":"rate_limit_exceeded","status":429},"message":"{\\"code\\":\\"rate_limit_exceeded\\",\\"status\\":429}","type":"ProviderBizError"}
+ "data: [{"function":{"arguments":"{\\"location\\": \\"San Francisco\\"}","name":"get_weather"},"id":"call_abc123","index":0,"type":"function"}]
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.created event 1`] = `
+ exports[`OpenAIResponsesStream should handle multiple annotations and accumulate citations 1`] = `
  [
 
- "id: resp_test_123
+ "id: resp_multi_citation
  "
  ,
 
@@ -1010,34 +989,29 @@ exports[`OpenAIResponsesStream should handle response.created event 1`] = `
 
  "
  ,
- ]
- `;
-
- exports[`OpenAIResponsesStream should handle function_call in response.output_item.added 1`] = `
- [
 
- "id: resp_test_456
+ "id: msg_cite
  "
  ,
 
- "event: data
+ "event: text
  "
  ,
 
- "data: "in_progress"
+ "data: null
 
  "
  ,
 
- "id: resp_test_456
+ "id: msg_cite
  "
  ,
 
- "event: tool_calls
+ "event: text
  "
  ,
 
- "data: [{"function":{"arguments":"{\\"location\\": \\"San Francisco\\"}","name":"get_weather"},"id":"call_abc123","index":0,"type":"function"}]
+ "data: null
 
  "
  ,
@@ -1088,10 +1062,10 @@ exports[`OpenAIResponsesStream should handle multiple function_calls with increm
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.function_call_arguments.delta 1`] = `
+ exports[`OpenAIResponsesStream should handle non-standard item types in output_item.added 1`] = `
  [
 
- "id: resp_delta_test
+ "id: resp_other_item
  "
  ,
 
@@ -1104,38 +1078,56 @@ exports[`OpenAIResponsesStream should handle response.function_call_arguments.de
  "
  ,
 
- "id: resp_delta_test
+ "id: resp_other_item
  "
  ,
 
- "event: tool_calls
+ "event: data
  "
  ,
 
- "data: [{"function":{"arguments":"{\\"query\\":","name":"search_web"},"id":"call_delta","index":0,"type":"function"}]
+ "data: {"type":"message","id":"msg_test","status":"in_progress","content":[],"role":"assistant"}
 
  "
  ,
+ ]
+ `;
+
+ exports[`OpenAIResponsesStream should handle response.completed with usage 1`] = `
+ [
 
- "id: resp_delta_test
+ "id: resp_completed_usage
  "
  ,
 
- "event: tool_calls
+ "event: data
  "
  ,
 
- "data: [{"function":{"arguments":" \\"OpenAI\\"}","name":"search_web"},"id":"call_delta","index":0,"type":"function"}]
+ "data: "in_progress"
+
+ "
+ ,
+
+ "id: resp_completed_usage
+ "
+ ,
+
+ "event: usage
+ "
+ ,
+
+ "data: {"inputCacheMissTokens":100,"inputTextTokens":100,"outputTextTokens":50,"totalInputTokens":100,"totalOutputTokens":50,"totalTokens":150}
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.output_text.delta 1`] = `
+ exports[`OpenAIResponsesStream should handle response.completed without usage 1`] = `
  [
 
- "id: resp_text_delta
+ "id: resp_completed_no_usage
  "
  ,
 
@@ -1148,38 +1140,43 @@ exports[`OpenAIResponsesStream should handle response.output_text.delta 1`] = `
  "
  ,
 
- "id: msg_123
+ "id: resp_completed_no_usage
  "
  ,
 
- "event: text
+ "event: data
  "
  ,
 
- "data: "Hello "
+ "data: {"type":"response.completed","response":{"id":"resp_completed_no_usage","status":"completed"}}
 
  "
  ,
+ ]
+ `;
+
+ exports[`OpenAIResponsesStream should handle response.created event 1`] = `
+ [
 
- "id: msg_123
+ "id: resp_test_123
  "
  ,
 
- "event: text
+ "event: data
  "
  ,
 
- "data: "world!"
+ "data: "in_progress"
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.reasoning_summary_part.added for first part 1`] = `
+ exports[`OpenAIResponsesStream should handle response.function_call_arguments.delta 1`] = `
  [
 
- "id: resp_reasoning
+ "id: resp_delta_test
  "
  ,
 
@@ -1192,25 +1189,38 @@ exports[`OpenAIResponsesStream should handle response.reasoning_summary_part.add
  "
  ,
 
- "id: reasoning_1
+ "id: resp_delta_test
  "
  ,
 
- "event: reasoning
+ "event: tool_calls
  "
  ,
 
- "data: ""
+ "data: [{"function":{"arguments":"{\\"query\\":","name":"search_web"},"id":"call_delta","index":0,"type":"function"}]
+
+ "
+ ,
+
+ "id: resp_delta_test
+ "
+ ,
+
+ "event: tool_calls
+ "
+ ,
+
+ "data: [{"function":{"arguments":" \\"OpenAI\\"}","name":"search_web"},"id":"call_delta","index":0,"type":"function"}]
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.reasoning_summary_part.added for subsequent parts 1`] = `
+ exports[`OpenAIResponsesStream should handle response.output_item.done with citations 1`] = `
  [
 
- "id: resp_reasoning_multi
+ "id: resp_done_citation
  "
  ,
 
@@ -1223,38 +1233,38 @@ exports[`OpenAIResponsesStream should handle response.reasoning_summary_part.add
  "
  ,
 
- "id: reasoning_1
+ "id: msg_final
  "
  ,
 
- "event: reasoning
+ "event: text
  "
  ,
 
- "data: ""
+ "data: null
 
  "
  ,
 
- "id: reasoning_2
+ "id: msg_final
  "
  ,
 
- "event: reasoning
+ "event: grounding
  "
  ,
 
- "data: "\\n"
+ "data: {"citations":[{"title":"Citation Title","url":"https://citation.com"}]}
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.reasoning_summary_text.delta 1`] = `
+ exports[`OpenAIResponsesStream should handle response.output_item.done without citations 1`] = `
  [
 
- "id: resp_reasoning_delta
+ "id: resp_done_no_citation
  "
  ,
 
@@ -1267,28 +1277,15 @@ exports[`OpenAIResponsesStream should handle response.reasoning_summary_text.del
  "
  ,
 
- "id: reasoning_123
- "
- ,
-
- "event: reasoning
- "
- ,
-
- "data: "Thinking about"
-
- "
- ,
-
- "id: reasoning_123
+ "id: msg_no_cite
  "
  ,
 
- "event: reasoning
+ "event: text
  "
  ,
 
- "data: " the problem..."
+ "data: null
 
  "
  ,
@@ -1326,10 +1323,10 @@ exports[`OpenAIResponsesStream should handle response.output_text.annotation.add
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle multiple annotations and accumulate citations 1`] = `
+ exports[`OpenAIResponsesStream should handle response.output_text.delta 1`] = `
  [
 
- "id: resp_multi_citation
+ "id: resp_text_delta
  "
  ,
 
@@ -1342,7 +1339,7 @@ exports[`OpenAIResponsesStream should handle multiple annotations and accumulate
  "
  ,
 
- "id: msg_cite
+ "id: msg_123
  "
  ,
 
@@ -1350,12 +1347,12 @@ exports[`OpenAIResponsesStream should handle multiple annotations and accumulate
  "
  ,
 
- "data: null
+ "data: "Hello "
 
  "
  ,
 
- "id: msg_cite
+ "id: msg_123
  "
  ,
 
@@ -1363,17 +1360,17 @@ exports[`OpenAIResponsesStream should handle multiple annotations and accumulate
  "
  ,
 
- "data: null
+ "data: "world!"
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.output_item.done with citations 1`] = `
+ exports[`OpenAIResponsesStream should handle response.reasoning_summary_part.added for first part 1`] = `
  [
 
- "id: resp_done_citation
+ "id: resp_reasoning
  "
  ,
 
@@ -1386,69 +1383,69 @@ exports[`OpenAIResponsesStream should handle response.output_item.done with cita
  "
  ,
 
- "id: msg_final
+ "id: reasoning_1
  "
  ,
 
- "event: text
+ "event: reasoning
  "
  ,
 
- "data: null
+ "data: ""
 
  "
  ,
+ ]
+ `;
+
+ exports[`OpenAIResponsesStream should handle response.reasoning_summary_part.added for subsequent parts 1`] = `
+ [
 
- "id: msg_final
+ "id: resp_reasoning_multi
  "
  ,
 
- "event: grounding
+ "event: data
  "
  ,
 
- "data: {"citations":[{"title":"Citation Title","url":"https://citation.com"}]}
+ "data: "in_progress"
 
  "
  ,
- ]
- `;
-
- exports[`OpenAIResponsesStream should handle response.output_item.done without citations 1`] = `
- [
 
- "id: resp_done_no_citation
+ "id: reasoning_1
  "
  ,
 
- "event: data
+ "event: reasoning
  "
  ,
 
- "data: "in_progress"
+ "data: ""
 
  "
  ,
 
- "id: msg_no_cite
+ "id: reasoning_2
  "
  ,
 
- "event: text
+ "event: reasoning
  "
  ,
 
- "data: null
+ "data: "\\n"
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.completed with usage 1`] = `
+ exports[`OpenAIResponsesStream should handle response.reasoning_summary_text.delta 1`] = `
  [
 
- "id: resp_completed_usage
+ "id: resp_reasoning_delta
  "
  ,
 
@@ -1461,25 +1458,38 @@ exports[`OpenAIResponsesStream should handle response.completed with usage 1`] =
  "
  ,
 
- "id: resp_completed_usage
+ "id: reasoning_123
  "
  ,
 
- "event: usage
+ "event: reasoning
  "
  ,
 
- "data: {"inputCacheMissTokens":100,"inputTextTokens":100,"outputTextTokens":50,"totalInputTokens":100,"totalOutputTokens":50,"totalTokens":150}
+ "data: "Thinking about"
+
+ "
+ ,
+
+ "id: reasoning_123
+ "
+ ,
+
+ "event: reasoning
+ "
+ ,
+
+ "data: " the problem..."
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle response.completed without usage 1`] = `
+ exports[`OpenAIResponsesStream should handle unknown chunk type as data 1`] = `
  [
 
- "id: resp_completed_no_usage
+ "id: resp_unknown
  "
  ,
 
@@ -1492,7 +1502,7 @@ exports[`OpenAIResponsesStream should handle response.completed without usage 1`
  "
  ,
 
- "id: resp_completed_no_usage
+ "id: resp_unknown
  "
  ,
 
@@ -1500,17 +1510,17 @@ exports[`OpenAIResponsesStream should handle response.completed without usage 1`
  "
  ,
 
- "data: {"type":"response.completed","response":{"id":"resp_completed_no_usage","status":"completed"}}
+ "data: {"type":"response.unknown_event","data":"some data"}
 
  "
  ,
  ]
  `;
 
- exports[`OpenAIResponsesStream should handle unknown chunk type as data 1`] = `
+ exports[`OpenAIResponsesStream should transform OpenAI stream to protocol stream 1`] = `
  [
 
- "id: resp_unknown
+ "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
  "
  ,
 
@@ -1523,7 +1533,7 @@ exports[`OpenAIResponsesStream should handle unknown chunk type as data 1`] = `
  "
  ,
 
- "id: resp_unknown
+ "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
  "
  ,
 
@@ -1531,17 +1541,12 @@ exports[`OpenAIResponsesStream should handle unknown chunk type as data 1`] = `
  "
  ,
 
- "data: {"type":"response.unknown_event","data":"some data"}
+ "data: {"type":"response.in_progress","response":{"id":"resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58","object":"response","created_at":1748925324,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"o4-mini","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":"medium","summary":null},"service_tier":"auto","store":false,"temperature":1,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"a search service. Useful for when you need to answer questions about current events. Input should be a search query. Output is a JSON array of the query results","name":"lobe-web-browsing____search____builtin","parameters":{"properties":{"query":{"description":"The search query","type":"string"},"searchCategories":{"description":"The search categories you can set:","items":{"enum":["general","images","news","science","videos"],"type":"string"},"type":"array"},"searchEngines":{"description":"The search engines you can use:","items":{"enum":["google","bilibili","bing","duckduckgo","npm","pypi","github","arxiv","google scholar","z-library","reddit","imdb","brave","wikipedia","pinterest","unsplash","vimeo","youtube"],"type":"string"},"type":"array"},"searchTimeRange":{"description":"The time range you can set:","enum":["anytime","day","week","month","year"],"type":"string"}},"required":["query"],"type":"object"},"strict":true},{"type":"function","description":"A crawler can visit page content. Output is a JSON object of title, content, url and website","name":"lobe-web-browsing____crawlSinglePage____builtin","parameters":{"properties":{"url":{"description":"The url need to be crawled","type":"string"}},"required":["url"],"type":"object"},"strict":true},{"type":"function","description":"A crawler can visit multi pages. If need to visit multi website, use this one. Output is an array of JSON object of title, content, url and website","name":"lobe-web-browsing____crawlMultiPages____builtin","parameters":{"properties":{"urls":{"items":{"description":"The urls need to be crawled","type":"string"},"type":"array"}},"required":["urls"],"type":"object"},"strict":true}],"top_p":1,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
 
  "
  ,
- ]
- `;
-
- exports[`OpenAIResponsesStream should handle non-standard item types in output_item.added 1`] = `
- [
 
- "id: resp_other_item
+ "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
  "
  ,
 
@@ -1549,12 +1554,12 @@ exports[`OpenAIResponsesStream should handle non-standard item types in output_i
  "
  ,
 
- "data: "in_progress"
+ "data: {"id":"rs_683e7bc80a9c81908f6e3d61ad63cc1e0cf93af363cdcf58","type":"reasoning","summary":[]}
 
  "
  ,
 
- "id: resp_other_item
+ "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
  "
  ,
 
@@ -1562,17 +1567,12 @@ exports[`OpenAIResponsesStream should handle non-standard item types in output_i
  "
  ,
 
- "data: {"type":"message","id":"msg_test","status":"in_progress","content":[],"role":"assistant"}
+ "data: {"id":"msg_683e7bde8b0c8190970ab8c719c0fc1c0cf93af363cdcf58","type":"message","status":"in_progress","content":[],"role":"assistant"}
 
  "
  ,
- ]
- `;
-
- exports[`OpenAIResponsesStream should handle chunks with undefined values gracefully 1`] = `
- [
 
- "id: resp_undefined_vals
+ "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
  "
  ,
 
@@ -1580,20 +1580,20 @@ exports[`OpenAIResponsesStream should handle chunks with undefined values gracef
  "
  ,
 
- "data: "in_progress"
+ "data: {"type":"response.content_part.added","item_id":"msg_683e7bde8b0c8190970ab8c719c0fc1c0cf93af363cdcf58","output_index":1,"content_index":0,"part":{"type":"output_text","annotations":[],"text":"Hello"}}
 
  "
  ,
 
- "id: undefined
+ "id: resp_683e7b8ca3308190b6837f20d2c015cd0cf93af363cdcf58
  "
  ,
 
- "event: reasoning
+ "event: data
  "
  ,
 
- "data: undefined
+ "data: {"type":"response.content_part.added","item_id":"msg_683e7bde8b0c8190970ab8c719c0fc1c0cf93af363cdcf58","output_index":1,"content_index":0,"part":{"type":"output_text","annotations":[],"text":" world"}}
 
  "
  ,