@lobehub/chat 1.69.1 → 1.69.3
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +50 -0
- package/changelog/v1.json +18 -0
- package/locales/ar/models.json +9 -0
- package/locales/bg-BG/models.json +9 -0
- package/locales/de-DE/models.json +9 -0
- package/locales/en-US/models.json +9 -0
- package/locales/es-ES/models.json +9 -0
- package/locales/fa-IR/models.json +9 -0
- package/locales/fr-FR/models.json +9 -0
- package/locales/it-IT/models.json +9 -0
- package/locales/ja-JP/models.json +9 -0
- package/locales/ko-KR/models.json +9 -0
- package/locales/nl-NL/models.json +9 -0
- package/locales/pl-PL/models.json +9 -0
- package/locales/pt-BR/models.json +9 -0
- package/locales/ru-RU/models.json +9 -0
- package/locales/tr-TR/models.json +9 -0
- package/locales/vi-VN/models.json +9 -0
- package/locales/zh-CN/models.json +9 -0
- package/locales/zh-TW/models.json +9 -0
- package/package.json +3 -3
- package/src/app/[variants]/(auth)/next-auth/signin/AuthSignInBox.tsx +161 -0
- package/src/app/[variants]/(auth)/next-auth/signin/page.tsx +11 -0
- package/src/app/[variants]/(main)/profile/(home)/features/SSOProvidersList/index.tsx +1 -1
- package/src/{app/[variants]/(main)/profile/(home)/features/SSOProvidersList → components/NextAuth}/AuthIcons.tsx +8 -6
- package/src/libs/agent-runtime/UniformRuntime/index.ts +114 -0
- package/src/libs/agent-runtime/anthropic/handleAnthropicError.ts +15 -0
- package/src/libs/agent-runtime/anthropic/index.test.ts +10 -1
- package/src/libs/agent-runtime/anthropic/index.ts +58 -40
- package/src/libs/agent-runtime/azureai/index.ts +7 -1
- package/src/libs/agent-runtime/github/index.ts +20 -25
- package/src/libs/agent-runtime/index.ts +2 -0
- package/src/libs/agent-runtime/openai/index.ts +2 -22
- package/src/libs/agent-runtime/types/type.ts +1 -1
- package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.ts +11 -7
- package/src/libs/agent-runtime/utils/openaiHelpers.ts +22 -0
- package/src/libs/next-auth/auth.config.ts +1 -0
- package/src/middleware.ts +1 -1
package/src/libs/agent-runtime/utils/openaiCompatibleFactory/index.ts

@@ -168,6 +168,8 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
   return class LobeOpenAICompatibleAI implements LobeRuntimeAI {
     client!: OpenAI;
 
+    private id: string;
+
     baseURL!: string;
     protected _options: ConstructorOptions<T>;
 
@@ -192,6 +194,8 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
       }
 
       this.baseURL = baseURL || this.client.baseURL;
+
+      this.id = options.id || provider;
     }
 
     async chat({ responseMode, ...payload }: ChatStreamPayload, options?: ChatCompetitionOptions) {
@@ -210,7 +214,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
       const streamOptions: OpenAIStreamOptions = {
         bizErrorTypeTransformer: chatCompletion?.handleStreamBizErrorType,
         callbacks: options?.callback,
-        provider,
+        provider: this.id,
       };
 
       if (customClient?.createChatCompletionStream) {
@@ -368,7 +372,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
         if (errorResult)
           return AgentRuntimeError.chat({
             ...errorResult,
-            provider,
+            provider: this.id,
           } as ChatCompletionErrorPayload);
       }
 
@@ -379,7 +383,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
           endpoint: desensitizedEndpoint,
           error: error as any,
           errorType: ErrorType.invalidAPIKey,
-          provider:
+          provider: this.id as ModelProvider,
         });
       }
 
@@ -397,7 +401,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
           endpoint: desensitizedEndpoint,
           error: errorResult,
           errorType: AgentRuntimeErrorType.InsufficientQuota,
-          provider:
+          provider: this.id as ModelProvider,
         });
       }
 
@@ -406,7 +410,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
           endpoint: desensitizedEndpoint,
           error: errorResult,
           errorType: AgentRuntimeErrorType.ModelNotFound,
-          provider:
+          provider: this.id as ModelProvider,
         });
       }
 
@@ -417,7 +421,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
           endpoint: desensitizedEndpoint,
           error: errorResult,
           errorType: AgentRuntimeErrorType.ExceededContextWindow,
-          provider:
+          provider: this.id as ModelProvider,
         });
       }
     }
@@ -426,7 +430,7 @@ export const LobeOpenAICompatibleFactory = <T extends Record<string, any> = any>
           endpoint: desensitizedEndpoint,
           error: errorResult,
           errorType: RuntimeError || ErrorType.bizError,
-          provider:
+          provider: this.id as ModelProvider,
         });
       }
   };
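The net effect of the hunks above is that every stream option and error payload now reports the id the runtime instance was constructed with, falling back to the factory's `provider`. The sketch below illustrates that intent only; the option names (`baseURL`, `provider`, `apiKey`, `id`) and the endpoint are assumptions drawn from this diff, not a documented public API.

```ts
// Minimal sketch, assuming the factory/constructor options visible in the diff above.
import { LobeOpenAICompatibleFactory } from '@/libs/agent-runtime/utils/openaiCompatibleFactory';

// A factory-built runtime previously reported the hard-coded `provider` everywhere.
const LobeExampleAI = LobeOpenAICompatibleFactory({
  baseURL: 'https://api.example.com/v1', // hypothetical endpoint
  provider: 'example', // default id used when no `id` option is passed
});

// After this change, `this.id = options.id || provider`, so a custom id supplied at
// construction time is what shows up in stream options and error payloads.
const runtime = new LobeExampleAI({
  apiKey: 'sk-example', // hypothetical key
  id: 'my-custom-provider',
});
```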
package/src/libs/agent-runtime/utils/openaiHelpers.ts

@@ -1,5 +1,7 @@
 import OpenAI from 'openai';
 
+import { disableStreamModels, systemToUserModels } from '@/const/models';
+import { ChatStreamPayload, OpenAIChatMessage } from '@/libs/agent-runtime';
 import { imageUrlToBase64 } from '@/utils/imageToBase64';
 
 import { parseDataUri } from './uriParser';
@@ -38,3 +40,23 @@ export const convertOpenAIMessages = async (messages: OpenAI.ChatCompletionMessa
     })),
   )) as OpenAI.ChatCompletionMessageParam[];
 };
+
+export const pruneReasoningPayload = (payload: ChatStreamPayload) => {
+  return {
+    ...payload,
+    frequency_penalty: 0,
+    messages: payload.messages.map((message: OpenAIChatMessage) => ({
+      ...message,
+      role:
+        message.role === 'system'
+          ? systemToUserModels.has(payload.model)
+            ? 'user'
+            : 'developer'
+          : message.role,
+    })),
+    presence_penalty: 0,
+    stream: !disableStreamModels.has(payload.model),
+    temperature: 1,
+    top_p: 1,
+  };
+};
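The new `pruneReasoningPayload` helper normalizes a chat payload for OpenAI-style reasoning models: `system` messages are rewritten to `developer` (or `user` for models listed in `systemToUserModels`), sampling parameters are pinned to neutral values, and streaming is disabled for models in `disableStreamModels`. A minimal usage sketch follows, assuming an illustrative model id and a loosely typed payload rather than the exact shape the runtime builds.

```ts
import type { ChatStreamPayload } from '@/libs/agent-runtime';
import { pruneReasoningPayload } from '@/libs/agent-runtime/utils/openaiHelpers';

// Loosely typed on purpose: this is a sketch, not the runtime's real payload construction.
const payload = {
  messages: [
    { content: 'You are a helpful assistant.', role: 'system' },
    { content: 'Summarize this changelog.', role: 'user' },
  ],
  model: 'o3-mini', // hypothetical reasoning-model id
  temperature: 0.3,
} as ChatStreamPayload;

// Result: system -> developer (or user), temperature/top_p forced to 1,
// penalties zeroed, and stream switched off if the model requires it.
const pruned = pruneReasoningPayload(payload);
```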