@lobehub/chat 1.5.2 → 1.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -0
- package/package.json +1 -1
- package/src/app/api/chat/[provider]/route.ts +0 -3
- package/src/app/api/chat/models/[provider]/route.ts +0 -3
- package/src/app/api/chat/openai/route.test.ts +28 -0
- package/src/app/api/chat/openai/route.ts +25 -0
- package/src/app/api/openai/stt/route.ts +21 -3
- package/src/app/api/openai/tts/route.ts +21 -3
- package/src/app/api/text-to-image/[provider]/route.ts +21 -5
- package/src/config/llm.ts +0 -8
- package/src/database/server/models/session.ts +4 -1
- package/src/app/api/config.test.ts +0 -43
- package/src/app/api/config.ts +0 -14
package/CHANGELOG.md
CHANGED
|
@@ -2,6 +2,32 @@
|
|
|
2
2
|
|
|
3
3
|
# Changelog
|
|
4
4
|
|
|
5
|
+
### [Version 1.5.3](https://github.com/lobehub/lobe-chat/compare/v1.5.2...v1.5.3)
|
|
6
|
+
|
|
7
|
+
<sup>Released on **2024-07-17**</sup>
|
|
8
|
+
|
|
9
|
+
#### 🐛 Bug Fixes
|
|
10
|
+
|
|
11
|
+
- **misc**: Fix `OpenAI` deployment restrictions, fix cant duplicate assistant.
|
|
12
|
+
|
|
13
|
+
<br/>
|
|
14
|
+
|
|
15
|
+
<details>
|
|
16
|
+
<summary><kbd>Improvements and Fixes</kbd></summary>
|
|
17
|
+
|
|
18
|
+
#### What's fixed
|
|
19
|
+
|
|
20
|
+
- **misc**: Fix `OpenAI` deployment restrictions, closes [#3206](https://github.com/lobehub/lobe-chat/issues/3206) ([3d7a35d](https://github.com/lobehub/lobe-chat/commit/3d7a35d))
|
|
21
|
+
- **misc**: Fix cant duplicate assistant, closes [#3242](https://github.com/lobehub/lobe-chat/issues/3242) ([0edc851](https://github.com/lobehub/lobe-chat/commit/0edc851))
|
|
22
|
+
|
|
23
|
+
</details>
|
|
24
|
+
|
|
25
|
+
<div align="right">
|
|
26
|
+
|
|
27
|
+
[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
|
|
28
|
+
|
|
29
|
+
</div>
|
|
30
|
+
|
|
5
31
|
### [Version 1.5.2](https://github.com/lobehub/lobe-chat/compare/v1.5.1...v1.5.2)
|
|
6
32
|
|
|
7
33
|
<sup>Released on **2024-07-17**</sup>
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@lobehub/chat",
|
|
3
|
-
"version": "1.5.2",
|
|
3
|
+
"version": "1.5.3",
|
|
4
4
|
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"framework",
|
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import { getPreferredRegion } from '@/app/api/config';
|
|
2
1
|
import { createErrorResponse } from '@/app/api/errorResponse';
|
|
3
2
|
import { AgentRuntime, ChatCompletionErrorPayload } from '@/libs/agent-runtime';
|
|
4
3
|
import { ChatErrorType } from '@/types/fetch';
|
|
@@ -10,8 +9,6 @@ import { createTraceOptions, initAgentRuntimeWithUserPayload } from '../agentRun
|
|
|
10
9
|
|
|
11
10
|
export const runtime = 'edge';
|
|
12
11
|
|
|
13
|
-
export const preferredRegion = getPreferredRegion();
|
|
14
|
-
|
|
15
12
|
export const POST = checkAuth(async (req: Request, { params, jwtPayload, createRuntime }) => {
|
|
16
13
|
const { provider } = params;
|
|
17
14
|
|
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
import { NextResponse } from 'next/server';
|
|
2
2
|
|
|
3
|
-
import { getPreferredRegion } from '@/app/api/config';
|
|
4
3
|
import { createErrorResponse } from '@/app/api/errorResponse';
|
|
5
4
|
import { ChatCompletionErrorPayload, ModelProvider } from '@/libs/agent-runtime';
|
|
6
5
|
import { ChatErrorType } from '@/types/fetch';
|
|
@@ -10,8 +9,6 @@ import { initAgentRuntimeWithUserPayload } from '../../agentRuntime';
|
|
|
10
9
|
|
|
11
10
|
export const runtime = 'edge';
|
|
12
11
|
|
|
13
|
-
export const preferredRegion = getPreferredRegion();
|
|
14
|
-
|
|
15
12
|
const noNeedAPIKey = (provider: string) =>
|
|
16
13
|
[ModelProvider.OpenRouter, ModelProvider.TogetherAI].includes(provider as any);
|
|
17
14
|
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
// @vitest-environment edge-runtime
|
|
2
|
+
import { describe, expect, it, vi } from 'vitest';
|
|
3
|
+
|
|
4
|
+
import { POST as UniverseRoute } from '../[provider]/route';
|
|
5
|
+
import { POST, preferredRegion, runtime } from './route';
|
|
6
|
+
|
|
7
|
+
// Mock '../[provider]/route'
|
|
8
|
+
vi.mock('../[provider]/route', () => ({
|
|
9
|
+
POST: vi.fn().mockResolvedValue('mocked response'),
|
|
10
|
+
}));
|
|
11
|
+
|
|
12
|
+
describe('Configuration tests', () => {
|
|
13
|
+
it('should have runtime set to "edge"', () => {
|
|
14
|
+
expect(runtime).toBe('edge');
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
it('should contain specific regions in preferredRegion', () => {
|
|
18
|
+
expect(preferredRegion).not.contain(['hkg1']);
|
|
19
|
+
});
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
describe('OpenAI POST function tests', () => {
|
|
23
|
+
it('should call UniverseRoute with correct parameters', async () => {
|
|
24
|
+
const mockRequest = new Request('https://example.com', { method: 'POST' });
|
|
25
|
+
await POST(mockRequest);
|
|
26
|
+
expect(UniverseRoute).toHaveBeenCalledWith(mockRequest, { params: { provider: 'openai' } });
|
|
27
|
+
});
|
|
28
|
+
});
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { POST as UniverseRoute } from '../[provider]/route';
|
|
2
|
+
|
|
3
|
+
export const runtime = 'edge';
|
|
4
|
+
|
|
5
|
+
export const preferredRegion = [
|
|
6
|
+
'arn1',
|
|
7
|
+
'bom1',
|
|
8
|
+
'cdg1',
|
|
9
|
+
'cle1',
|
|
10
|
+
'cpt1',
|
|
11
|
+
'dub1',
|
|
12
|
+
'fra1',
|
|
13
|
+
'gru1',
|
|
14
|
+
'hnd1',
|
|
15
|
+
'iad1',
|
|
16
|
+
'icn1',
|
|
17
|
+
'kix1',
|
|
18
|
+
'lhr1',
|
|
19
|
+
'pdx1',
|
|
20
|
+
'sfo1',
|
|
21
|
+
'sin1',
|
|
22
|
+
'syd1',
|
|
23
|
+
];
|
|
24
|
+
|
|
25
|
+
export const POST = async (req: Request) => UniverseRoute(req, { params: { provider: 'openai' } });
|
|
@@ -1,11 +1,29 @@
|
|
|
1
1
|
import { OpenAISTTPayload } from '@lobehub/tts';
|
|
2
2
|
import { createOpenaiAudioTranscriptions } from '@lobehub/tts/server';
|
|
3
3
|
|
|
4
|
-
import { getPreferredRegion } from '@/app/api/config';
|
|
5
|
-
import { createBizOpenAI } from '../createBizOpenAI';
|
|
4
|
+
import { createBizOpenAI } from '@/app/api/openai/createBizOpenAI';
|
|
6
5
|
|
|
7
6
|
export const runtime = 'edge';
|
|
8
|
-
|
|
7
|
+
|
|
8
|
+
export const preferredRegion = [
|
|
9
|
+
'arn1',
|
|
10
|
+
'bom1',
|
|
11
|
+
'cdg1',
|
|
12
|
+
'cle1',
|
|
13
|
+
'cpt1',
|
|
14
|
+
'dub1',
|
|
15
|
+
'fra1',
|
|
16
|
+
'gru1',
|
|
17
|
+
'hnd1',
|
|
18
|
+
'iad1',
|
|
19
|
+
'icn1',
|
|
20
|
+
'kix1',
|
|
21
|
+
'lhr1',
|
|
22
|
+
'pdx1',
|
|
23
|
+
'sfo1',
|
|
24
|
+
'sin1',
|
|
25
|
+
'syd1',
|
|
26
|
+
];
|
|
9
27
|
|
|
10
28
|
export const POST = async (req: Request) => {
|
|
11
29
|
const formData = await req.formData();
|
|
@@ -1,11 +1,29 @@
|
|
|
1
1
|
import { OpenAITTSPayload } from '@lobehub/tts';
|
|
2
2
|
import { createOpenaiAudioSpeech } from '@lobehub/tts/server';
|
|
3
3
|
|
|
4
|
-
import { getPreferredRegion } from '@/app/api/config';
|
|
5
|
-
import { createBizOpenAI } from '../createBizOpenAI';
|
|
4
|
+
import { createBizOpenAI } from '@/app/api/openai/createBizOpenAI';
|
|
6
5
|
|
|
7
6
|
export const runtime = 'edge';
|
|
8
|
-
|
|
7
|
+
|
|
8
|
+
export const preferredRegion = [
|
|
9
|
+
'arn1',
|
|
10
|
+
'bom1',
|
|
11
|
+
'cdg1',
|
|
12
|
+
'cle1',
|
|
13
|
+
'cpt1',
|
|
14
|
+
'dub1',
|
|
15
|
+
'fra1',
|
|
16
|
+
'gru1',
|
|
17
|
+
'hnd1',
|
|
18
|
+
'iad1',
|
|
19
|
+
'icn1',
|
|
20
|
+
'kix1',
|
|
21
|
+
'lhr1',
|
|
22
|
+
'pdx1',
|
|
23
|
+
'sfo1',
|
|
24
|
+
'sin1',
|
|
25
|
+
'syd1',
|
|
26
|
+
];
|
|
9
27
|
|
|
10
28
|
export const POST = async (req: Request) => {
|
|
11
29
|
const payload = (await req.json()) as OpenAITTSPayload;
|
|
@@ -1,17 +1,33 @@
|
|
|
1
1
|
import { NextResponse } from 'next/server';
|
|
2
2
|
|
|
3
|
-
import { getPreferredRegion } from '@/app/api/config';
|
|
3
|
+
import { initAgentRuntimeWithUserPayload } from '@/app/api/chat/agentRuntime';
|
|
4
4
|
import { createErrorResponse } from '@/app/api/errorResponse';
|
|
5
|
+
import { checkAuth } from '@/app/api/middleware/auth';
|
|
5
6
|
import { ChatCompletionErrorPayload } from '@/libs/agent-runtime';
|
|
6
7
|
import { TextToImagePayload } from '@/libs/agent-runtime/types';
|
|
7
8
|
import { ChatErrorType } from '@/types/fetch';
|
|
8
9
|
|
|
9
|
-
import { initAgentRuntimeWithUserPayload } from '../../chat/agentRuntime';
|
|
10
|
-
import { checkAuth } from '../../middleware/auth';
|
|
11
|
-
|
|
12
10
|
export const runtime = 'edge';
|
|
13
11
|
|
|
14
|
-
export const preferredRegion = getPreferredRegion();
|
|
12
|
+
export const preferredRegion = [
|
|
13
|
+
'arn1',
|
|
14
|
+
'bom1',
|
|
15
|
+
'cdg1',
|
|
16
|
+
'cle1',
|
|
17
|
+
'cpt1',
|
|
18
|
+
'dub1',
|
|
19
|
+
'fra1',
|
|
20
|
+
'gru1',
|
|
21
|
+
'hnd1',
|
|
22
|
+
'iad1',
|
|
23
|
+
'icn1',
|
|
24
|
+
'kix1',
|
|
25
|
+
'lhr1',
|
|
26
|
+
'pdx1',
|
|
27
|
+
'sfo1',
|
|
28
|
+
'sin1',
|
|
29
|
+
'syd1',
|
|
30
|
+
];
|
|
15
31
|
|
|
16
32
|
// return NextResponse.json(
|
|
17
33
|
// {
|
package/src/config/llm.ts
CHANGED
|
@@ -3,12 +3,6 @@ import { createEnv } from '@t3-oss/env-nextjs';
|
|
|
3
3
|
import { z } from 'zod';
|
|
4
4
|
|
|
5
5
|
export const getLLMConfig = () => {
|
|
6
|
-
// region format: iad1,sfo1
|
|
7
|
-
let regions: string[] = [];
|
|
8
|
-
if (process.env.OPENAI_FUNCTION_REGIONS) {
|
|
9
|
-
regions = process.env.OPENAI_FUNCTION_REGIONS.split(',');
|
|
10
|
-
}
|
|
11
|
-
|
|
12
6
|
return createEnv({
|
|
13
7
|
server: {
|
|
14
8
|
API_KEY_SELECT_MODE: z.string().optional(),
|
|
@@ -17,7 +11,6 @@ export const getLLMConfig = () => {
|
|
|
17
11
|
OPENAI_API_KEY: z.string().optional(),
|
|
18
12
|
OPENAI_PROXY_URL: z.string().optional(),
|
|
19
13
|
OPENAI_MODEL_LIST: z.string().optional(),
|
|
20
|
-
OPENAI_FUNCTION_REGIONS: z.array(z.string()),
|
|
21
14
|
|
|
22
15
|
ENABLED_AZURE_OPENAI: z.boolean(),
|
|
23
16
|
AZURE_API_KEY: z.string().optional(),
|
|
@@ -99,7 +92,6 @@ export const getLLMConfig = () => {
|
|
|
99
92
|
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
|
|
100
93
|
OPENAI_PROXY_URL: process.env.OPENAI_PROXY_URL,
|
|
101
94
|
OPENAI_MODEL_LIST: process.env.OPENAI_MODEL_LIST,
|
|
102
|
-
OPENAI_FUNCTION_REGIONS: regions as any,
|
|
103
95
|
|
|
104
96
|
ENABLED_AZURE_OPENAI: !!process.env.AZURE_API_KEY,
|
|
105
97
|
AZURE_API_KEY: process.env.AZURE_API_KEY,
|
|
@@ -173,8 +173,11 @@ export class SessionModel {
|
|
|
173
173
|
const { agent, ...session } = result;
|
|
174
174
|
const sessionId = this.genId();
|
|
175
175
|
|
|
176
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
177
|
+
const { id: _, slug: __, ...config } = agent;
|
|
178
|
+
|
|
176
179
|
return this.create({
|
|
177
|
-
config: agent,
|
|
180
|
+
config: config,
|
|
178
181
|
id: sessionId,
|
|
179
182
|
session: {
|
|
180
183
|
...session,
|
|
@@ -1,43 +0,0 @@
|
|
|
1
|
-
// @vitest-environment node
|
|
2
|
-
import { describe, expect, it, vi } from 'vitest';
|
|
3
|
-
|
|
4
|
-
import { getPreferredRegion } from './config';
|
|
5
|
-
|
|
6
|
-
// Stub the global process object to safely mock environment variables
|
|
7
|
-
vi.stubGlobal('process', {
|
|
8
|
-
...process, // Preserve the original process object
|
|
9
|
-
env: { ...process.env }, // Clone the environment variables object for modification
|
|
10
|
-
});
|
|
11
|
-
|
|
12
|
-
describe('getPreferredRegion', () => {
|
|
13
|
-
beforeEach(() => {
|
|
14
|
-
// Reset environment variables before each test case
|
|
15
|
-
vi.restoreAllMocks();
|
|
16
|
-
});
|
|
17
|
-
|
|
18
|
-
it('returns default value when get config error', () => {
|
|
19
|
-
const originalProcess = global.process;
|
|
20
|
-
const originalError = console.error;
|
|
21
|
-
// @ts-ignore
|
|
22
|
-
global.process = undefined;
|
|
23
|
-
console.error = () => {};
|
|
24
|
-
|
|
25
|
-
const preferredRegion = getPreferredRegion();
|
|
26
|
-
expect(preferredRegion).toBe('auto');
|
|
27
|
-
|
|
28
|
-
global.process = originalProcess;
|
|
29
|
-
console.error = originalError;
|
|
30
|
-
});
|
|
31
|
-
|
|
32
|
-
it('return default value when preferredRegion is empty', () => {
|
|
33
|
-
process.env.OPENAI_FUNCTION_REGIONS = '';
|
|
34
|
-
const preferredRegion = getPreferredRegion();
|
|
35
|
-
expect(preferredRegion).toBe('auto');
|
|
36
|
-
});
|
|
37
|
-
|
|
38
|
-
it('return correct list values when preferredRegion is correctly passed', () => {
|
|
39
|
-
process.env.OPENAI_FUNCTION_REGIONS = 'ida1,sfo1';
|
|
40
|
-
const preferredRegion = getPreferredRegion();
|
|
41
|
-
expect(preferredRegion).toStrictEqual(['ida1', 'sfo1']);
|
|
42
|
-
});
|
|
43
|
-
});
|
package/src/app/api/config.ts
DELETED
|
@@ -1,14 +0,0 @@
|
|
|
1
|
-
import { getLLMConfig } from '@/config/llm';
|
|
2
|
-
|
|
3
|
-
export const getPreferredRegion = (region: string | string[] = 'auto') => {
|
|
4
|
-
try {
|
|
5
|
-
if (getLLMConfig().OPENAI_FUNCTION_REGIONS.length <= 0) {
|
|
6
|
-
return region;
|
|
7
|
-
}
|
|
8
|
-
|
|
9
|
-
return getLLMConfig().OPENAI_FUNCTION_REGIONS;
|
|
10
|
-
} catch (error) {
|
|
11
|
-
console.error('get server config failed, error:', error);
|
|
12
|
-
return region;
|
|
13
|
-
}
|
|
14
|
-
};
|