@lobehub/chat 0.161.2 → 0.161.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/package.json +1 -1
- package/src/config/langfuse.ts +23 -0
- package/src/config/server/app.ts +0 -12
- package/src/libs/agent-runtime/AgentRuntime.test.ts +21 -5
- package/src/libs/traces/index.test.ts +6 -5
- package/src/libs/traces/index.ts +2 -2
- package/src/server/globalConfig/index.ts +3 -2
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
 
 # Changelog
 
+### [Version 0.161.3](https://github.com/lobehub/lobe-chat/compare/v0.161.2...v0.161.3)
+
+<sup>Released on **2024-05-22**</sup>
+
+#### ♻ Code Refactoring
+
+- **misc**: Refactor the langfuse env.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Code refactoring
+
+- **misc**: Refactor the langfuse env, closes [#2602](https://github.com/lobehub/lobe-chat/issues/2602) ([cbebfbc](https://github.com/lobehub/lobe-chat/commit/cbebfbc))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 0.161.2](https://github.com/lobehub/lobe-chat/compare/v0.161.1...v0.161.2)
 
 <sup>Released on **2024-05-22**</sup>
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "0.161.2",
+  "version": "0.161.3",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
package/src/config/langfuse.ts
ADDED
@@ -0,0 +1,23 @@
+/* eslint-disable sort-keys-fix/sort-keys-fix , typescript-sort-keys/interface */
+import { createEnv } from '@t3-oss/env-nextjs';
+import { z } from 'zod';
+
+export const getLangfuseConfig = () => {
+  return createEnv({
+    runtimeEnv: {
+      ENABLE_LANGFUSE: process.env.ENABLE_LANGFUSE === '1',
+      LANGFUSE_SECRET_KEY: process.env.LANGFUSE_SECRET_KEY || '',
+      LANGFUSE_PUBLIC_KEY: process.env.LANGFUSE_PUBLIC_KEY || '',
+      LANGFUSE_HOST: process.env.LANGFUSE_HOST || 'https://cloud.langfuse.com',
+    },
+
+    server: {
+      ENABLE_LANGFUSE: z.boolean(),
+      LANGFUSE_SECRET_KEY: z.string().optional(),
+      LANGFUSE_PUBLIC_KEY: z.string().optional(),
+      LANGFUSE_HOST: z.string().url(),
+    },
+  });
+};
+
+export const langfuseEnv = getLangfuseConfig();
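The new module validates the Langfuse settings once, at the config boundary, instead of reading raw strings from `process.env` at every call site. The standalone sketch below is not package code: it mirrors the same shape with plain `zod` so it runs outside Next.js, whereas the package itself goes through `createEnv` from `@t3-oss/env-nextjs`.

```ts
// Illustrative only: same env shape as src/config/langfuse.ts, validated with zod directly.
import { z } from 'zod';

const langfuseSchema = z.object({
  ENABLE_LANGFUSE: z.boolean(),
  LANGFUSE_SECRET_KEY: z.string().optional(),
  LANGFUSE_PUBLIC_KEY: z.string().optional(),
  LANGFUSE_HOST: z.string().url(),
});

// ENABLE_LANGFUSE is coerced from the literal string '1'; LANGFUSE_HOST falls back
// to the Langfuse cloud endpoint when unset, exactly as in the diff above.
const langfuseEnv = langfuseSchema.parse({
  ENABLE_LANGFUSE: process.env.ENABLE_LANGFUSE === '1',
  LANGFUSE_SECRET_KEY: process.env.LANGFUSE_SECRET_KEY || '',
  LANGFUSE_PUBLIC_KEY: process.env.LANGFUSE_PUBLIC_KEY || '',
  LANGFUSE_HOST: process.env.LANGFUSE_HOST || 'https://cloud.langfuse.com',
});

console.log('Langfuse tracing enabled:', langfuseEnv.ENABLE_LANGFUSE);
```

Either way, consumers read the validated object (`langfuseEnv`) or call `getLangfuseConfig()` rather than touching `process.env` directly.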
package/src/config/server/app.ts
CHANGED
@@ -14,11 +14,6 @@ declare global {
       PLUGIN_SETTINGS?: string;
 
       DEFAULT_AGENT_CONFIG?: string;
-
-      ENABLE_LANGFUSE?: string;
-      LANGFUSE_PUBLIC_KEY?: string;
-      LANGFUSE_SECRET_KEY?: string;
-      LANGFUSE_HOST?: string;
     }
   }
 }
@@ -35,8 +30,6 @@ export const getAppConfig = () => {
 
     DEFAULT_AGENT_CONFIG: process.env.DEFAULT_AGENT_CONFIG || '',
 
-    SHOW_ACCESS_CODE_CONFIG: !!ACCESS_CODES.length,
-
     SITE_URL: process.env.SITE_URL,
 
     AGENTS_INDEX_URL: !!process.env.AGENTS_INDEX_URL
@@ -48,10 +41,5 @@
       : 'https://chat-plugins.lobehub.com',
 
     PLUGIN_SETTINGS: process.env.PLUGIN_SETTINGS,
-
-    ENABLE_LANGFUSE: process.env.ENABLE_LANGFUSE === '1',
-    LANGFUSE_SECRET_KEY: process.env.LANGFUSE_SECRET_KEY || '',
-    LANGFUSE_PUBLIC_KEY: process.env.LANGFUSE_PUBLIC_KEY || '',
-    LANGFUSE_HOST: process.env.LANGFUSE_HOST || 'https://cloud.langfuse.com',
   };
 };
package/src/libs/agent-runtime/AgentRuntime.test.ts
CHANGED
@@ -5,7 +5,7 @@ import { ClientOptions } from 'openai';
 import { beforeEach, describe, expect, it, vi } from 'vitest';
 
 import { createTraceOptions } from '@/app/api/chat/agentRuntime';
-import
+import * as langfuseCfg from '@/config/langfuse';
 import { JWTPayload } from '@/const/auth';
 import { TraceNameMap } from '@/const/trace';
 import {
@@ -312,15 +312,14 @@ describe('AgentRuntime', () => {
       };
 
       const updateMock = vi.fn();
-
-
+
+      it('should call experimental_onToolCall correctly', async () => {
+        vi.spyOn(langfuseCfg, 'getLangfuseConfig').mockReturnValue({
          ENABLE_LANGFUSE: true,
          LANGFUSE_PUBLIC_KEY: 'abc',
          LANGFUSE_SECRET_KEY: 'DDD',
        } as any);
-      });
 
-      it('should call experimental_onToolCall correctly', async () => {
        // 使用 spyOn 模拟 chat 方法
        vi.spyOn(LobeOpenAI.prototype, 'chat').mockImplementation(
          async (payload, { callback }: any) => {
@@ -338,6 +337,12 @@ describe('AgentRuntime', () => {
       expect(updateMock).toHaveBeenCalledWith({ tags: ['Tools Call'] });
     });
     it('should call onStart correctly', async () => {
+      vi.spyOn(langfuseCfg, 'getLangfuseConfig').mockReturnValue({
+        ENABLE_LANGFUSE: true,
+        LANGFUSE_PUBLIC_KEY: 'abc',
+        LANGFUSE_SECRET_KEY: 'DDD',
+      } as any);
+
       vi.spyOn(LangfuseGenerationClient.prototype, 'update').mockImplementation(updateMock);
       vi.spyOn(LobeOpenAI.prototype, 'chat').mockImplementation(
         async (payload, { callback }: any) => {
@@ -355,6 +360,11 @@ describe('AgentRuntime', () => {
     });
 
     it('should call onCompletion correctly', async () => {
+      vi.spyOn(langfuseCfg, 'getLangfuseConfig').mockReturnValue({
+        ENABLE_LANGFUSE: true,
+        LANGFUSE_PUBLIC_KEY: 'abc',
+        LANGFUSE_SECRET_KEY: 'DDD',
+      } as any);
       // Spy on the chat method and trigger onCompletion callback
       vi.spyOn(LangfuseGenerationClient.prototype, 'update').mockImplementation(updateMock);
       vi.spyOn(LobeOpenAI.prototype, 'chat').mockImplementation(
@@ -379,6 +389,12 @@ describe('AgentRuntime', () => {
       });
     });
     it('should call onFinal correctly', async () => {
+      vi.spyOn(langfuseCfg, 'getLangfuseConfig').mockReturnValue({
+        ENABLE_LANGFUSE: true,
+        LANGFUSE_PUBLIC_KEY: 'abc',
+        LANGFUSE_SECRET_KEY: 'DDD',
+      } as any);
+
       vi.spyOn(LobeOpenAI.prototype, 'chat').mockImplementation(
         async (payload, { callback }: any) => {
           if (callback?.onFinal) {
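Because the tests now import the config module as a namespace (`import * as langfuseCfg`), each test case stubs `getLangfuseConfig` for itself instead of relying on a shared setup block. Below is a minimal, self-contained sketch of that pattern; the `config` object is a stand-in for the real module namespace and is not part of the package.

```ts
import { describe, expect, it, vi } from 'vitest';

// Stand-in for the '@/config/langfuse' module namespace used in the real tests.
const config = {
  getLangfuseConfig: () => ({
    ENABLE_LANGFUSE: false,
    LANGFUSE_PUBLIC_KEY: '',
    LANGFUSE_SECRET_KEY: '',
  }),
};

describe('per-test Langfuse config stubbing', () => {
  it('sees only the values this test provides', () => {
    // mockReturnValue replaces the getter for this test; with restoreMocks/clearMocks
    // enabled in the vitest config, each case stays independent of the others.
    vi.spyOn(config, 'getLangfuseConfig').mockReturnValue({
      ENABLE_LANGFUSE: true,
      LANGFUSE_PUBLIC_KEY: 'abc',
      LANGFUSE_SECRET_KEY: 'DDD',
    });

    expect(config.getLangfuseConfig().ENABLE_LANGFUSE).toBe(true);
  });
});
```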
package/src/libs/traces/index.test.ts
CHANGED
@@ -1,14 +1,15 @@
+// @vitest-environment node
 import { Langfuse } from 'langfuse';
 import { CreateLangfuseTraceBody } from 'langfuse-core';
 import { describe, expect, it, vi } from 'vitest';
 
-import * as server from '@/config/
+import * as server from '@/config/langfuse';
 
 import { TraceClient } from './index';
 
 describe('TraceClient', () => {
   it('should not initialize Langfuse client when ENABLE_LANGFUSE is false', () => {
-    vi.spyOn(server, '
+    vi.spyOn(server, 'getLangfuseConfig').mockReturnValue({
       ENABLE_LANGFUSE: false,
     } as any);
     const client = new TraceClient();
@@ -16,7 +17,7 @@ describe('TraceClient', () => {
   });
 
   it('should throw error if LANGFUSE keys are missing', () => {
-    vi.spyOn(server, '
+    vi.spyOn(server, 'getLangfuseConfig').mockReturnValue({
       ENABLE_LANGFUSE: true,
     } as any);
     expect(() => new TraceClient()).toThrow('NO_LANGFUSE_KEY_ERROR');
@@ -27,7 +28,7 @@ describe('TraceClient', () => {
 
     vi.spyOn(Langfuse.prototype, 'trace').mockImplementation(mockTrace);
 
-    vi.spyOn(server, '
+    vi.spyOn(server, 'getLangfuseConfig').mockReturnValue({
       ENABLE_LANGFUSE: true,
       LANGFUSE_PUBLIC_KEY: 'public-key',
       LANGFUSE_SECRET_KEY: 'secret-key',
@@ -45,7 +46,7 @@ describe('TraceClient', () => {
     const mockShutdownAsync = vi.fn();
 
     vi.spyOn(Langfuse.prototype, 'shutdownAsync').mockImplementation(mockShutdownAsync);
-    vi.spyOn(server, '
+    vi.spyOn(server, 'getLangfuseConfig').mockReturnValue({
       ENABLE_LANGFUSE: true,
       LANGFUSE_PUBLIC_KEY: 'public-key',
       LANGFUSE_SECRET_KEY: 'secret-key',
package/src/libs/traces/index.ts
CHANGED
@@ -1,7 +1,7 @@
 import { Langfuse } from 'langfuse';
 import { CreateLangfuseTraceBody } from 'langfuse-core';
 
-import {
+import { getLangfuseConfig } from '@/config/langfuse';
 import { CURRENT_VERSION } from '@/const/version';
 import { TraceEventClient } from '@/libs/traces/event';
 
@@ -13,7 +13,7 @@ export class TraceClient {
 
   constructor() {
     const { ENABLE_LANGFUSE, LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY, LANGFUSE_HOST } =
-
+      getLangfuseConfig();
 
     if (!ENABLE_LANGFUSE) return;
 
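Taken together with the tests above, the constructor now resolves everything it needs from `getLangfuseConfig()`: it bails out silently when tracing is disabled and refuses to start with incomplete keys. The class below is a hedged reconstruction of that flow, not the package's actual implementation (which also wires release metadata and the `TraceEventClient`); the error string matches what `index.test.ts` asserts, and the local `getLangfuseConfig` is a stand-in for the real config module.

```ts
import { Langfuse } from 'langfuse';

// Stand-in for getLangfuseConfig() from '@/config/langfuse'.
const getLangfuseConfig = () => ({
  ENABLE_LANGFUSE: process.env.ENABLE_LANGFUSE === '1',
  LANGFUSE_PUBLIC_KEY: process.env.LANGFUSE_PUBLIC_KEY || '',
  LANGFUSE_SECRET_KEY: process.env.LANGFUSE_SECRET_KEY || '',
  LANGFUSE_HOST: process.env.LANGFUSE_HOST || 'https://cloud.langfuse.com',
});

class TraceClientSketch {
  private client?: Langfuse;

  constructor() {
    const { ENABLE_LANGFUSE, LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY, LANGFUSE_HOST } =
      getLangfuseConfig();

    // Tracing disabled: no Langfuse client is created at all.
    if (!ENABLE_LANGFUSE) return;

    // Enabled but misconfigured: surface the same error string the tests expect.
    if (!LANGFUSE_PUBLIC_KEY || !LANGFUSE_SECRET_KEY) throw new Error('NO_LANGFUSE_KEY_ERROR');

    this.client = new Langfuse({
      baseUrl: LANGFUSE_HOST,
      publicKey: LANGFUSE_PUBLIC_KEY,
      secretKey: LANGFUSE_SECRET_KEY,
    });
  }

  shutdown = async () => this.client?.shutdownAsync();
}
```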
package/src/server/globalConfig/index.ts
CHANGED
@@ -1,4 +1,5 @@
 import { fileEnv } from '@/config/file';
+import { langfuseEnv } from '@/config/langfuse';
 import { getLLMConfig } from '@/config/llm';
 import {
   OllamaProviderCard,
@@ -14,7 +15,7 @@ import { extractEnabledModels, transformToChatModelCards } from '@/utils/parseMo
 import { parseAgentConfig } from './parseDefaultAgent';
 
 export const getServerGlobalConfig = () => {
-  const { ACCESS_CODES,
+  const { ACCESS_CODES, DEFAULT_AGENT_CONFIG } = getServerConfig();
 
   const {
     ENABLED_OPENAI,
@@ -114,7 +115,7 @@ export const getServerGlobalConfig = () => {
       zhipu: { enabled: ENABLED_ZHIPU },
     },
     telemetry: {
-      langfuse: ENABLE_LANGFUSE,
+      langfuse: langfuseEnv.ENABLE_LANGFUSE,
     },
   };
 
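The net effect for consumers of the server global config is unchanged: `telemetry.langfuse` is still a plain boolean, it is just sourced from the dedicated langfuse config now. A trimmed sketch of that wiring (the field names come from the diff; the local `langfuseEnv` and the type are illustrative stand-ins):

```ts
// Stand-in for langfuseEnv from '@/config/langfuse'.
const langfuseEnv = { ENABLE_LANGFUSE: process.env.ENABLE_LANGFUSE === '1' };

type ServerGlobalConfigSketch = {
  telemetry: { langfuse: boolean };
};

const getServerGlobalConfigSketch = (): ServerGlobalConfigSketch => ({
  telemetry: { langfuse: langfuseEnv.ENABLE_LANGFUSE },
});

// Callers keep branching on the resolved flag; where it comes from is now an
// implementation detail of the langfuse config module.
if (getServerGlobalConfigSketch().telemetry.langfuse) {
  console.log('Langfuse telemetry is on');
}
```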