@lobehub/chat 1.45.15 → 1.45.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,31 @@
 
 # Changelog
 
+### [Version 1.45.16](https://github.com/lobehub/lobe-chat/compare/v1.45.15...v1.45.16)
+
+<sup>Released on **2025-01-14**</sup>
+
+#### ♻ Code Refactoring
+
+- **misc**: Improve ai provider code.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Code refactoring
+
+- **misc**: Improve ai provider code, closes [#5442](https://github.com/lobehub/lobe-chat/issues/5442) ([32013b4](https://github.com/lobehub/lobe-chat/commit/32013b4))
+
+</details>
+
+<div align="right">
+
+[](#readme-top)
+
+</div>
+
 ### [Version 1.45.15](https://github.com/lobehub/lobe-chat/compare/v1.45.14...v1.45.15)
 
 <sup>Released on **2025-01-14**</sup>
package/changelog/v1.json
CHANGED
@@ -0,0 +1,92 @@
---
title: Observability and Tracing for LobeChat
description: >-
  Enhance your LobeChat applications with open-source observability and tracing
  using Langfuse. Automatically capture detailed traces and metrics for every
  request to optimize and debug your chats.
tags:
  - Observability
  - Tracing
  - Langfuse
---

# Monitor your LobeChat application with Langfuse

## What is Langfuse?

[Langfuse](https://langfuse.com/) is an **open-source LLM Observability platform**. By enabling the Langfuse integration, you can trace your application data to develop, monitor, and improve the use of LobeChat, including:

- Application [traces](https://langfuse.com/docs/tracing)
- Usage patterns
- Cost data by user and model
- [Evaluations](https://langfuse.com/docs/scores/overview)

## Get Started

<Steps>

### Set up Langfuse

Get your Langfuse API key by signing up for [Langfuse Cloud](https://cloud.langfuse.com) or [self-hosting](https://langfuse.com/docs/deployment/self-host) Langfuse.
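
If you prefer to self-host, a minimal local Langfuse stack can be started with Docker Compose. The commands below are a rough sketch based on the Langfuse repository's compose setup; follow the self-hosting guide linked above for current, production-ready instructions.

```sh
# Sketch: spin up a local Langfuse instance with Docker Compose (evaluation only, not production)
git clone https://github.com/langfuse/langfuse.git
cd langfuse
docker compose up -d
# Create a project in the Langfuse UI, then copy its public (pk-lf-...) and secret (sk-lf-...) API keys
```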

### Set up LobeChat

There are multiple ways to [self-host LobeChat](https://lobehub.com/docs/self-hosting/start). For this example, we will use the Docker Desktop deployment.

<Tabs items={["Environment Variables", "Example in Docker Desktop"]}>
<Tab>

Before deploying LobeChat, set the following four environment variables with the Langfuse API keys you created in the previous step.

```sh
ENABLE_LANGFUSE='1'
LANGFUSE_SECRET_KEY='sk-lf...'
LANGFUSE_PUBLIC_KEY='pk-lf...'
LANGFUSE_HOST='https://cloud.langfuse.com'
```

</Tab>

<Tab>

Before running the Docker container, set the environment variables in Docker Desktop with the Langfuse API keys you created in the previous step (a `docker run` sketch follows these tabs).

<Image
  alt={'Environment Variables in Docker Desktop'}
  src={'https://langfuse.com/images/docs/lobechat-docker-desktop-env.png'}
/>

</Tab>

</Tabs>
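
If you deploy LobeChat with a plain `docker run` instead of Docker Desktop, the same variables can be passed as `-e` flags. The image name, port, and the `OPENAI_API_KEY` placeholder below follow the standard LobeChat Docker instructions; treat this as a sketch and adapt it to your own deployment.

```sh
# Sketch: run LobeChat with the Langfuse integration enabled (adjust keys, port, and model provider as needed)
docker run -d --name lobe-chat -p 3210:3210 \
  -e OPENAI_API_KEY='sk-...' \
  -e ENABLE_LANGFUSE='1' \
  -e LANGFUSE_SECRET_KEY='sk-lf...' \
  -e LANGFUSE_PUBLIC_KEY='pk-lf...' \
  -e LANGFUSE_HOST='https://cloud.langfuse.com' \
  lobehub/lobe-chat
```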

### Activate Analytics in Settings

Once you have LobeChat running, navigate to the **About** tab in the **Settings** and activate analytics. This is necessary for traces to be sent to Langfuse.

<Image
  alt={'LobeChat Settings'}
  src={'https://langfuse.com/images/docs/lobechat-settings.png'}
/>

### See Chat Traces in Langfuse

After setting your LLM model key, you can start interacting with your LobeChat application.

<Image
  alt={'LobeChat Conversation'}
  src={'https://langfuse.com/images/docs/lobechat-converstation.png'}
/>

All conversations in the chat are automatically traced and sent to Langfuse. You can view the traces in the [Traces section](https://langfuse.com/docs/tracing) in the Langfuse UI.
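
Besides the UI, traces can also be fetched programmatically. The request below assumes Langfuse's public REST API with Basic auth (public key as username, secret key as password); check the Langfuse API reference for the exact endpoint and parameters before relying on it.

```sh
# Sketch: list recent traces via the Langfuse public API (endpoint and parameters assumed; see the API reference)
curl -s -u "pk-lf...:sk-lf..." \
  "https://cloud.langfuse.com/api/public/traces?limit=10"
```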

<Image
  alt={'LobeChat Example Trace'}
  src={'https://langfuse.com/images/docs/lobechat-example-trace.png'}
/>

_[Example trace in the Langfuse UI](https://cloud.langfuse.com/project/cloramnkj0002jz088vzn1ja4/traces/63e9246d-3f22-4e45-936d-b0c4ccf55a1e?timestamp=2024-11-26T17%3A00%3A02.028Z&observation=7ea75a0c-d9d1-425c-9b88-27561c63b413)_

</Steps>

## Feedback

If you have any feedback or requests, please create a GitHub [Issue](https://langfuse.com/issue) or share your work with the Langfuse community on [Discord](https://discord.langfuse.com/).
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@lobehub/chat",
-  "version": "1.45.15",
+  "version": "1.45.16",
   "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
   "keywords": [
     "framework",
@@ -1,33 +1,6 @@
 import { redirect } from 'next/navigation';
 
-import Ai21Provider from '@/config/modelProviders/ai21';
-import Ai360Provider from '@/config/modelProviders/ai360';
-import AnthropicProvider from '@/config/modelProviders/anthropic';
-import BaichuanProvider from '@/config/modelProviders/baichuan';
-import DeepSeekProvider from '@/config/modelProviders/deepseek';
-import FireworksAIProvider from '@/config/modelProviders/fireworksai';
-import GiteeAIProvider from '@/config/modelProviders/giteeai';
-import GoogleProvider from '@/config/modelProviders/google';
-import GroqProvider from '@/config/modelProviders/groq';
-import HigressProvider from '@/config/modelProviders/higress';
-import HunyuanProvider from '@/config/modelProviders/hunyuan';
-import InternLMProvider from '@/config/modelProviders/internlm';
-import MinimaxProvider from '@/config/modelProviders/minimax';
-import MistralProvider from '@/config/modelProviders/mistral';
-import MoonshotProvider from '@/config/modelProviders/moonshot';
-import NovitaProvider from '@/config/modelProviders/novita';
-import OpenRouterProvider from '@/config/modelProviders/openrouter';
-import PerplexityProvider from '@/config/modelProviders/perplexity';
-import QwenProvider from '@/config/modelProviders/qwen';
-import SiliconCloudProvider from '@/config/modelProviders/siliconcloud';
-import SparkProvider from '@/config/modelProviders/spark';
-import StepfunProvider from '@/config/modelProviders/stepfun';
-import TaichuProvider from '@/config/modelProviders/taichu';
-import TogetherAIProvider from '@/config/modelProviders/togetherai';
-import UpstageProvider from '@/config/modelProviders/upstage';
-import XAIProvider from '@/config/modelProviders/xai';
-import ZeroOneProvider from '@/config/modelProviders/zeroone';
-import ZhiPuProvider from '@/config/modelProviders/zhipu';
+import { DEFAULT_MODEL_PROVIDER_LIST } from '@/config/modelProviders';
 import { isServerMode } from '@/const/version';
 import { serverDB } from '@/database/server';
 import { AiProviderModel } from '@/database/server/models/aiProvider';
@@ -37,43 +10,14 @@ import { getUserAuth } from '@/utils/server/auth';
 
 import ProviderDetail from './index';
 
-const DEFAULT_MODEL_PROVIDER_LIST = [
-  AnthropicProvider,
-  GoogleProvider,
-  DeepSeekProvider,
-  OpenRouterProvider,
-  NovitaProvider,
-  TogetherAIProvider,
-  FireworksAIProvider,
-  GroqProvider,
-  PerplexityProvider,
-  MistralProvider,
-  Ai21Provider,
-  UpstageProvider,
-  XAIProvider,
-  QwenProvider,
-  HunyuanProvider,
-  SparkProvider,
-  ZhiPuProvider,
-  ZeroOneProvider,
-  StepfunProvider,
-  MoonshotProvider,
-  BaichuanProvider,
-  MinimaxProvider,
-  Ai360Provider,
-  TaichuProvider,
-  InternLMProvider,
-  SiliconCloudProvider,
-  HigressProvider,
-  GiteeAIProvider,
-];
-
 const Page = async (props: PagePropsWithId) => {
   const params = await props.params;
 
   const builtinProviderCard = DEFAULT_MODEL_PROVIDER_LIST.find((v) => v.id === params.id);
+  // if builtin provider
   if (!!builtinProviderCard) return <ProviderDetail source={'builtin'} {...builtinProviderCard} />;
 
+  // if user custom provider
   if (isServerMode) {
     const { userId } = await getUserAuth();
 
@@ -113,11 +113,11 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
   BaichuanProvider,
   MinimaxProvider,
   Ai360Provider,
-  TaichuProvider,
-  InternLMProvider,
   SiliconCloudProvider,
+  InternLMProvider,
   HigressProvider,
   GiteeAIProvider,
+  TaichuProvider,
 ];
 
 export const filterEnabledModels = (provider: ModelProviderCard) => {