@lobehub/chat 1.101.1 → 1.101.2

This diff shows the changes between publicly available versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,31 @@
 
  # Changelog
 
+ ### [Version 1.101.2](https://github.com/lobehub/lobe-chat/compare/v1.101.1...v1.101.2)
+
+ <sup>Released on **2025-07-21**</sup>
+
+ #### 💄 Styles
+
+ - **misc**: Fix lobehub provider `/chat` in desktop.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### Styles
+
+ - **misc**: Fix lobehub provider `/chat` in desktop, closes [#8508](https://github.com/lobehub/lobe-chat/issues/8508) ([c801f9c](https://github.com/lobehub/lobe-chat/commit/c801f9c))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ### [Version 1.101.1](https://github.com/lobehub/lobe-chat/compare/v1.101.0...v1.101.1)
 
  <sup>Released on **2025-07-19**</sup>
package/Dockerfile CHANGED
@@ -53,6 +53,8 @@ ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
  SENTRY_ORG="" \
  SENTRY_PROJECT=""
 
+ ENV APP_URL="http://app.com"
+
  # Posthog
  ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
  NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
package/changelog/v1.json CHANGED
@@ -1,4 +1,13 @@
  [
+   {
+     "children": {
+       "improvements": [
+         "Fix lobehub provider /chat in desktop."
+       ]
+     },
+     "date": "2025-07-21",
+     "version": "1.101.2"
+   },
    {
      "children": {
        "fixes": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.101.1",
+   "version": "1.101.2",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
@@ -156,6 +156,7 @@ export { default as infiniai } from './infiniai';
  export { default as internlm } from './internlm';
  export { default as jina } from './jina';
  export { default as lmstudio } from './lmstudio';
+ export { default as lobehub } from './lobehub';
  export { default as minimax } from './minimax';
  export { default as mistral } from './mistral';
  export { default as modelscope } from './modelscope';
@@ -0,0 +1,7 @@
+ import { AIChatModelCard } from '@/types/aiModel';
+
+ const lobehubChatModels: AIChatModelCard[] = [];
+
+ export const allModels = [...lobehubChatModels];
+
+ export default allModels;
@@ -200,6 +200,7 @@ export { default as InfiniAIProviderCard } from './infiniai';
  export { default as InternLMProviderCard } from './internlm';
  export { default as JinaProviderCard } from './jina';
  export { default as LMStudioProviderCard } from './lmstudio';
+ export { default as LobeHubProviderCard } from './lobehub';
  export { default as MinimaxProviderCard } from './minimax';
  export { default as MistralProviderCard } from './mistral';
  export { default as ModelScopeProviderCard } from './modelscope';
@@ -0,0 +1,22 @@
+ import { ModelProviderCard } from '@/types/llm';
+
+ const LobeHub: ModelProviderCard = {
+   chatModels: [],
+   description:
+     'LobeChat Cloud 通过官方部署的 API 来实现 AI 模型的调用,并采用 Credits 计算积分的方式来衡量 AI 模型的用量,对应大模型使用的 Tokens。',
+   enabled: true,
+   id: 'lobehub',
+   modelsUrl: 'https://lobehub.com/zh/docs/usage/subscription/model-pricing',
+   name: 'LobeHub',
+   settings: {
+     modelEditable: false,
+     showAddNewModel: false,
+     showModelFetcher: false,
+   },
+   showConfig: false,
+   url: 'https://lobehub.com',
+ };
+
+ export default LobeHub;
+
+ export const planCardModels = ['gpt-4o-mini', 'deepseek-reasoner', 'claude-3-5-sonnet-latest'];
@@ -148,7 +148,7 @@ const ModelSwitchPanel = memo<IProps>(({ children, onOpenChange, open }) => {
  // 不加限高就会导致面板超长,顶部的内容会被隐藏
  // https://github.com/user-attachments/assets/9c043c47-42c5-46ef-b5c1-bee89376f042
  style: {
-   maxHeight: 500,
+   maxHeight: 550,
    overflowY: 'scroll',
  },
  }}
@@ -72,7 +72,7 @@ beforeEach(async () => {
 
  describe('AgentRuntime', () => {
    describe('should initialize with various providers', () => {
-     const providers = Object.values(ModelProvider);
+     const providers = Object.values(ModelProvider).filter((i) => i !== 'lobehub');
 
      const specialProviderIds = [ModelProvider.VertexAI, ...specialProviders.map((p) => p.id)];
 
@@ -45,6 +45,7 @@ export enum ModelProvider {
    InternLM = 'internlm',
    Jina = 'jina',
    LMStudio = 'lmstudio',
+   LobeHub = 'lobehub',
    Minimax = 'minimax',
    Mistral = 'mistral',
    ModelScope = 'modelscope',