@lobehub/chat 1.21.14 → 1.21.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,40 @@
 
  # Changelog
 
+ ### [Version 1.21.16](https://github.com/lobehub/lobe-chat/compare/v1.21.15...v1.21.16)
+
+ <sup>Released on **2024-10-12**</sup>
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
+ ### [Version 1.21.15](https://github.com/lobehub/lobe-chat/compare/v1.21.14...v1.21.15)
+
+ <sup>Released on **2024-10-12**</sup>
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ### [Version 1.21.14](https://github.com/lobehub/lobe-chat/compare/v1.21.13...v1.21.14)
 
  <sup>Released on **2024-10-12**</sup>
package/Dockerfile CHANGED
@@ -33,23 +33,31 @@ RUN \
  FROM base AS builder
 
  ARG USE_CN_MIRROR
+ ARG NEXT_PUBLIC_BASE_PATH
+ ARG NEXT_PUBLIC_SENTRY_DSN
+ ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
+ ARG NEXT_PUBLIC_POSTHOG_HOST
+ ARG NEXT_PUBLIC_POSTHOG_KEY
+ ARG NEXT_PUBLIC_ANALYTICS_UMAMI
+ ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
+ ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
 
- ENV NEXT_PUBLIC_BASE_PATH=""
+ ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"
 
  # Sentry
- ENV NEXT_PUBLIC_SENTRY_DSN="" \
+ ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
      SENTRY_ORG="" \
      SENTRY_PROJECT=""
 
  # Posthog
- ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="" \
-     NEXT_PUBLIC_POSTHOG_HOST="" \
-     NEXT_PUBLIC_POSTHOG_KEY=""
+ ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
+     NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
+     NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"
 
  # Umami
- ENV NEXT_PUBLIC_ANALYTICS_UMAMI="" \
-     NEXT_PUBLIC_UMAMI_SCRIPT_URL="" \
-     NEXT_PUBLIC_UMAMI_WEBSITE_ID=""
+ ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
+     NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
+     NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"
 
  # Node
  ENV NODE_OPTIONS="--max-old-space-size=8192"
package/Dockerfile.database CHANGED
@@ -33,27 +33,38 @@ RUN \
  FROM base AS builder
 
  ARG USE_CN_MIRROR
-
- ENV NEXT_PUBLIC_SERVICE_MODE="server" \
+ ARG NEXT_PUBLIC_BASE_PATH
+ ARG NEXT_PUBLIC_SERVICE_MODE
+ ARG NEXT_PUBLIC_SENTRY_DSN
+ ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
+ ARG NEXT_PUBLIC_POSTHOG_HOST
+ ARG NEXT_PUBLIC_POSTHOG_KEY
+ ARG NEXT_PUBLIC_ANALYTICS_UMAMI
+ ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
+ ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
+
+ ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"
+
+ ENV NEXT_PUBLIC_SERVICE_MODE="${NEXT_PUBLIC_SERVICE_MODE:-server}" \
      APP_URL="http://app.com" \
      DATABASE_DRIVER="node" \
      DATABASE_URL="postgres://postgres:password@localhost:5432/postgres" \
      KEY_VAULTS_SECRET="use-for-build"
 
  # Sentry
- ENV NEXT_PUBLIC_SENTRY_DSN="" \
+ ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
      SENTRY_ORG="" \
      SENTRY_PROJECT=""
 
  # Posthog
- ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="" \
-     NEXT_PUBLIC_POSTHOG_HOST="" \
-     NEXT_PUBLIC_POSTHOG_KEY=""
+ ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
+     NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
+     NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"
 
  # Umami
- ENV NEXT_PUBLIC_ANALYTICS_UMAMI="" \
-     NEXT_PUBLIC_UMAMI_SCRIPT_URL="" \
-     NEXT_PUBLIC_UMAMI_WEBSITE_ID=""
+ ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
+     NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
+     NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"
 
  # Node
  ENV NODE_OPTIONS="--max-old-space-size=8192"
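The net effect of the Dockerfile changes above is that each `NEXT_PUBLIC_*` value can now be supplied as a build argument instead of being hard-coded to an empty string (or to `server` for the service mode). For readers who build locally rather than through the GitHub Actions workflow documented below, a minimal sketch of the same idea using Docker Compose build args (the service name, paths, and every value are illustrative placeholders, not part of the upstream compose file):

```yaml
# docker-compose.build.yml — illustrative only; adjust names, paths, and values to your setup
services:
  lobe-chat-database:
    build:
      context: .
      dockerfile: Dockerfile.database # or Dockerfile for the non-database image
      args:
        NEXT_PUBLIC_BASE_PATH: /chat                 # assumed custom base path
        NEXT_PUBLIC_SERVICE_MODE: server             # only read by Dockerfile.database
        NEXT_PUBLIC_ANALYTICS_UMAMI: "1"
        NEXT_PUBLIC_UMAMI_SCRIPT_URL: "https://umami.example.com/script.js"
        NEXT_PUBLIC_UMAMI_WEBSITE_ID: 00000000-0000-0000-0000-000000000000
    image: lobe-chat-database:custom
```

Any argument left out simply falls back to the empty default baked into the Dockerfile, so only the variables you actually override need to be listed.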
@@ -21,3 +21,74 @@ LobeChat provides some additional configuration options when deployed, which can
  <Cards href={'environment-variables/s3'} title={'S3 Storage Service'} />
  <Cards href={'environment-variables/analytics'} title={'Data Analytics'} />
  </Cards>
+
+ ## Building a Custom Image with Overridden NEXT_PUBLIC Variables
+
+ If you need to override NEXT_PUBLIC environment variables, you can build a custom Docker image using GitHub Actions
+ without forking the entire LobeChat repository. Here's a guide on how to do this:
+
+ 1. Create a new GitHub repository for your custom build.
+
+ 2. In your new repository, create a `.github/workflows` directory.
+
+ 3. Inside the `.github/workflows` directory, create a file named `build-custom-lobe.yml`:
+ ```yaml
+ name: Build Custom Image
+
+ on:
+   workflow_dispatch: # Manual trigger
+
+ env:
+   REGISTRY: ghcr.io
+   IMAGE_NAME: ${{ github.repository_owner }}/lobe-chat-database # Name of your image
+
+ jobs:
+   build-and-push:
+     runs-on: ubuntu-latest
+     permissions:
+       contents: read
+       packages: write
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v3
+         with:
+           repository: lobehub/lobe-chat
+
+       - name: Log in to the Container registry
+         uses: docker/login-action@v2
+         with:
+           registry: ${{ env.REGISTRY }}
+           username: ${{ github.actor }}
+           password: ${{ secrets.GITHUB_TOKEN }}
+
+       - name: Extract metadata (tags, labels) for Docker
+         id: meta
+         uses: docker/metadata-action@v4
+         with:
+           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+
+       - name: Build and push Docker image
+         uses: docker/build-push-action@v4
+         with:
+           context: .
+           file: Dockerfile.database # Change dockerfile if needed
+           push: true
+           tags: ${{ steps.meta.outputs.tags }}
+           labels: ${{ steps.meta.outputs.labels }}
+           # List all variables you need to overwrite
+           build-args: |
+             NEXT_PUBLIC_BASE_PATH=${{ secrets.NEXT_PUBLIC_BASE_PATH }}
+             NEXT_PUBLIC_SERVICE_MODE=${{ secrets.NEXT_PUBLIC_SERVICE_MODE }}
+ ```
+
+ 4. In your GitHub repository settings > Secrets and variables > Actions > Repository secrets,
+ add any NEXT_PUBLIC variables you want to override.
+
+ 5. Set "Read and write" permissions for workflows in Repository settings > Actions > General > Workflow permissions.
+
+ 6. To build your custom image, go to the "Actions" tab in your GitHub repository and manually trigger the
+ "Build Custom Image" workflow.
+
+ This approach allows you to create a custom build with your desired NEXT_PUBLIC variables without maintaining
+ a full fork of the LobeChat repository. You can trigger a new build whenever you need to update your custom image.
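The documented workflow runs only on manual dispatch and checks out the default branch of `lobehub/lobe-chat`. If you want periodic rebuilds or reproducible builds against a fixed release, the trigger and checkout step could be extended as sketched below; these are fragments to merge into the workflow above, and the cron schedule and pinned tag are illustrative assumptions rather than part of the documented guide:

```yaml
on:
  workflow_dispatch: # manual trigger, as in the guide above
  schedule:
    - cron: "0 3 * * 1" # e.g. rebuild weekly to pick up upstream changes

# ... inside the existing "Checkout repository" step:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          repository: lobehub/lobe-chat
          ref: v1.21.16 # optionally pin to a released tag instead of the default branch
```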
package/netlify.toml CHANGED
@@ -3,7 +3,7 @@ command = "pnpm run build"
  publish = ".next"
 
  [build.environment]
- NODE_OPTIONS = "--max_old_space_size=8192"
+ NODE_OPTIONS = "--max_old_space_size=4096"
 
  [template.environment]
  OPENAI_API_KEY = "set your OpenAI API Key"
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/chat",
-   "version": "1.21.14",
+   "version": "1.21.16",
    "description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
@@ -7,6 +7,7 @@ const SiliconCloud: ModelProviderCard = {
  description: 'DeepSeek V2.5 集合了先前版本的优秀特征,增强了通用和编码能力。',
  displayName: 'DeepSeek V2.5',
  enabled: true,
+ functionCall: true,
  id: 'deepseek-ai/DeepSeek-V2.5',
  pricing: {
  currency: 'CNY',
@@ -19,6 +20,7 @@ const SiliconCloud: ModelProviderCard = {
  description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
  displayName: 'Qwen2.5 7B',
  enabled: true,
+ functionCall: true,
  id: 'Qwen/Qwen2.5-7B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -30,6 +32,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
  displayName: 'Qwen2.5 14B',
+ functionCall: true,
  id: 'Qwen/Qwen2.5-14B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -41,6 +44,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
  displayName: 'Qwen2.5 32B',
+ functionCall: true,
  id: 'Qwen/Qwen2.5-32B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -53,6 +57,7 @@ const SiliconCloud: ModelProviderCard = {
  description: 'Qwen2.5 是全新的大型语言模型系列,具有更强的理解和生成能力。',
  displayName: 'Qwen2.5 72B',
  enabled: true,
+ functionCall: true,
  id: 'Qwen/Qwen2.5-72B-Instruct',
  pricing: {
  currency: 'CNY',
@@ -88,6 +93,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'InternLM2.5 提供多场景下的智能对话解决方案。',
  displayName: 'Internlm 2.5 7B',
+ functionCall: true,
  id: 'internlm/internlm2_5-7b-chat',
  pricing: {
  currency: 'CNY',
@@ -99,6 +105,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: '创新的开源模型InternLM2.5,通过大规模的参数提高了对话智能。',
  displayName: 'Internlm 2.5 20B',
+ functionCall: true,
  id: 'internlm/internlm2_5-20b-chat',
  pricing: {
  currency: 'CNY',
@@ -110,6 +117,7 @@ const SiliconCloud: ModelProviderCard = {
  {
  description: 'GLM-4 9B 开放源码版本,为会话应用提供优化后的对话体验。',
  displayName: 'GLM-4 9B',
+ functionCall: true,
  id: 'THUDM/glm-4-9b-chat',
  pricing: {
  currency: 'CNY',