@lobehub/lobehub 2.0.0-next.4 → 2.0.0-next.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/CHANGELOG.md +25 -0
  2. package/changelog/v1.json +9 -0
  3. package/docs/development/database-schema.dbml +11 -1
  4. package/docs/self-hosting/advanced/online-search.mdx +30 -25
  5. package/docs/self-hosting/advanced/online-search.zh-CN.mdx +25 -23
  6. package/package.json +1 -1
  7. package/packages/database/migrations/0041_improve_index.sql +10 -0
  8. package/packages/database/migrations/meta/0041_snapshot.json +7784 -0
  9. package/packages/database/migrations/meta/_journal.json +7 -0
  10. package/packages/database/src/core/migrations.json +17 -0
  11. package/packages/database/src/models/session.ts +60 -19
  12. package/packages/database/src/schemas/agent.ts +10 -11
  13. package/packages/database/src/schemas/message.ts +5 -1
  14. package/packages/database/src/schemas/relations.ts +6 -4
  15. package/packages/database/src/schemas/session.ts +2 -0
  16. package/packages/database/src/schemas/topic.ts +6 -1
  17. package/packages/model-bank/src/aiModels/anthropic.ts +0 -63
  18. package/packages/model-bank/src/aiModels/higress.ts +0 -55
  19. package/packages/model-bank/src/aiModels/infiniai.ts +21 -0
  20. package/packages/model-bank/src/aiModels/ollamacloud.ts +13 -0
  21. package/packages/model-bank/src/aiModels/siliconcloud.ts +19 -0
  22. package/packages/model-runtime/src/core/streams/openai/__snapshots__/responsesStream.test.ts.snap +0 -38
  23. package/packages/model-runtime/src/providers/minimax/index.ts +5 -5
  24. package/packages/model-runtime/src/providers/search1api/index.test.ts +2 -2
  25. package/packages/web-crawler/src/crawImpl/firecrawl.ts +39 -12
  26. package/scripts/migrateServerDB/index.ts +2 -1
  27. package/src/config/modelProviders/anthropic.ts +0 -23
  28. package/src/config/modelProviders/higress.ts +0 -23
  29. package/src/config/modelProviders/minimax.ts +1 -1
  30. package/src/config/modelProviders/qiniu.ts +1 -1
  31. package/src/server/routers/lambda/session.ts +8 -5
  32. package/src/server/services/search/impls/firecrawl/index.ts +51 -11
  33. package/src/server/services/search/impls/firecrawl/type.ts +60 -9
  34. package/src/services/user/client.test.ts +4 -1
package/CHANGELOG.md CHANGED
@@ -2,6 +2,31 @@
 
  # Changelog
 
+ ## [Version 2.0.0-next.5](https://github.com/lobehub/lobe-chat/compare/v2.0.0-next.4...v2.0.0-next.5)
+
+ <sup>Released on **2025-10-31**</sup>
+
+ #### ♻ Code Refactoring
+
+ - **misc**: Migrating Firecrawl to v2.
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ #### Code refactoring
+
+ - **misc**: Migrating Firecrawl to v2, closes [#9850](https://github.com/lobehub/lobe-chat/issues/9850) ([efb4c22](https://github.com/lobehub/lobe-chat/commit/efb4c22))
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ## [Version 2.0.0-next.4](https://github.com/lobehub/lobe-chat/compare/v2.0.0-next.3...v2.0.0-next.4)
 
  <sup>Released on **2025-10-31**</sup>
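The refactor above tracks Firecrawl's v2 API (the `FIRECRAWL_URL` default documented later in this diff moves from `/v1` to `/v2`). As a rough illustration only — not the code in `package/packages/web-crawler/src/crawImpl/firecrawl.ts` or `package/src/server/services/search/impls/firecrawl/` — a minimal v2 scrape call could look like the sketch below, assuming the v2 endpoint keeps Bearer-token auth and a `{ url, formats }` request body:

```ts
// Hedged sketch of a Firecrawl v2 scrape request (assumed request/response shape).
const scrapeWithFirecrawl = async (url: string) => {
  // FIRECRAWL_URL defaults to https://api.firecrawl.dev/v2 after this release.
  const base = process.env.FIRECRAWL_URL ?? 'https://api.firecrawl.dev/v2';

  const res = await fetch(`${base}/scrape`, {
    body: JSON.stringify({ formats: ['markdown'], url }),
    headers: {
      'Authorization': `Bearer ${process.env.FIRECRAWL_API_KEY}`,
      'Content-Type': 'application/json',
    },
    method: 'POST',
  });

  if (!res.ok) throw new Error(`Firecrawl scrape failed with status ${res.status}`);

  // Assumed v2 response shape: { success: boolean, data?: { markdown?: string } }
  const json = (await res.json()) as { data?: { markdown?: string }; success: boolean };
  return json.data?.markdown;
};
```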
package/changelog/v1.json CHANGED
@@ -1,4 +1,13 @@
  [
+   {
+     "children": {
+       "improvements": [
+         "Migrating Firecrawl to v2."
+       ]
+     },
+     "date": "2025-10-31",
+     "version": "2.0.0-next.5"
+   },
    {
      "children": {
        "improvements": [
package/docs/development/database-schema.dbml CHANGED
@@ -41,6 +41,7 @@ table agents_files {
 
    indexes {
      (file_id, agent_id, user_id) [pk]
+     agent_id [name: 'agents_files_agent_id_idx']
    }
  }
 
@@ -334,6 +335,7 @@ table message_groups {
 
    indexes {
      (client_id, user_id) [name: 'message_groups_client_id_user_id_unique', unique]
+     topic_id [name: 'message_groups_topic_id_idx']
    }
  }
 
@@ -444,6 +446,7 @@ table messages {
      user_id [name: 'messages_user_id_idx']
      session_id [name: 'messages_session_id_idx']
      thread_id [name: 'messages_thread_id_idx']
+     agent_id [name: 'messages_agent_id_idx']
    }
  }
 
@@ -805,6 +808,8 @@ table agents_to_sessions {
 
    indexes {
      (agent_id, session_id) [pk]
+     session_id [name: 'agents_to_sessions_session_id_idx']
+     agent_id [name: 'agents_to_sessions_agent_id_idx']
    }
  }
 
@@ -865,6 +870,8 @@ table sessions {
      (client_id, user_id) [name: 'sessions_client_id_user_id_unique', unique]
      user_id [name: 'sessions_user_id_idx']
      (id, user_id) [name: 'sessions_id_user_id_idx']
+     (user_id, updated_at) [name: 'sessions_user_id_updated_at_idx']
+     group_id [name: 'sessions_group_id_idx']
    }
  }
 
@@ -885,6 +892,7 @@ table threads {
 
    indexes {
      (client_id, user_id) [name: 'threads_client_id_user_id_unique', unique]
+     topic_id [name: 'threads_topic_id_idx']
    }
  }
 
@@ -917,6 +925,8 @@ table topics {
      (client_id, user_id) [name: 'topics_client_id_user_id_unique', unique]
      user_id [name: 'topics_user_id_idx']
      (id, user_id) [name: 'topics_id_user_id_idx']
+     session_id [name: 'topics_session_id_idx']
+     group_id [name: 'topics_group_id_idx']
    }
  }
 
@@ -1166,4 +1176,4 @@ ref: topic_documents.document_id > documents.id
 
  ref: topic_documents.topic_id > topics.id
 
- ref: topics.session_id - sessions.id
+ ref: topics.session_id - sessions.id
package/docs/self-hosting/advanced/online-search.mdx CHANGED
@@ -2,9 +2,11 @@
  title: >-
    Configuring Online Search Functionality - Enhancing AI's Ability to Access Web Information
 
+
  description: >-
    Learn how to configure the SearXNG online search functionality for LobeChat, enabling AI to access the latest web information.
 
+
  tags:
    - Online Search
    - SearXNG
@@ -17,7 +19,10 @@ tags:
  LobeChat supports configuring **web search functionality** for AI, enabling it to retrieve real-time information from the internet to provide more accurate and up-to-date responses. Web search supports multiple search engine providers, including [SearXNG](https://github.com/searxng/searxng), [Search1API](https://www.search1api.com), [Google](https://programmablesearchengine.google.com), and [Brave](https://brave.com/search/api), among others.
 
  <Callout type="info">
-   Web search allows AI to access time-sensitive content, such as the latest news, technology trends, or product information. You can deploy the open-source SearXNG yourself, or choose to integrate mainstream search services like Search1API, Google, Brave, etc., combining them freely based on your use case.
+   Web search allows AI to access time-sensitive content, such as the latest news, technology trends,
+   or product information. You can deploy the open-source SearXNG yourself, or choose to integrate
+   mainstream search services like Search1API, Google, Brave, etc., combining them freely based on
+   your use case.
  </Callout>
 
  By setting the search service environment variable `SEARCH_PROVIDERS` and the corresponding API Keys, LobeChat will query multiple sources and return the results. You can also configure crawler service environment variables such as `CRAWLER_IMPLS` (e.g., `browserless`, `firecrawl`, `tavily`, etc.) to extract webpage content, enhancing the capability of search + reading.
@@ -29,20 +34,20 @@ By setting the search service environment variable `SEARCH_PROVIDERS` and the co
  Configure available web crawlers for structured extraction of webpage content.
 
  ```env
- CRAWLER_IMPLS="native,search1api"
+ CRAWLER_IMPLS="naive,search1api"
  ```
 
  Supported crawler types are listed below:
 
- | Value | Description | Environment Variable |
- | ------------- | ------------- | ------------- |
- | `browserless` | Headless browser crawler based on [Browserless](https://www.browserless.io/), suitable for rendering complex pages. | `BROWSERLESS_TOKEN` |
- | `exa` | Crawler capabilities provided by [Exa](https://exa.ai/), API required. | `EXA_API_KEY` |
- | `firecrawl` | [Firecrawl](https://firecrawl.dev/) headless browser API, ideal for modern websites. | `FIRECRAWL_API_KEY` |
- | `jina` | Crawler service from [Jina AI](https://jina.ai/), supports fast content summarization. | `JINA_READER_API_KEY` |
- | `native` | Built-in general-purpose crawler for standard web structures. | |
- | `search1api` | Page crawling capabilities from [Search1API](https://www.search1api.com), great for structured content extraction. | `SEARCH1API_API_KEY` `SEARCH1API_CRAWL_API_KEY` `SEARCH1API_SEARCH_API_KEY` |
- | `tavily` | Web scraping and summarization API from [Tavily](https://www.tavily.com/). | `TAVILY_API_KEY` |
+ | Value | Description | Environment Variable |
+ | ------------- | ------------- | ------------- |
+ | `browserless` | Headless browser crawler based on [Browserless](https://www.browserless.io/), suitable for rendering complex pages. | `BROWSERLESS_TOKEN` |
+ | `exa` | Crawler capabilities provided by [Exa](https://exa.ai/), API required. | `EXA_API_KEY` |
+ | `firecrawl` | [Firecrawl](https://firecrawl.dev/) headless browser API, ideal for modern websites. | `FIRECRAWL_API_KEY` |
+ | `jina` | Crawler service from [Jina AI](https://jina.ai/), supports fast content summarization. | `JINA_READER_API_KEY` |
+ | `naive` | Built-in general-purpose crawler for standard web structures. | |
+ | `search1api` | Page crawling capabilities from [Search1API](https://www.search1api.com), great for structured content extraction. | `SEARCH1API_API_KEY` `SEARCH1API_CRAWL_API_KEY` `SEARCH1API_SEARCH_API_KEY` |
+ | `tavily` | Web scraping and summarization API from [Tavily](https://www.tavily.com/). | `TAVILY_API_KEY` |
 
  > 💡 Setting multiple crawlers increases success rate; the system will try different ones based on priority.
 
@@ -58,19 +63,19 @@ SEARCH_PROVIDERS="searxng"
 
  Supported search engines include:
 
- | Value | Description | Environment Variable |
- | ------------ | ------------- | ------------- |
- | `anspire` | Search service provided by [Anspire](https://anspire.ai/). | `ANSPIRE_API_KEY` |
- | `bocha` | Search service from [Bocha](https://open.bochaai.com/). | `BOCHA_API_KEY` |
- | `brave` | [Brave](https://search.brave.com/help/api), a privacy-friendly search source. | `BRAVE_API_KEY` |
- | `exa` | [Exa](https://exa.ai/), a search API designed for AI. | `EXA_API_KEY` |
- | `firecrawl` | Search capabilities via [Firecrawl](https://firecrawl.dev/). | `FIRECRAWL_API_KEY` |
- | `google` | Uses [Google Programmable Search Engine](https://programmablesearchengine.google.com/). | `GOOGLE_PSE_API_KEY` `GOOGLE_PSE_ENGINE_ID` |
- | `jina` | Semantic search provided by [Jina AI](https://jina.ai/). | `JINA_READER_API_KEY` |
- | `kagi` | Premium search API by [Kagi](https://kagi.com/), requires a subscription key. | `KAGI_API_KEY` |
- | `search1api` | Aggregated search capabilities from [Search1API](https://www.search1api.com). | `SEARCH1API_API_KEY` `SEARCH1API_CRAWL_API_KEY` `SEARCH1API_SEARCH_API_KEY` |
- | `searxng` | Use a self-hosted or public [SearXNG](https://searx.space/) instance. | `SEARXNG_URL` |
- | `tavily` | [Tavily](https://www.tavily.com/), offers fast web summaries and answers. | `TAVILY_API_KEY` |
+ | Value | Description | Environment Variable |
+ | ------------ | ------------- | ------------- |
+ | `anspire` | Search service provided by [Anspire](https://anspire.ai/). | `ANSPIRE_API_KEY` |
+ | `bocha` | Search service from [Bocha](https://open.bochaai.com/). | `BOCHA_API_KEY` |
+ | `brave` | [Brave](https://search.brave.com/help/api), a privacy-friendly search source. | `BRAVE_API_KEY` |
+ | `exa` | [Exa](https://exa.ai/), a search API designed for AI. | `EXA_API_KEY` |
+ | `firecrawl` | Search capabilities via [Firecrawl](https://firecrawl.dev/). | `FIRECRAWL_API_KEY` |
+ | `google` | Uses [Google Programmable Search Engine](https://programmablesearchengine.google.com/). | `GOOGLE_PSE_API_KEY` `GOOGLE_PSE_ENGINE_ID` |
+ | `jina` | Semantic search provided by [Jina AI](https://jina.ai/). | `JINA_READER_API_KEY` |
+ | `kagi` | Premium search API by [Kagi](https://kagi.com/), requires a subscription key. | `KAGI_API_KEY` |
+ | `search1api` | Aggregated search capabilities from [Search1API](https://www.search1api.com). | `SEARCH1API_API_KEY` `SEARCH1API_CRAWL_API_KEY` `SEARCH1API_SEARCH_API_KEY` |
+ | `searxng` | Use a self-hosted or public [SearXNG](https://searx.space/) instance. | `SEARXNG_URL` |
+ | `tavily` | [Tavily](https://www.tavily.com/), offers fast web summaries and answers. | `TAVILY_API_KEY` |
 
  > ⚠️ Some search providers require you to apply for an API Key and configure it in your `.env` file.
 
@@ -139,7 +144,7 @@ GOOGLE_PSE_ENGINE_ID=your-google-cx-id
  Sets the access URL for the [Firecrawl](https://firecrawl.dev/) API, used for web content scraping. Default value:
 
  ```env
- FIRECRAWL_URL=https://api.firecrawl.dev/v1
+ FIRECRAWL_URL=https://api.firecrawl.dev/v2
  ```
 
  > ⚙️ Usually does not need to be changed unless you’re using a self-hosted version or a proxy service.
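Putting the variables documented in this file together, a hypothetical `.env` fragment could look like the sketch below; keys and hostnames are placeholders, and only the providers you actually enable need their corresponding keys:

```env
# Crawlers tried in priority order; "naive" is the built-in general-purpose crawler.
CRAWLER_IMPLS="firecrawl,naive"
# Search providers queried for web results.
SEARCH_PROVIDERS="searxng,firecrawl"
FIRECRAWL_API_KEY=fc-xxxxxxxxxxxxxxxx
SEARXNG_URL=https://searxng.example.com
# Only needed for a self-hosted or proxied Firecrawl; the default is the v2 endpoint.
# FIRECRAWL_URL=https://api.firecrawl.dev/v2
```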
package/docs/self-hosting/advanced/online-search.zh-CN.mdx CHANGED
@@ -13,7 +13,9 @@ tags:
  LobeChat 支持为 AI 配置**联网搜索功能**,使其能够实时获取互联网信息,从而提供更准确、最新的回答。联网搜索支持多个搜索引擎提供商,包括 [SearXNG](https://github.com/searxng/searxng)、[Search1API](https://www.search1api.com)、[Google](https://programmablesearchengine.google.com)、[Brave](https://brave.com/search/api) 等。
 
  <Callout type="info">
-   联网搜索可以让 AI 获取时效性内容,如最新新闻、技术动态或产品信息。你可以使用开源的 SearXNG 自行部署,也可以选择集成主流搜索引擎服务,如 Search1API、Google、Brave 等,根据你的使用场景自由组合。
+   联网搜索可以让 AI 获取时效性内容,如最新新闻、技术动态或产品信息。你可以使用开源的 SearXNG
+   自行部署,也可以选择集成主流搜索引擎服务,如 Search1API、Google、Brave
+   等,根据你的使用场景自由组合。
  </Callout>
 
  通过设置搜索服务环境变量 `SEARCH_PROVIDERS` 和对应的 API Key,LobeChat 将在多个搜索源中查询并返回结果。你还可以搭配配置爬虫服务环境变量 `CRAWLER_IMPLS`(如 `browserless`、`firecrawl`、`tavily` 等)以提取网页内容,实现搜索 + 阅读的增强能力。
@@ -25,20 +27,20 @@ LobeChat 支持为 AI 配置**联网搜索功能**,使其能够实时获取互
  配置可用的网页爬虫,用于对网页进行结构化内容提取。
 
  ```env
- CRAWLER_IMPLS="native,search1api"
+ CRAWLER_IMPLS="naive,search1api"
  ```
 
  支持的爬虫类型如下:
 
- | 值 | 说明 | 环境变量 |
- | ------------- | ------------- | ------------- |
- | `browserless` | 基于 [Browserless](https://www.browserless.io/) 的无头浏览器爬虫,适合渲染复杂页面。 | `BROWSERLESS_TOKEN` |
- | `exa` | 使用 [Exa](https://exa.ai/) 提供的爬虫能力,需申请 API。 | `EXA_API_KEY` |
- | `firecrawl` | [Firecrawl](https://firecrawl.dev/) 无头浏览器 API,适合现代网站抓取。 | `FIRECRAWL_API_KEY` |
- | `jina` | 使用 [Jina AI](https://jina.ai/) 的爬虫服务,支持快速提取摘要信息。 | `JINA_READER_API_KEY` |
- | `native` | 内置通用爬虫,适用于标准网页结构。 | |
+ | 值 | 说明 | 环境变量 |
+ | ------------- | ------------- | ------------- |
+ | `browserless` | 基于 [Browserless](https://www.browserless.io/) 的无头浏览器爬虫,适合渲染复杂页面。 | `BROWSERLESS_TOKEN` |
+ | `exa` | 使用 [Exa](https://exa.ai/) 提供的爬虫能力,需申请 API。 | `EXA_API_KEY` |
+ | `firecrawl` | [Firecrawl](https://firecrawl.dev/) 无头浏览器 API,适合现代网站抓取。 | `FIRECRAWL_API_KEY` |
+ | `jina` | 使用 [Jina AI](https://jina.ai/) 的爬虫服务,支持快速提取摘要信息。 | `JINA_READER_API_KEY` |
+ | `naive` | 内置简易通用爬虫,适用于标准网页结构。 | |
  | `search1api` | 利用 [Search1API](https://www.search1api.com) 提供的页面抓取能力,适合结构化内容提取。 | `SEARCH1API_API_KEY` `SEARCH1API_CRAWL_API_KEY` `SEARCH1API_SEARCH_API_KEY` |
- | `tavily` | 使用 [Tavily](https://www.tavily.com/) 的网页抓取与摘要 API。 | `TAVILY_API_KEY` |
+ | `tavily` | 使用 [Tavily](https://www.tavily.com/) 的网页抓取与摘要 API。 | `TAVILY_API_KEY` |
 
  > 💡 设置多个爬虫可提升成功率,系统将根据优先级尝试不同爬虫。
 
@@ -54,19 +56,19 @@ SEARCH_PROVIDERS="searxng"
 
  支持的搜索引擎如下:
 
- | 值 | 说明 | 环境变量 |
- | ------------ | ------------- | ------------- |
- | `anspire` | 基于 [Anspire(安思派)](https://anspire.ai/) 提供的搜索服务。 | `ANSPIRE_API_KEY` |
- | `bocha` | 基于 [Bocha(博查)](https://open.bochaai.com/) 提供的搜索服务。 | `BOCHA_API_KEY` |
- | `brave` | [Brave](https://search.brave.com/help/api),隐私友好的搜索源。 | `BRAVE_API_KEY` |
- | `exa` | [Exa](https://exa.ai/),面向 AI 的搜索 API。 | `EXA_API_KEY` |
- | `firecrawl` | 支持 [Firecrawl](https://firecrawl.dev/) 提供的搜索服务。 | `FIRECRAWL_API_KEY` |
- | `google` | 使用 [Google Programmable Search Engine](https://programmablesearchengine.google.com/)。 | `GOOGLE_PSE_API_KEY` `GOOGLE_PSE_ENGINE_ID` |
- | `jina` | 使用 [Jina AI](https://jina.ai/) 提供的语义搜索服务。 | `JINA_READER_API_KEY` |
- | `kagi` | [Kagi](https://kagi.com/) 提供的高级搜索 API,需订阅 Key。 | `KAGI_API_KEY` |
+ | 值 | 说明 | 环境变量 |
+ | ------------ | ------------- | ------------- |
+ | `anspire` | 基于 [Anspire(安思派)](https://anspire.ai/) 提供的搜索服务。 | `ANSPIRE_API_KEY` |
+ | `bocha` | 基于 [Bocha(博查)](https://open.bochaai.com/) 提供的搜索服务。 | `BOCHA_API_KEY` |
+ | `brave` | [Brave](https://search.brave.com/help/api),隐私友好的搜索源。 | `BRAVE_API_KEY` |
+ | `exa` | [Exa](https://exa.ai/),面向 AI 的搜索 API。 | `EXA_API_KEY` |
+ | `firecrawl` | 支持 [Firecrawl](https://firecrawl.dev/) 提供的搜索服务。 | `FIRECRAWL_API_KEY` |
+ | `google` | 使用 [Google Programmable Search Engine](https://programmablesearchengine.google.com/)。 | `GOOGLE_PSE_API_KEY` `GOOGLE_PSE_ENGINE_ID` |
+ | `jina` | 使用 [Jina AI](https://jina.ai/) 提供的语义搜索服务。 | `JINA_READER_API_KEY` |
+ | `kagi` | [Kagi](https://kagi.com/) 提供的高级搜索 API,需订阅 Key。 | `KAGI_API_KEY` |
  | `search1api` | 使用 [Search1API](https://www.search1api.com) 聚合搜索能力。 | `SEARCH1API_API_KEY` `SEARCH1API_CRAWL_API_KEY` `SEARCH1API_SEARCH_API_KEY` |
- | `searxng` | 使用自托管或公共 [SearXNG](https://searx.space/) 实例。 | `SEARXNG_URL` |
- | `tavily` | [Tavily](https://www.tavily.com/),快速网页摘要与答案返回。 | `TAVILY_API_KEY` |
+ | `searxng` | 使用自托管或公共 [SearXNG](https://searx.space/) 实例。 | `SEARXNG_URL` |
+ | `tavily` | [Tavily](https://www.tavily.com/),快速网页摘要与答案返回。 | `TAVILY_API_KEY` |
 
  > ⚠️ 某些搜索提供商需要单独申请 API Key,并在 `.env` 中配置相关凭证。
 
@@ -135,7 +137,7 @@ GOOGLE_PSE_ENGINE_ID=your-google-cx-id
  设置 [Firecrawl](https://firecrawl.dev/) API 的访问地址。用于网页内容抓取,默认值如下:
 
  ```env
- FIRECRAWL_URL=https://api.firecrawl.dev/v1
+ FIRECRAWL_URL=https://api.firecrawl.dev/v2
  ```
 
  > ⚙️ 一般无需修改,除非你使用的是自托管版本或代理服务。
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lobehub/lobehub",
-   "version": "2.0.0-next.4",
+   "version": "2.0.0-next.5",
    "description": "LobeHub - an open-source,comprehensive AI Agent framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
    "keywords": [
      "framework",
package/packages/database/migrations/0041_improve_index.sql CHANGED
@@ -0,0 +1,10 @@
+ CREATE INDEX IF NOT EXISTS "agents_files_agent_id_idx" ON "agents_files" USING btree ("agent_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "message_groups_topic_id_idx" ON "message_groups" USING btree ("topic_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "messages_agent_id_idx" ON "messages" USING btree ("agent_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "agents_to_sessions_session_id_idx" ON "agents_to_sessions" USING btree ("session_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "agents_to_sessions_agent_id_idx" ON "agents_to_sessions" USING btree ("agent_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "sessions_user_id_updated_at_idx" ON "sessions" USING btree ("user_id","updated_at");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "sessions_group_id_idx" ON "sessions" USING btree ("group_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "threads_topic_id_idx" ON "threads" USING btree ("topic_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "topics_session_id_idx" ON "topics" USING btree ("session_id");--> statement-breakpoint
+ CREATE INDEX IF NOT EXISTS "topics_group_id_idx" ON "topics" USING btree ("group_id");
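The composite `sessions_user_id_updated_at_idx` above pairs with a "most recently updated sessions for a user" query; the session model changes in this release (`package/packages/database/src/models/session.ts`) suggest that access pattern, though the exact query is not shown in this diff. A hypothetical drizzle-orm sketch of that query shape — the `db` instance and import paths are assumptions for illustration, not the package's actual code:

```ts
import { desc, eq } from 'drizzle-orm';

// Hypothetical imports; real paths and exports in the package may differ.
import { db } from './database'; // an initialized drizzle instance
import { sessions } from './schemas/session'; // table defined in packages/database/src/schemas/session.ts

// Filter by user_id and order by updated_at descending — the column pair covered by
// sessions_user_id_updated_at_idx, so the planner can serve both the WHERE clause
// and the ORDER BY from the index.
export const listRecentSessions = (userId: string) =>
  db
    .select()
    .from(sessions)
    .where(eq(sessions.userId, userId))
    .orderBy(desc(sessions.updatedAt))
    .limit(50);
```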