@lobehub/lobehub 2.0.0-next.351 → 2.0.0-next.352

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,23 @@
 
  # Changelog
 
+ ## [Version 2.0.0-next.352](https://github.com/lobehub/lobe-chat/compare/v2.0.0-next.351...v2.0.0-next.352)
+
+ <sup>Released on **2026-01-23**</sup>
+
+ <br/>
+
+ <details>
+ <summary><kbd>Improvements and Fixes</kbd></summary>
+
+ </details>
+
+ <div align="right">
+
+ [![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+ </div>
+
  ## [Version 2.0.0-next.351](https://github.com/lobehub/lobe-chat/compare/v2.0.0-next.350...v2.0.0-next.351)
 
  <sup>Released on **2026-01-23**</sup>
package/changelog/v1.json CHANGED
@@ -1,4 +1,9 @@
  [
+ {
+ "children": {},
+ "date": "2026-01-23",
+ "version": "2.0.0-next.352"
+ },
  {
  "children": {
  "fixes": [
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lobehub/lobehub",
- "version": "2.0.0-next.351",
+ "version": "2.0.0-next.352",
  "description": "LobeHub - an open-source,comprehensive AI Agent framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
  "keywords": [
  "framework",
@@ -0,0 +1,119 @@
+ # LoCoMo Benchmark Ingest Guide
+
+ This folder contains the LoCoMo benchmark ingestor (`run.ts`). It loads `locomo10.json` and posts each sample to `/api/webhooks/memory-extraction/benchmark-locomo`, creating one user per sample with the ID pattern `locomo-user-${sampleId}`.
+
+ ## 1. Seed benchmark users
+
+ Run this SQL before ingesting so the webhook has user records to attach memories to:
+
+ ```sql
+ INSERT INTO users (id, email, normalized_email, username) VALUES
+ ('locomo-user-conv-26', 'locomo1-conv-26@example.com', 'LOCOMO1_CONV26@EXAMPLE.COM', 'locomo1-conv-26'),
+ ('locomo-user-conv-30', 'locomo1-conv-30@example.com', 'LOCOMO1_CONV30@EXAMPLE.COM', 'locomo1-conv-30'),
+ ('locomo-user-conv-41', 'locomo1-conv-41@example.com', 'LOCOMO1_CONV41@EXAMPLE.COM', 'locomo1-conv-41'),
+ ('locomo-user-conv-42', 'locomo1-conv-42@example.com', 'LOCOMO1_CONV42@EXAMPLE.COM', 'locomo1-conv-42'),
+ ('locomo-user-conv-43', 'locomo1-conv-43@example.com', 'LOCOMO1_CONV43@EXAMPLE.COM', 'locomo1-conv-43'),
+ ('locomo-user-conv-44', 'locomo1-conv-44@example.com', 'LOCOMO1_CONV44@EXAMPLE.COM', 'locomo1-conv-44'),
+ ('locomo-user-conv-47', 'locomo1-conv-47@example.com', 'LOCOMO1_CONV47@EXAMPLE.COM', 'locomo1-conv-47'),
+ ('locomo-user-conv-48', 'locomo1-conv-48@example.com', 'LOCOMO1_CONV48@EXAMPLE.COM', 'locomo1-conv-48'),
+ ('locomo-user-conv-49', 'locomo1-conv-49@example.com', 'LOCOMO1_CONV49@EXAMPLE.COM', 'locomo1-conv-49'),
+ ('locomo-user-conv-50', 'locomo1-conv-50@example.com', 'LOCOMO1_CONV50@EXAMPLE.COM', 'locomo1-conv-50')
+ ON CONFLICT (id) DO UPDATE
+ SET email = EXCLUDED.email,
+ normalized_email = EXCLUDED.normalized_email,
+ username = EXCLUDED.username;
+
+ -- optional: ensure settings rows exist
+ INSERT INTO user_settings (id) VALUES
+ ('locomo-user-conv-26'), ('locomo-user-conv-30'), ('locomo-user-conv-41'), ('locomo-user-conv-42'),
+ ('locomo-user-conv-43'), ('locomo-user-conv-44'), ('locomo-user-conv-47'), ('locomo-user-conv-48'),
+ ('locomo-user-conv-49'), ('locomo-user-conv-50')
+ ON CONFLICT DO NOTHING;
+ ```
+
+ ## 2. Clear benchmark memories (reset)
+
+ Use this to strip extraction metadata from topics and delete all benchmark memory rows for the same users. Each statement includes its own CTE so it works in a multi-statement script:
+
+ ```sql
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ UPDATE topics t
+ SET metadata = metadata #- '{userMemoryExtractRunState}'
+ FROM target_users u
+ WHERE t.user_id = u.user_id;
+
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ UPDATE topics t
+ SET metadata = metadata #- '{userMemoryExtractStatus}'
+ FROM target_users u
+ WHERE t.user_id = u.user_id;
+
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ DELETE FROM user_memories_experiences USING target_users u WHERE user_memories_experiences.user_id = u.user_id;
+
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ DELETE FROM user_memories_contexts USING target_users u WHERE user_memories_contexts.user_id = u.user_id;
+
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ DELETE FROM user_memories_preferences USING target_users u WHERE user_memories_preferences.user_id = u.user_id;
+
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ DELETE FROM user_memories_identities USING target_users u WHERE user_memories_identities.user_id = u.user_id;
+
+ WITH target_users AS (
+ SELECT UNNEST(ARRAY[
+ 'locomo-user-conv-26','locomo-user-conv-30','locomo-user-conv-41',
+ 'locomo-user-conv-42','locomo-user-conv-43','locomo-user-conv-44',
+ 'locomo-user-conv-47','locomo-user-conv-48','locomo-user-conv-49','locomo-user-conv-50'
+ ]) AS user_id
+ )
+ DELETE FROM user_memories USING target_users u WHERE user_memories.user_id = u.user_id;
+ ```
+
+ ## 3. Run the ingest
+
+ Set the required envs and execute:
+
+ ```bash
+ MEMORY_USER_MEMORY_LOBEHUB_BASE_URL="http://localhost:3000" \
+ MEMORY_USER_MEMORY_BENCHMARKS_LOCOMO_DATASETS="path/to/locomo/dataset/data/locomo10.json" \
+ bun run tsx lobehub/packages/memory-user-memory/benchmarks/locomo/run.ts
+ ```
+
+ Only samples whose IDs pass the filter in `run.ts` (currently `conv-26`) will ingest; adjust the filter if you need more samples.
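For quick reference, here is a minimal editorial sketch, not part of the package diff, of the two conventions the guide above relies on: the `locomo-user-${sampleId}` user-ID pattern and the sample-ID filter mentioned in the closing note. The helpers `toBenchmarkUserId`, `ALLOWED_SAMPLE_IDS`, and `shouldIngest` are hypothetical names; the real logic lives in `run.ts` and may be shaped differently.

```ts
// Hypothetical helpers illustrating the conventions described in the guide above.
// Only the `locomo-user-${sampleId}` pattern and the `conv-26` filter come from the
// source; the names and structure here are editorial.
const toBenchmarkUserId = (sampleId: string): string => `locomo-user-${sampleId}`;

// Widen this set if you need more than the currently filtered sample.
const ALLOWED_SAMPLE_IDS = new Set(['conv-26']);
const shouldIngest = (sampleId: string): boolean => ALLOWED_SAMPLE_IDS.has(sampleId);

console.log(toBenchmarkUserId('conv-26')); // "locomo-user-conv-26"
console.log(shouldIngest('conv-30')); // false until the filter is widened
```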
@@ -4,11 +4,25 @@ import { exit } from 'node:process';
 
  const baseUrl = process.env.MEMORY_USER_MEMORY_LOBEHUB_BASE_URL;
  const benchmarkLoCoMoFile = process.env.MEMORY_USER_MEMORY_BENCHMARKS_LOCOMO_DATASETS;
+ const webhookExtraHeaders = process.env.MEMORY_USER_MEMORY_WEBHOOK_HEADERS;
 
  const post = async (path: string, body: unknown) => {
+ const webhookHeaders = webhookExtraHeaders?.split(',')
+ .filter(Boolean)
+ .reduce<Record<string, string>>((acc, pair) => {
+ const [key, value] = pair.split('=').map((s) => s.trim());
+ if (key && value) {
+ acc[key] = value;
+ }
+ return acc;
+ }, {});
+
  const res = await fetch(new URL(path, baseUrl).toString(), {
  body: JSON.stringify(body),
- headers: { 'Content-Type': 'application/json' },
+ headers: {
+ 'Content-Type': 'application/json',
+ ...webhookHeaders,
+ },
  method: 'POST',
  });
 
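The new `MEMORY_USER_MEMORY_WEBHOOK_HEADERS` variable introduced in this hunk is read as a comma-separated list of `key=value` pairs and spread into the request headers. A small editorial illustration of that format, with placeholder header names, assuming the parsing shown above:

```ts
// Editorial illustration of the header-env format implied by the parser above.
// The header names and values are placeholders, not real configuration.
const raw = 'Authorization=Bearer test-token, X-Benchmark-Run=locomo';

const headers = raw
  .split(',')
  .filter(Boolean)
  .reduce<Record<string, string>>((acc, pair) => {
    const [key, value] = pair.split('=').map((s) => s.trim());
    if (key && value) acc[key] = value;
    return acc;
  }, {});

console.log(headers);
// { Authorization: 'Bearer test-token', 'X-Benchmark-Run': 'locomo' }
// Note: a value containing '=' is cut at its first '=' under this scheme.
```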
@@ -55,6 +69,8 @@ async function main() {
  userId,
  };
  try {
+ console.log(`[@lobechat/memory-user-memory/benchmarks/locomo] ingesting sample ${payload.sampleId} (${payload.sessions.length} sessions) for user ${userId}`);
+
  const res = await post('/api/webhooks/memory-extraction/benchmark-locomo', body);
  console.log(`[@lobechat/memory-user-memory/benchmarks/locomo] ingested sample ${payload.sampleId} -> insertedParts=${res.insertedParts ?? 'n/a'} memories=${res.extraction?.memoryIds?.length ?? 0} traceId=${res.extraction?.traceId ?? 'n/a'}`);
  } catch (err) {
@@ -49,6 +49,13 @@ const normalizeLayers = (layers?: string[]) => {
  return Array.from(set);
  };
 
+ interface SessionExtractionResult {
+ extraction?: Awaited<ReturnType<MemoryExtractionExecutor['extractBenchmarkSource']>>;
+ insertedParts: number;
+ sessionId: string;
+ sourceId: string;
+ }
+
  export const POST = async (req: Request) => {
  try {
  const { webhookHeaders } = parseMemoryExtractionConfig();
@@ -69,18 +76,38 @@ export const POST = async (req: Request) => {
  const parsed = ingestSchema.parse(json);
 
  const sourceModel = new UserMemorySourceBenchmarkLoCoMoModel(parsed.userId);
+ const baseSourceId = parsed.sourceId || `sample_${parsed.sampleId}`;
+ const executor = await MemoryExtractionExecutor.create();
+ const layers = normalizeLayers(parsed.layers);
+
+ const results: SessionExtractionResult[] = [];
+ let totalInsertedParts = 0;
+
+ await Promise.all(parsed.sessions.map(async (session) => {
+ const sessionSourceId = `${baseSourceId}_${session.sessionId}`;
+
+ try {
+ await sourceModel.upsertSource({
+ id: sessionSourceId,
+ metadata: {
+ ingestAt: new Date().toISOString(),
+ sessionId: session.sessionId,
+ sessionTimestamp: session.timestamp,
+ },
+ sampleId: parsed.sampleId,
+ sourceType: (parsed.source ?? MemorySourceType.BenchmarkLocomo) as string,
+ });
+ } catch (error) {
+ console.error(`[locomo-ingest-webhook] upsertSource failed for sourceId=${sessionSourceId}`, error);
+ return {
+ extraction: undefined,
+ insertedParts: 0,
+ sessionId: session.sessionId,
+ sourceId: sessionSourceId,
+ }
+ }
 
- const sourceId = parsed.sourceId || `sample_${parsed.sampleId}`;
- await sourceModel.upsertSource({
- id: sourceId,
- metadata: { ingestAt: new Date().toISOString() },
- sampleId: parsed.sampleId,
- sourceType: (parsed.source ?? MemorySourceType.BenchmarkLocomo) as string,
- });
-
- let partCounter = 0;
- const parts = parsed.sessions.flatMap((session) => {
- return session.turns.map((turn) => {
+ const parts = session.turns.map((turn, index) => {
  const createdAt = new Date(turn.createdAt);
  const metadata: Record<string, unknown> = {
  diaId: turn.diaId,
@@ -89,45 +116,59 @@ export const POST = async (req: Request) => {
  sessionId: session.sessionId,
  };
 
- const part = {
+ return {
  content: turn.text,
  createdAt,
  metadata,
- partIndex: partCounter,
+ partIndex: index,
  sessionId: session.sessionId,
  speaker: turn.speaker,
  };
- partCounter += 1;
- return part;
  });
- });
 
- await sourceModel.replaceParts(sourceId, parts);
+ sourceModel.replaceParts(sessionSourceId, parts);
 
- const contextProvider = new BenchmarkLocomoContextProvider({
- parts,
- sampleId: parsed.sampleId,
- sourceId,
- userId: parsed.userId,
- });
+ const contextProvider = new BenchmarkLocomoContextProvider({
+ parts,
+ sampleId: parsed.sampleId,
+ sourceId: sessionSourceId,
+ userId: parsed.userId,
+ });
 
- const executor = await MemoryExtractionExecutor.create();
- const layers = normalizeLayers(parsed.layers);
- const extraction = await executor.extractBenchmarkSource({
- contextProvider,
- forceAll: parsed.force ?? true,
- layers,
- parts,
- source: parsed.source ?? MemorySourceType.BenchmarkLocomo,
- sourceId,
- userId: parsed.userId,
- });
+ try {
+ const extraction = await executor.extractBenchmarkSource({
+ contextProvider,
+ forceAll: parsed.force ?? true,
+ layers,
+ parts,
+ source: parsed.source ?? MemorySourceType.BenchmarkLocomo,
+ sourceId: sessionSourceId,
+ userId: parsed.userId,
+ });
+
+ return {
+ extraction,
+ insertedParts: parts.length,
+ sessionId: session.sessionId,
+ sourceId: sessionSourceId,
+ }
+ } catch (error) {
+ console.error(`[locomo-ingest-webhook] extractBenchmarkSource failed for sourceId=${sessionSourceId}`, error);
+ return {
+ extraction: undefined,
+ insertedParts: parts.length,
+ sessionId: session.sessionId,
+ sourceId: sessionSourceId,
+ }
+ }
+ }))
 
  return NextResponse.json(
  {
- extraction,
- insertedParts: parts.length,
- sourceId,
+ baseSourceId,
+ insertedParts: totalInsertedParts,
+ results,
+ sourceIds: results.map((item) => item.sourceId),
  userId: parsed.userId,
  },
  { status: 200 },
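For readers tracing the refactor above, an editorial sketch, not taken from the package, of the response shape the new handler returns: the former single `extraction`/`sourceId` pair becomes an aggregate over per-session results, assuming each session's `SessionExtractionResult` is collected into `results` and `totalInsertedParts` sums the per-session `insertedParts`.

```ts
// Editorial sketch of the response shape implied by the refactored handler above.
// Assumes the per-session results of the Promise.all are collected into `results`
// and that `totalInsertedParts` accumulates `insertedParts` across sessions.
// SessionExtractionResult refers to the interface added in the earlier hunk.
interface BenchmarkLocomoIngestResponse {
  baseSourceId: string; // parsed.sourceId or `sample_${parsed.sampleId}`
  insertedParts: number; // total conversation parts written across sessions
  results: SessionExtractionResult[]; // one entry per session, including failures
  sourceIds: string[]; // `${baseSourceId}_${sessionId}` for each session
  userId: string;
}
```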