@lobehub/lobehub 2.0.0-next.194 → 2.0.0-next.195
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in the public registry.
- package/CHANGELOG.md +25 -0
- package/changelog/v1.json +9 -0
- package/package.json +1 -1
- package/packages/database/src/models/user.ts +8 -0
- package/packages/database/src/repositories/aiInfra/index.test.ts +11 -8
- package/packages/database/src/repositories/dataExporter/index.test.ts +11 -9
- package/packages/database/src/repositories/tableViewer/index.test.ts +13 -14
- package/packages/model-runtime/src/providers/zhipu/index.ts +6 -6
- package/src/envs/app.ts +2 -0
- package/src/libs/trpc/lambda/middleware/index.ts +1 -0
- package/src/libs/trpc/lambda/middleware/telemetry.test.ts +237 -0
- package/src/libs/trpc/lambda/middleware/telemetry.ts +74 -0
- package/src/server/routers/lambda/market/index.ts +1 -93
- package/src/server/routers/tools/_helpers/index.ts +1 -0
- package/src/server/routers/tools/_helpers/scheduleToolCallReport.ts +113 -0
- package/src/server/routers/tools/index.ts +2 -2
- package/src/server/routers/tools/market.ts +375 -0
- package/src/server/routers/tools/mcp.ts +77 -20
- package/src/services/chat/index.ts +0 -2
- package/src/services/codeInterpreter.ts +6 -6
- package/src/services/mcp.test.ts +60 -46
- package/src/services/mcp.ts +67 -48
- package/src/store/chat/slices/plugin/action.test.ts +191 -0
- package/src/store/chat/slices/plugin/actions/internals.ts +2 -18
- package/src/store/chat/slices/plugin/actions/pluginTypes.ts +31 -44
- package/packages/database/src/client/db.test.ts +0 -52
- package/packages/database/src/client/db.ts +0 -195
- package/packages/database/src/client/type.ts +0 -6
- package/src/server/routers/tools/codeInterpreter.ts +0 -255
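The most substantive application-level change in this release is in `package/src/store/chat/slices/plugin/actions/pluginTypes.ts`: the separately destructured `optimisticUpdateMessageContent`, `optimisticUpdatePluginState`, and `optimisticUpdateMessagePluginError` helpers are removed, and each tool-invoking action now writes content, plugin state, and plugin error through a single `optimisticUpdateToolMessage` call. The sketch below restates that payload mapping outside the store so it is easier to read than the raw hunks; the interfaces are placeholders for illustration, and only the field names and the success/error branching come from the diff itself.

```ts
// Placeholder types for illustration only; the real ones live in the LobeChat store.
interface MCPToolCallResult {
  content: string;
  error?: unknown;
  state?: unknown;
  success: boolean;
}

interface ToolMessageUpdate {
  content: string;
  pluginError?: unknown;
  pluginState?: unknown;
}

// The mapping each action now feeds into `optimisticUpdateToolMessage`:
// on success keep the state and clear the error, on failure keep the error
// and clear the state, always carrying the returned content.
const toToolMessageUpdate = (data: MCPToolCallResult): ToolMessageUpdate => ({
  content: data.content,
  pluginError: data.success ? undefined : data.error,
  pluginState: data.success ? data.state : undefined,
});
```

In the diff this pattern appears three times, once per plugin type (`invokeCloudCodeInterpreterTool`, `invokeKlavisTypePlugin`, `invokeMCPTypePlugin`), which accounts for the +31/-44 line counts on that file.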
package/src/store/chat/slices/plugin/actions/pluginTypes.ts
@@ -261,12 +261,6 @@ export const pluginTypes: StateCreator<
   },
 
   invokeCloudCodeInterpreterTool: async (id, payload) => {
-    const {
-      optimisticUpdateMessageContent,
-      optimisticUpdatePluginState,
-      optimisticUpdateMessagePluginError,
-    } = get();
-
     // Get message to extract topicId
     const message = dbMessageSelectors.getDbMessageById(id)(get());
 
@@ -336,16 +330,16 @@ export const pluginTypes: StateCreator<
 
     const context = operationId ? { operationId } : undefined;
 
-
-
-
-
-
-
-
-
-
-
+    // Use optimisticUpdateToolMessage to update content and state/error in a single call
+    await get().optimisticUpdateToolMessage(
+      id,
+      {
+        content: data.content,
+        pluginError: data.success ? undefined : data.error,
+        pluginState: data.success ? data.state : undefined,
+      },
+      context,
+    );
 
     // Handle exportFile: save exported file and associate with assistant message (parent)
     if (payload.apiName === 'exportFile' && data.success && data.state) {
@@ -422,12 +416,6 @@ export const pluginTypes: StateCreator<
   },
 
   invokeKlavisTypePlugin: async (id, payload) => {
-    const {
-      optimisticUpdateMessageContent,
-      optimisticUpdatePluginState,
-      optimisticUpdateMessagePluginError,
-    } = get();
-
     let data: MCPToolCallResult | undefined;
 
     // Get message to extract sessionId/topicId
@@ -510,13 +498,16 @@ export const pluginTypes: StateCreator<
     // operationId already declared above, reuse it
     const context = operationId ? { operationId } : undefined;
 
-
-
-
-
-
-
-
+    // Use optimisticUpdateToolMessage to update content and state/error in a single call
+    await get().optimisticUpdateToolMessage(
+      id,
+      {
+        content: data.content,
+        pluginError: data.success ? undefined : data.error,
+        pluginState: data.success ? data.state : undefined,
+      },
+      context,
+    );
 
     return data.content;
   },
@@ -561,12 +552,6 @@ export const pluginTypes: StateCreator<
   },
 
   invokeMCPTypePlugin: async (id, payload) => {
-    const {
-      optimisticUpdateMessageContent,
-      internal_constructToolsCallingContext,
-      optimisticUpdatePluginState,
-      optimisticUpdateMessagePluginError,
-    } = get();
     let data: MCPToolCallResult | undefined;
 
     // Get message to extract agentId/topicId
@@ -586,10 +571,9 @@ export const pluginTypes: StateCreator<
     );
 
     try {
-      const context = internal_constructToolsCallingContext(id);
       const result = await mcpService.invokeMcpToolCall(payload, {
         signal: abortController?.signal,
-        topicId:
+        topicId: message?.topicId,
       });
 
       if (!!result) data = result;
@@ -620,13 +604,16 @@ export const pluginTypes: StateCreator<
     // operationId already declared above, reuse it
     const context = operationId ? { operationId } : undefined;
 
-
-
-
-
-
-
-
+    // Use optimisticUpdateToolMessage to update content and state/error in a single call
+    await get().optimisticUpdateToolMessage(
+      id,
+      {
+        content: data.content,
+        pluginError: data.success ? undefined : data.error,
+        pluginState: data.success ? data.state : undefined,
+      },
+      context,
+    );
 
     return data.content;
   },
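In `invokeMCPTypePlugin`, the diff also drops `internal_constructToolsCallingContext(id)` and instead reads the topic directly off the tool message before handing it to `mcpService.invokeMcpToolCall`. Below is a minimal sketch of how the call options are now assembled; the types are placeholders, and only the option names (`signal`, `topicId`) and the `message?.topicId` source come from the hunks above.

```ts
// Placeholder shapes; the real message row and option types are defined elsewhere.
interface ToolMessageRow {
  topicId?: string;
}

interface McpToolCallOptions {
  signal?: AbortSignal;
  topicId?: string;
}

// Options for mcpService.invokeMcpToolCall are now built from the message row
// itself rather than from a separately constructed tools-calling context.
const buildMcpCallOptions = (
  message: ToolMessageRow | undefined,
  abortController?: AbortController,
): McpToolCallOptions => ({
  signal: abortController?.signal,
  topicId: message?.topicId,
});
```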
package/packages/database/src/client/db.test.ts (deleted)
@@ -1,52 +0,0 @@
-import { PGlite } from '@electric-sql/pglite';
-import { beforeEach, describe, expect, it, vi } from 'vitest';
-
-vi.mock('@electric-sql/pglite', () => ({
-  PGlite: vi.fn(() => ({})),
-}));
-
-vi.mock('@electric-sql/pglite/vector', () => ({
-  vector: vi.fn(),
-}));
-
-vi.mock('drizzle-orm/pglite', () => ({
-  drizzle: vi.fn(() => ({
-    dialect: {
-      migrate: vi.fn().mockResolvedValue(undefined),
-    },
-  })),
-}));
-
-beforeEach(() => {
-  vi.clearAllMocks();
-  vi.resetModules();
-});
-
-describe('DatabaseManager', () => {
-  describe('initializeDB', () => {
-    it('should initialize database with PGlite', async () => {
-      const { initializeDB } = await import('./db');
-      await initializeDB();
-
-      expect(PGlite).toHaveBeenCalledWith('idb://lobechat', {
-        extensions: { vector: expect.any(Function) },
-        relaxedDurability: true,
-      });
-    });
-
-    it('should only initialize once when called multiple times', async () => {
-      const { initializeDB } = await import('./db');
-      await Promise.all([initializeDB(), initializeDB()]);
-
-      expect(PGlite).toHaveBeenCalledTimes(1);
-    });
-  });
-
-  describe('clientDB proxy', () => {
-    it('should provide access to database after initialization', async () => {
-      const { clientDB, initializeDB } = await import('./db');
-      await initializeDB();
-      expect(clientDB).toBeDefined();
-    });
-  });
-});
package/packages/database/src/client/db.ts (deleted)
@@ -1,195 +0,0 @@
-import { PGlite } from '@electric-sql/pglite';
-import { vector } from '@electric-sql/pglite/vector';
-import { sql } from 'drizzle-orm';
-import { PgliteDatabase, drizzle } from 'drizzle-orm/pglite';
-import { Md5 } from 'ts-md5';
-
-import migrations from '../core/migrations.json';
-import { DrizzleMigrationModel } from '../models/drizzleMigration';
-import * as schema from '../schemas';
-
-const pgliteSchemaHashCache = 'LOBE_CHAT_PGLITE_SCHEMA_HASH';
-const DB_NAME = 'lobechat';
-
-type DrizzleInstance = PgliteDatabase<typeof schema>;
-
-class DatabaseManager {
-  private static instance: DatabaseManager;
-  private dbInstance: DrizzleInstance | null = null;
-  private initPromise: Promise<DrizzleInstance> | null = null;
-  private isLocalDBSchemaSynced = false;
-
-  private constructor() {}
-
-  static getInstance() {
-    if (!DatabaseManager.instance) {
-      DatabaseManager.instance = new DatabaseManager();
-    }
-    return DatabaseManager.instance;
-  }
-
-  private async migrate(): Promise<DrizzleInstance> {
-    if (this.isLocalDBSchemaSynced) return this.db;
-
-    let hash: string | undefined;
-    if (typeof localStorage !== 'undefined') {
-      const cacheHash = localStorage.getItem(pgliteSchemaHashCache);
-      hash = Md5.hashStr(JSON.stringify(migrations));
-      // if hash is the same, no need to migrate
-      if (hash === cacheHash) {
-        try {
-          const drizzleMigration = new DrizzleMigrationModel(this.db as any);
-
-          // Check if tables exist in database
-          const tableCount = await drizzleMigration.getTableCounts();
-
-          // If table count > 0, consider database properly initialized
-          if (tableCount > 0) {
-            this.isLocalDBSchemaSynced = true;
-            return this.db;
-          }
-        } catch (error) {
-          console.warn('Error checking table existence, proceeding with migration', error);
-        }
-      }
-    }
-
-    const start = Date.now();
-    try {
-      // @ts-expect-error - migrate internal API
-      await this.db.dialect.migrate(migrations, this.db.session, {});
-
-      if (typeof localStorage !== 'undefined' && hash) {
-        localStorage.setItem(pgliteSchemaHashCache, hash);
-      }
-
-      this.isLocalDBSchemaSynced = true;
-      console.info(`🗂 Migration success, take ${Date.now() - start}ms`);
-    } catch (cause) {
-      console.error('❌ Local database schema migration failed', cause);
-      throw cause;
-    }
-
-    return this.db;
-  }
-
-  async initialize(): Promise<DrizzleInstance> {
-    if (this.initPromise) return this.initPromise;
-
-    this.initPromise = (async () => {
-      if (this.dbInstance) return this.dbInstance;
-
-      const time = Date.now();
-
-      // Use pglite directly; wasm loading is handled automatically
-      const pglite = new PGlite(`idb://${DB_NAME}`, {
-        extensions: { vector },
-        relaxedDurability: true,
-      });
-
-      this.dbInstance = drizzle({ client: pglite, schema });
-
-      await this.migrate();
-
-      console.log(`✅ Database initialized in ${Date.now() - time}ms`);
-
-      return this.dbInstance;
-    })();
-
-    return this.initPromise;
-  }
-
-  get db(): DrizzleInstance {
-    if (!this.dbInstance) {
-      throw new Error('Database not initialized. Please call initialize() first.');
-    }
-    return this.dbInstance;
-  }
-
-  createProxy(): DrizzleInstance {
-    return new Proxy({} as DrizzleInstance, {
-      get: (target, prop) => {
-        return this.db[prop as keyof DrizzleInstance];
-      },
-    });
-  }
-
-  async resetDatabase(): Promise<void> {
-    // 1. Close existing PGlite connection
-    if (this.dbInstance) {
-      try {
-        // @ts-ignore
-        await (this.dbInstance.session as any).client.close();
-        console.log('PGlite instance closed successfully.');
-      } catch (e) {
-        console.error('Error closing PGlite instance:', e);
-      }
-    }
-
-    // 2. Reset database instance and initialization state
-    this.dbInstance = null;
-    this.initPromise = null;
-    this.isLocalDBSchemaSynced = false;
-
-    // 3. Delete IndexedDB database
-    return new Promise<void>((resolve, reject) => {
-      if (typeof indexedDB === 'undefined') {
-        console.warn('IndexedDB is not available, cannot delete database');
-        resolve();
-        return;
-      }
-
-      const dbName = `/pglite/${DB_NAME}`;
-      const request = indexedDB.deleteDatabase(dbName);
-
-      request.onsuccess = () => {
-        console.log(`✅ Database '${dbName}' reset successfully`);
-
-        if (typeof localStorage !== 'undefined') {
-          localStorage.removeItem(pgliteSchemaHashCache);
-        }
-
-        resolve();
-      };
-
-      // eslint-disable-next-line unicorn/prefer-add-event-listener
-      request.onerror = (event) => {
-        const error = (event.target as IDBOpenDBRequest)?.error;
-        console.error(`❌ Error resetting database '${dbName}':`, error);
-        reject(
-          new Error(
-            `Failed to reset database '${dbName}'. Error: ${error?.message || 'Unknown error'}`,
-          ),
-        );
-      };
-
-      request.onblocked = (event) => {
-        console.warn(`Deletion of database '${dbName}' is blocked.`, event);
-        reject(
-          new Error(
-            `Failed to reset database '${dbName}' because it is blocked by other open connections.`,
-          ),
-        );
-      };
-    });
-  }
-}
-
-// Export singleton
-const dbManager = DatabaseManager.getInstance();
-
-export const clientDB = dbManager.createProxy();
-
-export const initializeDB = () => dbManager.initialize();
-
-export const resetClientDatabase = async () => {
-  await dbManager.resetDatabase();
-};
-
-export const updateMigrationRecord = async (migrationHash: string) => {
-  await clientDB.execute(
-    sql`INSERT INTO "drizzle"."__drizzle_migrations" ("hash", "created_at") VALUES (${migrationHash}, ${Date.now()});`,
-  );
-
-  await initializeDB();
-};
package/src/server/routers/tools/codeInterpreter.ts (deleted)
@@ -1,255 +0,0 @@
-import { type CodeInterpreterToolName, MarketSDK } from '@lobehub/market-sdk';
-import debug from 'debug';
-import { z } from 'zod';
-
-import { DocumentModel } from '@/database/models/document';
-import { FileModel } from '@/database/models/file';
-import { authedProcedure, router } from '@/libs/trpc/lambda';
-import { marketUserInfo, serverDatabase } from '@/libs/trpc/lambda/middleware';
-import { generateTrustedClientToken } from '@/libs/trusted-client';
-import { FileS3 } from '@/server/modules/S3';
-
-const log = debug('lobe-server:tools:code-interpreter');
-
-const codeInterpreterProcedure = authedProcedure.use(serverDatabase).use(marketUserInfo);
-
-// Schema for tool call request
-const callToolSchema = z.object({
-  /** Market access token from OIDC (stored in user settings) */
-  marketAccessToken: z.string().optional(),
-  params: z.record(z.any()),
-  toolName: z.string(),
-  // Session context for isolation
-  topicId: z.string(),
-  userId: z.string(),
-});
-
-// Schema for getting export file upload URL
-const getExportFileUploadUrlSchema = z.object({
-  /** Original filename from sandbox */
-  filename: z.string(),
-  /** Topic ID for organizing files */
-  topicId: z.string(),
-});
-
-// Schema for saving exported file content to document
-const saveExportedFileContentSchema = z.object({
-  /** File content (text content from code-interpreter export) */
-  content: z.string(),
-  /** File ID to associate with the document */
-  fileId: z.string(),
-  /** File MIME type */
-  fileType: z.string(),
-  /** Filename */
-  filename: z.string(),
-  /** File URL */
-  url: z.string(),
-});
-
-export type CallToolInput = z.infer<typeof callToolSchema>;
-export type GetExportFileUploadUrlInput = z.infer<typeof getExportFileUploadUrlSchema>;
-export type SaveExportedFileContentInput = z.infer<typeof saveExportedFileContentSchema>;
-
-export interface CallToolResult {
-  error?: {
-    message: string;
-    name?: string;
-  };
-  result: any;
-  sessionExpiredAndRecreated?: boolean;
-  success: boolean;
-}
-
-export interface GetExportFileUploadUrlResult {
-  /** The download URL after file is uploaded */
-  downloadUrl: string;
-  error?: {
-    message: string;
-  };
-  /** The S3 key where file will be stored */
-  key: string;
-  success: boolean;
-  /** Pre-signed upload URL */
-  uploadUrl: string;
-}
-
-export interface SaveExportedFileContentResult {
-  /** Created document ID */
-  documentId?: string;
-  error?: {
-    message: string;
-  };
-  success: boolean;
-}
-
-export const codeInterpreterRouter = router({
-  callTool: codeInterpreterProcedure.input(callToolSchema).mutation(async ({ input, ctx }) => {
-    const { toolName, params, userId, topicId, marketAccessToken } = input;
-
-    log('Calling cloud code interpreter tool: %s with params: %O', toolName, {
-      params,
-      topicId,
-      userId,
-    });
-    log('Market access token available: %s', marketAccessToken ? 'yes' : 'no');
-
-    // Generate trusted client token if user info is available
-    const trustedClientToken = ctx.marketUserInfo
-      ? generateTrustedClientToken(ctx.marketUserInfo)
-      : undefined;
-
-    try {
-      // Initialize MarketSDK with market access token and trusted client token
-      const market = new MarketSDK({
-        accessToken: marketAccessToken,
-        baseURL: process.env.NEXT_PUBLIC_MARKET_BASE_URL,
-        trustedClientToken,
-      });
-
-      // Call market-sdk's runBuildInTool
-      // API signature: runBuildInTool(toolName, params, context, options?)
-      const response = await market.plugins.runBuildInTool(
-        toolName as CodeInterpreterToolName,
-        params as any,
-        { topicId, userId },
-      );
-
-      log('Cloud code interpreter tool %s response: %O', toolName, response);
-
-      if (!response.success) {
-        return {
-          error: {
-            message: response.error?.message || 'Unknown error',
-            name: response.error?.code,
-          },
-          result: null,
-          sessionExpiredAndRecreated: false,
-          success: false,
-        } as CallToolResult;
-      }
-
-      return {
-        result: response.data?.result,
-        sessionExpiredAndRecreated: response.data?.sessionExpiredAndRecreated || false,
-        success: true,
-      } as CallToolResult;
-    } catch (error) {
-      log('Error calling cloud code interpreter tool %s: %O', toolName, error);
-
-      return {
-        error: {
-          message: (error as Error).message,
-          name: (error as Error).name,
-        },
-        result: null,
-        sessionExpiredAndRecreated: false,
-        success: false,
-      } as CallToolResult;
-    }
-  }),
-
-  /**
-   * Generate a pre-signed upload URL for exporting files from sandbox
-   * The URL can be used by the sandbox to upload the file directly to S3
-   */
-  getExportFileUploadUrl: codeInterpreterProcedure
-    .input(getExportFileUploadUrlSchema)
-
-    // TODO if upload success, should add it path to files db
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    .mutation(async ({ ctx, input }) => {
-      const { filename, topicId } = input;
-
-      log('Generating export file upload URL for: %s in topic: %s', filename, topicId);
-
-      try {
-        const s3 = new FileS3();
-
-        // Generate a unique key for the exported file
-        // Format: code-interpreter-exports/{topicId}/{filename}
-        const key = `code-interpreter-exports/${topicId}/${filename}`;
-
-        // Generate pre-signed upload URL
-        const uploadUrl = await s3.createPreSignedUrl(key);
-
-        // Generate download URL (pre-signed for preview)
-        const downloadUrl = await s3.createPreSignedUrlForPreview(key);
-
-        log('Generated upload URL for key: %s', key);
-
-        return {
-          downloadUrl,
-          key,
-          success: true,
-          uploadUrl,
-        } as GetExportFileUploadUrlResult;
-      } catch (error) {
-        log('Error generating export file upload URL: %O', error);
-
-        return {
-          downloadUrl: '',
-          error: {
-            message: (error as Error).message,
-          },
-          key: '',
-          success: false,
-          uploadUrl: '',
-        } as GetExportFileUploadUrlResult;
-      }
-    }),
-
-  /**
-   * Save exported file content to documents table
-   * This creates a document record linked to the file, allowing content to be retrieved
-   * when querying messages with file attachments
-   */
-  saveExportedFileContent: codeInterpreterProcedure
-    .input(saveExportedFileContentSchema)
-    .use(serverDatabase)
-    .mutation(async ({ ctx, input }) => {
-      const { content, fileId, fileType, filename, url } = input;
-
-      log('Saving exported file content: fileId=%s, filename=%s', fileId, filename);
-
-      try {
-        const documentModel = new DocumentModel(ctx.serverDB, ctx.userId);
-        const fileModel = new FileModel(ctx.serverDB, ctx.userId);
-
-        // Verify the file exists
-        const file = await fileModel.findById(fileId);
-        if (!file) {
-          return {
-            error: { message: 'File not found' },
-            success: false,
-          } as SaveExportedFileContentResult;
-        }
-
-        // Create document record with the file content
-        const document = await documentModel.create({
-          content,
-          fileId,
-          fileType,
-          filename,
-          source: url,
-          sourceType: 'file',
-          title: filename,
-          totalCharCount: content.length,
-          totalLineCount: content.split('\n').length,
-        });
-
-        log('Created document for exported file: documentId=%s, fileId=%s', document.id, fileId);
-
-        return {
-          documentId: document.id,
-          success: true,
-        } as SaveExportedFileContentResult;
-      } catch (error) {
-        log('Error saving exported file content: %O', error);
-
-        return {
-          error: { message: (error as Error).message },
-          success: false,
-        } as SaveExportedFileContentResult;
-      }
-    }),
-});