@studious-lms/server 1.1.11 → 1.1.13
- package/LAB_CHAT_API_SPEC.md +518 -0
- package/dist/routers/_app.d.ts +442 -0
- package/dist/routers/_app.d.ts.map +1 -1
- package/dist/routers/_app.js +2 -0
- package/dist/routers/conversation.d.ts +4 -0
- package/dist/routers/conversation.d.ts.map +1 -1
- package/dist/routers/conversation.js +7 -0
- package/dist/routers/folder.d.ts +13 -0
- package/dist/routers/folder.d.ts.map +1 -1
- package/dist/routers/folder.js +20 -1
- package/dist/routers/labChat.d.ts +206 -0
- package/dist/routers/labChat.d.ts.map +1 -0
- package/dist/routers/labChat.js +741 -0
- package/dist/utils/aiUser.d.ts +13 -0
- package/dist/utils/aiUser.d.ts.map +1 -0
- package/dist/utils/aiUser.js +57 -0
- package/dist/utils/inference.d.ts +56 -0
- package/dist/utils/inference.d.ts.map +1 -0
- package/dist/utils/inference.js +135 -0
- package/package.json +2 -1
- package/prisma/migrations/20250925072732_add_lab_chat_system/migration.sql +25 -0
- package/prisma/schema.prisma +18 -0
- package/src/routers/_app.ts +2 -0
- package/src/routers/conversation.ts +7 -0
- package/src/routers/folder.ts +23 -0
- package/src/routers/labChat.ts +824 -0
- package/src/utils/aiUser.ts +65 -0
- package/src/utils/inference.ts +193 -0
package/dist/utils/aiUser.d.ts
ADDED
@@ -0,0 +1,13 @@
+/**
+ * Ensure AI assistant user exists in the database
+ */
+export declare function ensureAIUserExists(): Promise<void>;
+/**
+ * Get the AI user ID
+ */
+export declare function getAIUserId(): string;
+/**
+ * Check if a user ID belongs to the AI assistant
+ */
+export declare function isAIUser(userId: string): boolean;
+//# sourceMappingURL=aiUser.d.ts.map
package/dist/utils/aiUser.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"aiUser.d.ts","sourceRoot":"","sources":["../../src/utils/aiUser.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,wBAAsB,kBAAkB,IAAI,OAAO,CAAC,IAAI,CAAC,CA0CxD;AAED;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,CAEpC;AAED;;GAEG;AACH,wBAAgB,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAEhD"}
package/dist/utils/aiUser.js
ADDED
@@ -0,0 +1,57 @@
+import { prisma } from '../lib/prisma.js';
+import { logger } from './logger.js';
+const AI_USER_ID = 'AI_ASSISTANT';
+/**
+ * Ensure AI assistant user exists in the database
+ */
+export async function ensureAIUserExists() {
+    try {
+        // Check if AI user already exists
+        const existingAIUser = await prisma.user.findUnique({
+            where: { id: AI_USER_ID },
+        });
+        if (existingAIUser) {
+            return; // AI user already exists
+        }
+        // Create AI user
+        await prisma.user.create({
+            data: {
+                id: AI_USER_ID,
+                username: 'ai-assistant',
+                email: 'ai@studious-lms.com',
+                password: 'ai-system-user', // Not used for login
+                verified: true,
+                role: 'NONE', // Special role for AI
+                profile: {
+                    create: {
+                        displayName: 'AI Assistant',
+                        bio: 'Intelligent assistant for lab chats and educational support',
+                        profilePicture: null,
+                    },
+                },
+            },
+        });
+        logger.info('AI user created successfully', { userId: AI_USER_ID });
+    }
+    catch (error) {
+        // If user already exists (race condition), that's okay
+        if (error instanceof Error && error.message.includes('unique constraint')) {
+            logger.info('AI user already exists (race condition handled)');
+            return;
+        }
+        logger.error('Failed to create AI user', { error });
+        throw error;
+    }
+}
+/**
+ * Get the AI user ID
+ */
+export function getAIUserId() {
+    return AI_USER_ID;
+}
+/**
+ * Check if a user ID belongs to the AI assistant
+ */
+export function isAIUser(userId) {
+    return userId === AI_USER_ID;
+}
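For orientation, a minimal sketch of how these helpers fit together at a call site. The surrounding function is hypothetical, not part of the package:

```ts
// Hypothetical call site for the aiUser helpers.
import { ensureAIUserExists, getAIUserId, isAIUser } from './utils/aiUser.js';

async function postAsAssistant() {
  // Idempotent: creates the singleton 'AI_ASSISTANT' row only if missing,
  // and tolerates the unique-constraint error if two callers race.
  await ensureAIUserExists();
  const senderId = getAIUserId();      // always the fixed 'AI_ASSISTANT' id
  console.assert(isAIUser(senderId));  // plain equality check against that id
  return senderId;
}
```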
package/dist/utils/inference.d.ts
ADDED
@@ -0,0 +1,56 @@
+import OpenAI from 'openai';
+export declare const inferenceClient: OpenAI;
+export interface LabChatContext {
+    subject: string;
+    topic: string;
+    difficulty: 'beginner' | 'intermediate' | 'advanced';
+    objectives: string[];
+    resources?: string[];
+    persona: string;
+    constraints: string[];
+    examples?: any[];
+    metadata?: Record<string, any>;
+}
+export interface InferenceResponse {
+    content: string;
+    model: string;
+    tokensUsed: number;
+    finishReason: string;
+}
+/**
+ * Centralized function to send AI messages to conversations
+ * Handles database storage and Pusher broadcasting
+ */
+export declare function sendAIMessage(content: string, conversationId: string, options?: {
+    subject?: string;
+    customSender?: {
+        displayName: string;
+        profilePicture?: string | null;
+    };
+}): Promise<{
+    id: string;
+    content: string;
+    senderId: string;
+    conversationId: string;
+    createdAt: Date;
+}>;
+/**
+ * Simple inference function for general use
+ */
+export declare function generateInferenceResponse(subject: string, question: string, options?: {
+    model?: string;
+    maxTokens?: number;
+}): Promise<InferenceResponse>;
+/**
+ * Validate inference configuration
+ */
+export declare function validateInferenceConfig(): boolean;
+/**
+ * Get available inference models (for admin/config purposes)
+ */
+export declare function getAvailableModels(): Promise<string[]>;
+/**
+ * Estimate token count for a message (rough approximation)
+ */
+export declare function estimateTokenCount(text: string): number;
+//# sourceMappingURL=inference.d.ts.map
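LabChatContext is not referenced by any of the declared signatures above, so it presumably feeds the prompt building in the new labChat router. An illustrative value showing the expected shape; all field values here are invented, and the internal-style import path assumes a call site inside the package:

```ts
// Illustrative only — invented values showing the shape of LabChatContext.
import type { LabChatContext } from './utils/inference.js';

const context: LabChatContext = {
  subject: 'Chemistry',
  topic: 'Acid–base titration',
  difficulty: 'beginner',
  objectives: ['Identify the equivalence point', 'Read a burette correctly'],
  persona: 'Patient lab instructor who asks guiding questions',
  constraints: ['Do not give away final answers', 'Stay on topic'],
  resources: ['https://example.com/titration-guide'], // optional
};
```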
package/dist/utils/inference.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../src/utils/inference.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAW5B,eAAO,MAAM,eAAe,QAG1B,CAAC;AAGH,MAAM,WAAW,cAAc;IAC7B,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,UAAU,GAAG,cAAc,GAAG,UAAU,CAAC;IACrD,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,EAAE,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,CAAC,EAAE,GAAG,EAAE,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAChC;AAED,MAAM,WAAW,iBAAiB;IAChC,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;CACtB;AAED;;;GAGG;AACH,wBAAsB,aAAa,CACjC,OAAO,EAAE,MAAM,EACf,cAAc,EAAE,MAAM,EACtB,OAAO,GAAE;IACP,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,YAAY,CAAC,EAAE;QACb,WAAW,EAAE,MAAM,CAAC;QACpB,cAAc,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAChC,CAAC;CACE,GACL,OAAO,CAAC;IACT,EAAE,EAAE,MAAM,CAAC;IACX,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,MAAM,CAAC;IACjB,cAAc,EAAE,MAAM,CAAC;IACvB,SAAS,EAAE,IAAI,CAAC;CACjB,CAAC,CAmDD;AAED;;GAEG;AACH,wBAAsB,yBAAyB,CAC7C,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,MAAM,EAChB,OAAO,GAAE;IACP,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,MAAM,CAAC;CACf,GACL,OAAO,CAAC,iBAAiB,CAAC,CAsC5B;AAED;;GAEG;AACH,wBAAgB,uBAAuB,IAAI,OAAO,CAMjD;AAED;;GAEG;AACH,wBAAsB,kBAAkB,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC,CAW5D;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAGvD"}
package/dist/utils/inference.js
ADDED
@@ -0,0 +1,135 @@
+import OpenAI from 'openai';
+import { logger } from './logger.js';
+import { prisma } from '../lib/prisma.js';
+import { pusher } from '../lib/pusher.js';
+import { ensureAIUserExists, getAIUserId } from './aiUser.js';
+// Initialize inference client (Cohere via OpenAI SDK)
+logger.info('Inference API Key', { apiKey: process.env.INFERENCE_API_KEY });
+logger.info('Inference API Base URL', { baseURL: process.env.INFERENCE_API_BASE_URL });
+export const inferenceClient = new OpenAI({
+    apiKey: process.env.INFERENCE_API_KEY,
+    baseURL: process.env.INFERENCE_API_BASE_URL,
+});
+/**
+ * Centralized function to send AI messages to conversations
+ * Handles database storage and Pusher broadcasting
+ */
+export async function sendAIMessage(content, conversationId, options = {}) {
+    // Ensure AI user exists
+    await ensureAIUserExists();
+    // Create message in database
+    const aiMessage = await prisma.message.create({
+        data: {
+            content,
+            senderId: getAIUserId(),
+            conversationId,
+        },
+    });
+    logger.info('AI Message sent', {
+        messageId: aiMessage.id,
+        conversationId,
+        contentLength: content.length,
+    });
+    // Prepare sender info
+    const senderInfo = {
+        id: getAIUserId(),
+        username: 'AI Assistant',
+        profile: {
+            displayName: options.customSender?.displayName || `${options.subject || 'AI'} Assistant`,
+            profilePicture: options.customSender?.profilePicture || null,
+        },
+    };
+    // Broadcast via Pusher
+    try {
+        await pusher.trigger(`conversation-${conversationId}`, 'new-message', {
+            id: aiMessage.id,
+            content: aiMessage.content,
+            senderId: getAIUserId(),
+            conversationId: aiMessage.conversationId,
+            createdAt: aiMessage.createdAt,
+            sender: senderInfo,
+            mentionedUserIds: [],
+        });
+    }
+    catch (error) {
+        logger.error('Failed to broadcast AI message:', { error, messageId: aiMessage.id });
+    }
+    return {
+        id: aiMessage.id,
+        content: aiMessage.content,
+        senderId: getAIUserId(),
+        conversationId: aiMessage.conversationId,
+        createdAt: aiMessage.createdAt,
+    };
+}
+/**
+ * Simple inference function for general use
+ */
+export async function generateInferenceResponse(subject, question, options = {}) {
+    const { model = 'command-r-plus', maxTokens = 500 } = options;
+    try {
+        const completion = await inferenceClient.chat.completions.create({
+            model,
+            messages: [
+                {
+                    role: 'system',
+                    content: `You are a helpful educational assistant for ${subject}. Provide clear, concise, and accurate answers. Keep responses educational and appropriate for students.`,
+                },
+                {
+                    role: 'user',
+                    content: question,
+                },
+            ],
+            max_tokens: maxTokens,
+            temperature: 0.5,
+            // Remove OpenAI-specific parameters for Cohere compatibility
+        });
+        const response = completion.choices[0]?.message?.content;
+        if (!response) {
+            throw new Error('No response generated from inference API');
+        }
+        return {
+            content: response,
+            model,
+            tokensUsed: completion.usage?.total_tokens || 0,
+            finishReason: completion.choices[0]?.finish_reason || 'unknown',
+        };
+    }
+    catch (error) {
+        logger.error('Failed to generate inference response', { error, subject, question: question.substring(0, 50) + '...' });
+        throw error;
+    }
+}
+/**
+ * Validate inference configuration
+ */
+export function validateInferenceConfig() {
+    if (!process.env.INFERENCE_API_KEY) {
+        logger.error('Inference API key not configured for Cohere');
+        return false;
+    }
+    return true;
+}
+/**
+ * Get available inference models (for admin/config purposes)
+ */
+export async function getAvailableModels() {
+    try {
+        const models = await inferenceClient.models.list();
+        return models.data
+            .filter(model => model.id.includes('command'))
+            .map(model => model.id)
+            .sort();
+    }
+    catch (error) {
+        logger.error('Failed to fetch inference models', { error });
+        return ['command-r-plus', 'command-r', 'command-light']; // Fallback Cohere models
+    }
+}
+/**
+ * Estimate token count for a message (rough approximation)
+ */
+export function estimateTokenCount(text) {
+    // Rough approximation: 1 token ≈ 4 characters for English text
+    return Math.ceil(text.length / 4);
+}
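Routing Cohere through the OpenAI SDK means any OpenAI-compatible endpoint works via INFERENCE_API_BASE_URL. A hedged end-to-end sketch of how the two exported helpers compose; the wrapper function, subject, and arguments are placeholders, and both environment variables must be set for this to run:

```ts
// Hypothetical end-to-end flow: generate a completion, then persist and broadcast it.
import { generateInferenceResponse, sendAIMessage, validateInferenceConfig } from './utils/inference.js';

async function answerInConversation(conversationId: string, question: string) {
  if (!validateInferenceConfig()) {
    throw new Error('INFERENCE_API_KEY is not set');
  }
  // Defaults to the 'command-r-plus' model with a 500-token cap.
  const { content, tokensUsed } = await generateInferenceResponse('Biology', question);
  // Stores the message as the AI user and fires the Pusher 'new-message' event.
  const message = await sendAIMessage(content, conversationId, { subject: 'Biology' });
  return { message, tokensUsed };
}
```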
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@studious-lms/server",
-  "version": "1.1.11",
+  "version": "1.1.13",
   "description": "Backend server for Studious application",
   "main": "dist/exportType.js",
   "types": "dist/exportType.d.ts",
@@ -30,6 +30,7 @@
   "dotenv": "^16.5.0",
   "express": "^4.18.3",
   "nodemailer": "^7.0.4",
+  "openai": "^5.23.0",
   "prisma": "^6.7.0",
   "pusher": "^5.2.0",
   "sharp": "^0.34.2",
package/prisma/migrations/20250925072732_add_lab_chat_system/migration.sql
ADDED
@@ -0,0 +1,25 @@
+-- CreateTable
+CREATE TABLE "LabChat" (
+    "id" TEXT NOT NULL,
+    "title" TEXT NOT NULL,
+    "context" TEXT NOT NULL,
+    "classId" TEXT NOT NULL,
+    "conversationId" TEXT NOT NULL,
+    "createdById" TEXT NOT NULL,
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "updatedAt" TIMESTAMP(3) NOT NULL,
+
+    CONSTRAINT "LabChat_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateIndex
+CREATE UNIQUE INDEX "LabChat_conversationId_key" ON "LabChat"("conversationId");
+
+-- AddForeignKey
+ALTER TABLE "LabChat" ADD CONSTRAINT "LabChat_classId_fkey" FOREIGN KEY ("classId") REFERENCES "Class"("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "LabChat" ADD CONSTRAINT "LabChat_conversationId_fkey" FOREIGN KEY ("conversationId") REFERENCES "Conversation"("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "LabChat" ADD CONSTRAINT "LabChat_createdById_fkey" FOREIGN KEY ("createdById") REFERENCES "User"("id") ON DELETE NO ACTION ON UPDATE CASCADE;
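The unique index on "conversationId" makes the conversation-to-lab-chat link strictly one-to-one, so a lab chat can be resolved from its conversation with a single indexed read. A minimal sketch with the generated Prisma client; the wrapper function is hypothetical:

```ts
// Hypothetical lookup: the unique index "LabChat_conversationId_key" makes
// conversationId a findUnique key, so conversation → lab chat is one indexed read.
import { prisma } from '../lib/prisma.js';

async function labChatForConversation(conversationId: string) {
  return prisma.labChat.findUnique({
    where: { conversationId },
    select: { id: true, title: true, context: true },
  });
}
```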
package/prisma/schema.prisma
CHANGED
@@ -80,6 +80,7 @@ model User {
   conversationMemberships ConversationMember[]
   sentMessages Message[] @relation("SentMessages")
   mentions Mention[] @relation("UserMentions")
+  createdLabChats LabChat[] @relation("CreatedLabChats")
 
 }
 
@@ -118,6 +119,7 @@ model Class {
   gradingBoundaries GradingBoundary[] @relation("ClassToGradingBoundary")
   draftFiles File[] @relation("ClassDraftFiles")
   classFiles Folder? @relation("ClassFiles")
+  labChats LabChat[] @relation("ClassLabChats")
 
   school School? @relation(fields: [schoolId], references: [id])
   schoolId String?
@@ -334,6 +336,22 @@ model Conversation {
 
   members ConversationMember[]
   messages Message[]
+  labChat LabChat?
+}
+
+model LabChat {
+  id String @id @default(uuid())
+  title String
+  context String // JSON string for LLM context
+  classId String
+  conversationId String @unique
+  createdById String // Teacher who created the lab
+  createdAt DateTime @default(now())
+  updatedAt DateTime @updatedAt
+
+  class Class @relation("ClassLabChats", fields: [classId], references: [id], onDelete: Cascade)
+  conversation Conversation @relation(fields: [conversationId], references: [id], onDelete: Cascade)
+  createdBy User @relation("CreatedLabChats", fields: [createdById], references: [id], onDelete: NoAction)
 }
 
 model ConversationMember {
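Given the three relations, creating a lab chat amounts to connecting an existing class, conversation, and teacher. A sketch under those assumptions; the function, its arguments, and the literal values are hypothetical:

```ts
// Hypothetical creation flow; `prisma` is the package's shared client.
import { prisma } from '../lib/prisma.js';

async function createLabChat(classId: string, conversationId: string, teacherId: string) {
  // The @unique on conversationId guarantees at most one LabChat per conversation.
  return prisma.labChat.create({
    data: {
      title: 'Titration lab',
      // `context` is stored as a plain JSON string, per the schema comment.
      context: JSON.stringify({ subject: 'Chemistry', difficulty: 'beginner' }),
      class: { connect: { id: classId } },
      conversation: { connect: { id: conversationId } },
      createdBy: { connect: { id: teacherId } },
    },
  });
}
```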
package/src/routers/_app.ts
CHANGED
@@ -15,6 +15,7 @@ import { folderRouter } from "./folder.js";
 import { notificationRouter } from "./notifications.js";
 import { conversationRouter } from "./conversation.js";
 import { messageRouter } from "./message.js";
+import { labChatRouter } from "./labChat.js";
 
 export const appRouter = createTRPCRouter({
   class: classRouter,
@@ -31,6 +32,7 @@ export const appRouter = createTRPCRouter({
   notification: notificationRouter,
   conversation: conversationRouter,
   message: messageRouter,
+  labChat: labChatRouter,
 });
 
 // Export type router type definition
package/src/routers/conversation.ts
CHANGED
@@ -16,6 +16,12 @@ export const conversationRouter = createTRPCRouter({
         },
       },
       include: {
+        labChat: {
+          select: {
+            id: true,
+            title: true,
+          },
+        },
         members: {
           include: {
             user: {
@@ -101,6 +107,7 @@ export const conversationRouter = createTRPCRouter({
       name: conversation.name,
      createdAt: conversation.createdAt,
      updatedAt: conversation.updatedAt,
+      labChat: conversation.labChat,
      members: conversation.members,
      lastMessage: conversation.messages[0] || null,
      unreadCount,
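Downstream, the new labChat field presumably lets clients distinguish lab conversations from ordinary ones. A hypothetical consumer; the payload shape is inferred from the select above, and the nullability of name is a guess:

```ts
// Hypothetical consumer of the enriched conversation payload.
function conversationLabel(conversation: {
  name: string | null;
  labChat: { id: string; title: string } | null;
}) {
  // Lab conversations surface the lab title; ordinary ones keep their name.
  return conversation.labChat
    ? `Lab: ${conversation.labChat.title}`
    : conversation.name ?? 'Conversation';
}
```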
package/src/routers/folder.ts
CHANGED
@@ -3,6 +3,7 @@ import { createTRPCRouter, protectedProcedure, protectedClassMemberProcedure, pr
 import { TRPCError } from "@trpc/server";
 import { prisma } from "../lib/prisma.js";
 import { uploadFiles, type UploadedFile } from "../lib/fileUpload.js";
+import { type Folder } from "@prisma/client";
 
 const fileSchema = z.object({
   name: z.string(),
@@ -766,4 +767,26 @@ export const folderRouter = createTRPCRouter({
 
       return updatedFolder;
     }),
+  getParents: protectedProcedure
+    .input(z.object({
+      folderId: z.string(),
+    }))
+    .query(async ({ ctx, input }) => {
+      const { folderId } = input;
+
+      let currentParent: string | null = folderId;
+      const parents: Folder[] = [];
+      while (currentParent) {
+        const parent = await prisma.folder.findFirst({
+          where: {
+            id: currentParent,
+          },
+        });
+
+        currentParent = parent?.parentFolderId;
+        parents.push(parent);
+      }
+
+      return parents;
+    }),
 });
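As published, the getParents loop pushes a possibly-null parent into parents: Folder[] and assigns parent?.parentFolderId (which can be undefined) to a string | null variable. A tightened sketch of the same ancestor walk, for comparison only, not the shipped code:

```ts
// Sketch: null-safe variant of the getParents ancestor walk.
import { PrismaClient, type Folder } from '@prisma/client';

const prisma = new PrismaClient();

async function getParentChain(folderId: string): Promise<Folder[]> {
  let currentId: string | null = folderId;
  const parents: Folder[] = [];
  while (currentId) {
    const parent = await prisma.folder.findFirst({ where: { id: currentId } });
    if (!parent) break;                // missing row: stop rather than pushing null
    parents.push(parent);
    currentId = parent.parentFolderId; // string | null, so the walk ends at the root
  }
  return parents;
}
```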