@goscribe/server 1.1.7 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/.env.example +43 -0
  2. package/check-difficulty.cjs +14 -0
  3. package/check-questions.cjs +14 -0
  4. package/db-summary.cjs +22 -0
  5. package/dist/routers/auth.js +1 -1
  6. package/mcq-test.cjs +36 -0
  7. package/package.json +10 -2
  8. package/prisma/migrations/20260413143206_init/migration.sql +873 -0
  9. package/prisma/schema.prisma +485 -292
  10. package/src/context.ts +4 -1
  11. package/src/lib/activity_human_description.test.ts +28 -0
  12. package/src/lib/activity_human_description.ts +239 -0
  13. package/src/lib/activity_log_service.test.ts +37 -0
  14. package/src/lib/activity_log_service.ts +353 -0
  15. package/src/lib/ai-session.ts +194 -112
  16. package/src/lib/constants.ts +14 -0
  17. package/src/lib/email.ts +230 -0
  18. package/src/lib/env.ts +23 -6
  19. package/src/lib/inference.ts +3 -3
  20. package/src/lib/logger.ts +26 -9
  21. package/src/lib/notification-service.test.ts +106 -0
  22. package/src/lib/notification-service.ts +677 -0
  23. package/src/lib/prisma.ts +6 -1
  24. package/src/lib/pusher.ts +90 -6
  25. package/src/lib/retry.ts +61 -0
  26. package/src/lib/storage.ts +2 -2
  27. package/src/lib/stripe.ts +39 -0
  28. package/src/lib/subscription_service.ts +722 -0
  29. package/src/lib/usage_service.ts +74 -0
  30. package/src/lib/worksheet-generation.test.ts +31 -0
  31. package/src/lib/worksheet-generation.ts +139 -0
  32. package/src/lib/workspace-access.ts +13 -0
  33. package/src/routers/_app.ts +11 -0
  34. package/src/routers/admin.ts +710 -0
  35. package/src/routers/annotations.ts +227 -0
  36. package/src/routers/auth.ts +432 -33
  37. package/src/routers/copilot.ts +719 -0
  38. package/src/routers/flashcards.ts +207 -80
  39. package/src/routers/members.ts +280 -80
  40. package/src/routers/notifications.ts +142 -0
  41. package/src/routers/payment.ts +448 -0
  42. package/src/routers/podcast.ts +133 -108
  43. package/src/routers/studyguide.ts +80 -74
  44. package/src/routers/worksheets.ts +300 -80
  45. package/src/routers/workspace.ts +538 -328
  46. package/src/scripts/purge-deleted-users.ts +167 -0
  47. package/src/server.ts +140 -12
  48. package/src/services/flashcard-progress.service.ts +52 -43
  49. package/src/trpc.ts +184 -5
  50. package/test-generate.js +30 -0
  51. package/test-ratio.cjs +9 -0
  52. package/zod-test.cjs +22 -0
  53. package/prisma/migrations/20250826124819_add_worksheet_difficulty_and_estimated_time/migration.sql +0 -213
  54. package/prisma/migrations/20250826133236_add_worksheet_question_progress/migration.sql +0 -31
  55. package/prisma/seed.mjs +0 -135
  56. package/src/routers/meetingsummary.ts +0 -416
@@ -1,13 +1,20 @@
1
1
  import { z } from 'zod';
2
2
  import { TRPCError } from '@trpc/server';
3
- import { router, publicProcedure, authedProcedure } from '../trpc.js';
3
+ import { router, publicProcedure, authedProcedure, verifiedProcedure } from '../trpc.js';
4
4
  import { supabaseClient } from '../lib/storage.js';
5
- import { ArtifactType } from '@prisma/client';
5
+ import { ArtifactType } from '../lib/constants.js';
6
6
  import { aiSessionService } from '../lib/ai-session.js';
7
7
  import PusherService from '../lib/pusher.js';
8
8
  import { members } from './members.js';
9
9
  import { logger } from '../lib/logger.js';
10
10
  import type { PrismaClient } from '@prisma/client';
11
+ import { getUserStorageLimit } from '../lib/subscription_service.js';
12
+ import { getUserUsage, getUserPlanLimits } from '../lib/usage_service.js';
13
+ import {
14
+ notifyArtifactFailed,
15
+ notifyArtifactReady,
16
+ notifyWorkspaceDeleted,
17
+ } from '../lib/notification-service.js';
11
18
 
12
19
  // Helper function to update and emit analysis progress
13
20
  async function updateAnalysisProgress(
@@ -22,6 +29,65 @@ async function updateAnalysisProgress(
22
29
  await PusherService.emitAnalysisProgress(workspaceId, progress);
23
30
  }
24
31
 
32
+ // DRY helper to build progress steps for artifact generation pipeline
33
+ type StepStatus = 'pending' | 'in_progress' | 'completed' | 'skipped' | 'error';
34
+ const PIPELINE_STEPS = ['fileUpload', 'fileAnalysis', 'studyGuide', 'flashcards'] as const;
35
+
36
+ function buildProgressSteps(
37
+ currentStep: typeof PIPELINE_STEPS[number],
38
+ currentStatus: StepStatus,
39
+ config: { generateStudyGuide: boolean; generateFlashcards: boolean },
40
+ overrides?: Partial<Record<typeof PIPELINE_STEPS[number], StepStatus>>
41
+ ): Record<string, { order: number; status: StepStatus }> {
42
+ const stepIndex = PIPELINE_STEPS.indexOf(currentStep);
43
+ const steps: Record<string, { order: number; status: StepStatus }> = {};
44
+
45
+ for (let i = 0; i < PIPELINE_STEPS.length; i++) {
46
+ const step = PIPELINE_STEPS[i];
47
+ let status: StepStatus;
48
+
49
+ if (overrides?.[step]) {
50
+ status = overrides[step]!;
51
+ } else if (i < stepIndex) {
52
+ status = 'completed';
53
+ } else if (i === stepIndex) {
54
+ status = currentStatus;
55
+ } else {
56
+ // Future steps: check if they're configured
57
+ if (step === 'studyGuide' && !config.generateStudyGuide) {
58
+ status = 'skipped';
59
+ } else if (step === 'flashcards' && !config.generateFlashcards) {
60
+ status = 'skipped';
61
+ } else {
62
+ status = 'pending';
63
+ }
64
+ }
65
+
66
+ steps[step] = { order: i + 1, status };
67
+ }
68
+
69
+ return steps;
70
+ }
71
+
72
+ function buildProgress(
73
+ status: string,
74
+ filename: string,
75
+ fileType: string,
76
+ currentStep: typeof PIPELINE_STEPS[number],
77
+ currentStepStatus: StepStatus,
78
+ config: { generateStudyGuide: boolean; generateFlashcards: boolean },
79
+ extra?: Record<string, any>
80
+ ) {
81
+ return {
82
+ status,
83
+ filename,
84
+ fileType,
85
+ startedAt: new Date().toISOString(),
86
+ steps: buildProgressSteps(currentStep, currentStepStatus, config, extra as any),
87
+ ...extra,
88
+ };
89
+ }
90
+
25
91
  // Helper function to calculate search relevance score
26
92
  function calculateRelevance(query: string, ...texts: (string | null | undefined)[]): number {
27
93
  const queryLower = query.toLowerCase();
@@ -86,6 +152,42 @@ export const workspace = router({
86
152
  return { workspaces, folders };
87
153
  }),
88
154
 
155
+ /**
156
+ * Fetches the entire directory tree for the user.
157
+ * Includes Folders, Workspaces (files), and Uploads (sub-files).
158
+ */
159
+ getTree: authedProcedure
160
+ .query(async ({ ctx }) => {
161
+ const userId = ctx.session.user.id;
162
+
163
+ // 1. Fetch all folders
164
+ const allFolders = await ctx.db.folder.findMany({
165
+ where: { ownerId: userId },
166
+ orderBy: { updatedAt: 'desc' },
167
+ });
168
+
169
+ // 2. Fetch all workspaces
170
+ const allWorkspaces = await ctx.db.workspace.findMany({
171
+ where: { ownerId: userId },
172
+ include: {
173
+ uploads: {
174
+ select: {
175
+ id: true,
176
+ name: true,
177
+ mimeType: true,
178
+ createdAt: true,
179
+ }
180
+ }
181
+ },
182
+ orderBy: { updatedAt: 'desc' },
183
+ });
184
+
185
+ return {
186
+ folders: allFolders,
187
+ workspaces: allWorkspaces,
188
+ };
189
+ }),
190
+
89
191
  create: authedProcedure
90
192
  .input(z.object({
91
193
  name: z.string().min(1).max(100),
@@ -114,7 +216,12 @@ export const workspace = router({
114
216
  },
115
217
  });
116
218
 
117
- aiSessionService.initSession(ws.id, ctx.session.user.id);
219
+ await aiSessionService.initSession(ws.id, ctx.session.user.id).catch((err) => {
220
+ logger.error('Failed to init AI session on workspace creation:', err);
221
+ });
222
+
223
+ await PusherService.emitLibraryUpdate(ctx.session.user.id);
224
+
118
225
  return ws;
119
226
  }),
120
227
  createFolder: authedProcedure
@@ -132,16 +239,28 @@ export const workspace = router({
132
239
  parentId: input.parentId ?? null,
133
240
  },
134
241
  });
242
+
243
+ await PusherService.emitLibraryUpdate(ctx.session.user.id);
244
+
135
245
  return folder;
136
246
  }),
137
247
  updateFolder: authedProcedure
138
248
  .input(z.object({
139
249
  id: z.string(),
140
250
  name: z.string().min(1).max(100).optional(),
141
- color: z.string().optional(),
251
+ markerColor: z.string().nullable().optional(),
142
252
  }))
143
253
  .mutation(async ({ ctx, input }) => {
144
- const folder = await ctx.db.folder.update({ where: { id: input.id }, data: { name: input.name, color: input.color ?? '#9D00FF' } });
254
+ const folder = await ctx.db.folder.update({
255
+ where: { id: input.id },
256
+ data: {
257
+ name: input.name,
258
+ markerColor: input.markerColor
259
+ }
260
+ });
261
+
262
+ await PusherService.emitLibraryUpdate(ctx.session.user.id);
263
+
145
264
  return folder;
146
265
  }),
147
266
  deleteFolder: authedProcedure
@@ -150,6 +269,9 @@ export const workspace = router({
150
269
  }))
151
270
  .mutation(async ({ ctx, input }) => {
152
271
  const folder = await ctx.db.folder.delete({ where: { id: input.id } });
272
+
273
+ await PusherService.emitLibraryUpdate(ctx.session.user.id);
274
+
153
275
  return folder;
154
276
  }),
155
277
  get: authedProcedure
@@ -182,24 +304,132 @@ export const workspace = router({
182
304
  });
183
305
 
184
306
  const spaceLeft = await ctx.db.fileAsset.aggregate({
185
- where: { workspaceId: { in: workspaces.map(ws => ws.id) }, userId: ctx.session.user.id },
307
+ where: { workspaceId: { in: workspaces.map((ws: any) => ws.id) }, userId: ctx.session.user.id },
186
308
  _sum: { size: true },
187
309
  });
188
310
 
311
+ const storageLimit = await getUserStorageLimit(ctx.session.user.id);
312
+
189
313
  return {
190
314
  workspaces: workspaces.length,
191
315
  folders: folders.length,
192
316
  lastUpdated: lastUpdated?.updatedAt,
193
317
  spaceUsed: spaceLeft._sum?.size ?? 0,
194
- spaceTotal: 1000000000,
318
+ spaceTotal: storageLimit,
195
319
  };
196
320
  }),
321
+
322
+ // Study analytics: streaks, flashcard mastery, worksheet accuracy
323
+ getStudyAnalytics: authedProcedure
324
+ .query(async ({ ctx }) => {
325
+ const userId = ctx.session.user.id;
326
+
327
+ // Gather all study activity dates
328
+ const flashcardProgress = await ctx.db.flashcardProgress.findMany({
329
+ where: { userId },
330
+ select: { lastStudiedAt: true },
331
+ });
332
+
333
+ const worksheetProgress = await ctx.db.worksheetQuestionProgress.findMany({
334
+ where: { userId },
335
+ select: { updatedAt: true, completedAt: true },
336
+ });
337
+
338
+ // Build a set of unique study days (YYYY-MM-DD)
339
+ const studyDays = new Set<string>();
340
+ for (const fp of flashcardProgress) {
341
+ if (fp.lastStudiedAt) {
342
+ studyDays.add(fp.lastStudiedAt.toISOString().split('T')[0]);
343
+ }
344
+ }
345
+ for (const wp of worksheetProgress) {
346
+ if (wp.completedAt) {
347
+ studyDays.add(wp.completedAt.toISOString().split('T')[0]);
348
+ } else {
349
+ studyDays.add(wp.updatedAt.toISOString().split('T')[0]);
350
+ }
351
+ }
352
+
353
+ // Calculate streak (consecutive days ending today or yesterday)
354
+ const sortedDays = [...studyDays].sort().reverse();
355
+ let streak = 0;
356
+
357
+ if (sortedDays.length > 0) {
358
+ const today = new Date();
359
+ today.setHours(0, 0, 0, 0);
360
+ const yesterday = new Date(today);
361
+ yesterday.setDate(yesterday.getDate() - 1);
362
+
363
+ const todayStr = today.toISOString().split('T')[0];
364
+ const yesterdayStr = yesterday.toISOString().split('T')[0];
365
+
366
+ // Streak only counts if the most recent study day is today or yesterday
367
+ if (sortedDays[0] === todayStr || sortedDays[0] === yesterdayStr) {
368
+ streak = 1;
369
+ for (let i = 1; i < sortedDays.length; i++) {
370
+ const current = new Date(sortedDays[i - 1]);
371
+ const prev = new Date(sortedDays[i]);
372
+ const diffDays = (current.getTime() - prev.getTime()) / (1000 * 60 * 60 * 24);
373
+ if (diffDays === 1) {
374
+ streak++;
375
+ } else {
376
+ break;
377
+ }
378
+ }
379
+ }
380
+ }
381
+
382
+ // Weekly activity (last 7 days)
383
+ const weeklyActivity: boolean[] = [];
384
+ const today = new Date();
385
+ today.setHours(0, 0, 0, 0);
386
+ for (let i = 6; i >= 0; i--) {
387
+ const d = new Date(today);
388
+ d.setDate(d.getDate() - i);
389
+ const dayStr = d.toISOString().split('T')[0];
390
+ weeklyActivity.push(studyDays.has(dayStr));
391
+ }
392
+
393
+ // Flashcard stats
394
+ const totalCards = await ctx.db.flashcardProgress.count({ where: { userId } });
395
+ const masteredCards = await ctx.db.flashcardProgress.count({
396
+ where: { userId, masteryLevel: { gte: 80 } },
397
+ });
398
+ const dueCards = await ctx.db.flashcardProgress.count({
399
+ where: { userId, nextReviewAt: { lte: new Date() } },
400
+ });
401
+
402
+ // Worksheet stats
403
+ const completedQuestions = await ctx.db.worksheetQuestionProgress.count({
404
+ where: { userId, completedAt: { not: null } },
405
+ });
406
+ const correctQuestions = await ctx.db.worksheetQuestionProgress.count({
407
+ where: { userId, correct: true },
408
+ });
409
+
410
+ return {
411
+ streak,
412
+ totalStudyDays: studyDays.size,
413
+ weeklyActivity,
414
+ flashcards: {
415
+ total: totalCards,
416
+ mastered: masteredCards,
417
+ dueForReview: dueCards,
418
+ },
419
+ worksheets: {
420
+ completed: completedQuestions,
421
+ correct: correctQuestions,
422
+ accuracy: completedQuestions > 0 ? Math.round((correctQuestions / completedQuestions) * 100) : 0,
423
+ },
424
+ };
425
+ }),
426
+
197
427
  update: authedProcedure
198
428
  .input(z.object({
199
429
  id: z.string(),
200
430
  name: z.string().min(1).max(100).optional(),
201
431
  description: z.string().max(500).optional(),
202
- color: z.string().optional(),
432
+ markerColor: z.string().nullable().optional(),
203
433
  icon: z.string().optional(),
204
434
  }))
205
435
  .mutation(async ({ ctx, input }) => {
@@ -212,10 +442,14 @@ export const workspace = router({
212
442
  data: {
213
443
  title: input.name ?? existed.title,
214
444
  description: input.description,
215
- color: input.color ?? existed.color,
445
+ // Preserve explicit null ("None color") instead of falling back.
446
+ markerColor: input.markerColor !== undefined ? input.markerColor : existed.markerColor,
216
447
  icon: input.icon ?? existed.icon,
217
448
  },
218
449
  });
450
+
451
+ await PusherService.emitLibraryUpdate(ctx.session.user.id);
452
+
219
453
  return updated;
220
454
  }),
221
455
  delete: authedProcedure
@@ -223,10 +457,41 @@ export const workspace = router({
223
457
  id: z.string(),
224
458
  }))
225
459
  .mutation(async ({ ctx, input }) => {
460
+ const workspaceToDelete = await ctx.db.workspace.findFirst({
461
+ where: { id: input.id, ownerId: ctx.session.user.id },
462
+ select: {
463
+ id: true,
464
+ title: true,
465
+ ownerId: true,
466
+ members: {
467
+ select: { userId: true },
468
+ },
469
+ },
470
+ });
471
+
472
+ if (!workspaceToDelete) throw new TRPCError({ code: 'NOT_FOUND' });
473
+
474
+ const actor = await ctx.db.user.findUnique({
475
+ where: { id: ctx.session.user.id },
476
+ select: { name: true, email: true },
477
+ });
478
+ const actorName = actor?.name || actor?.email || 'A user';
479
+
480
+ await notifyWorkspaceDeleted(ctx.db, {
481
+ recipientUserIds: workspaceToDelete.members.map((m) => m.userId),
482
+ actorUserId: ctx.session.user.id,
483
+ actorName,
484
+ workspaceId: workspaceToDelete.id,
485
+ workspaceTitle: workspaceToDelete.title,
486
+ });
487
+
226
488
  const deleted = await ctx.db.workspace.deleteMany({
227
489
  where: { id: input.id, ownerId: ctx.session.user.id },
228
490
  });
229
491
  if (deleted.count === 0) throw new TRPCError({ code: 'NOT_FOUND' });
492
+
493
+ await PusherService.emitLibraryUpdate(ctx.session.user.id);
494
+
230
495
  return true;
231
496
  }),
232
497
  getFolderInformation: authedProcedure
@@ -260,9 +525,11 @@ export const workspace = router({
260
525
  const user = await ctx.db.user.findFirst({ where: { id: ctx.session.user.id } });
261
526
  if (!user || !user.email) throw new TRPCError({ code: 'NOT_FOUND' });
262
527
  const sharedWith = await ctx.db.workspace.findMany({ where: { members: { some: { userId: ctx.session.user.id } } } });
263
- const invitations = await ctx.db.workspaceInvitation.findMany({ where: { email: user.email, acceptedAt: null }, include: {
264
- workspace: true,
265
- } });
528
+ const invitations = await ctx.db.workspaceInvitation.findMany({
529
+ where: { email: user.email, acceptedAt: null }, include: {
530
+ workspace: true,
531
+ }
532
+ });
266
533
 
267
534
  return { shared: sharedWith, invitations };
268
535
  }),
@@ -281,6 +548,24 @@ export const workspace = router({
281
548
  // ensure workspace belongs to user
282
549
  const ws = await ctx.db.workspace.findFirst({ where: { id: input.id, ownerId: ctx.session.user.id } });
283
550
  if (!ws) throw new TRPCError({ code: 'NOT_FOUND' });
551
+
552
+ // Check storage limit
553
+ const workspaces = await ctx.db.workspace.findMany({
554
+ where: { OR: [{ ownerId: ctx.session.user.id }, { sharedWith: { some: { id: ctx.session.user.id } } }] },
555
+ });
556
+ const spaceUsed = await ctx.db.fileAsset.aggregate({
557
+ where: { workspaceId: { in: workspaces.map((w: any) => w.id) }, userId: ctx.session.user.id },
558
+ _sum: { size: true },
559
+ });
560
+ const storageLimit = await getUserStorageLimit(ctx.session.user.id);
561
+ const totalSize = input.files.reduce((acc, file) => acc + file.size, 0);
562
+ if ((spaceUsed._sum?.size ?? 0) + totalSize > storageLimit) {
563
+ logger.warn(`Storage limit exceeded for user ${ctx.session.user.id}. Used: ${spaceUsed._sum?.size}, Tried to upload: ${totalSize}, Limit: ${storageLimit}`);
564
+ throw new TRPCError({
565
+ code: 'FORBIDDEN',
566
+ message: `Storage limit exceeded. Maximum allowed storage is ${(storageLimit / (1024 * 1024 * 1024)).toFixed(1)}GB.`
567
+ });
568
+ }
284
569
  const results = [];
285
570
 
286
571
  for (const file of input.files) {
@@ -298,13 +583,13 @@ export const workspace = router({
298
583
  // 2. Generate signed URL for direct upload
299
584
  const objectKey = `${ctx.session.user.id}/${record.id}-${file.filename}`;
300
585
  const { data: signedUrlData, error: signedUrlError } = await supabaseClient.storage
301
- .from('files')
586
+ .from('media')
302
587
  .createSignedUploadUrl(objectKey); // 5 minutes
303
588
 
304
589
  if (signedUrlError) {
305
590
  throw new TRPCError({
306
591
  code: 'INTERNAL_SERVER_ERROR',
307
- message: `Failed to generate upload URL: ${signedUrlError.message}`
592
+ message: `Failed to upload file`
308
593
  });
309
594
  }
310
595
 
@@ -312,7 +597,7 @@ export const workspace = router({
312
597
  await ctx.db.fileAsset.update({
313
598
  where: { id: record.id },
314
599
  data: {
315
- bucket: 'files',
600
+ bucket: 'media',
316
601
  objectKey: objectKey,
317
602
  },
318
603
  });
@@ -347,7 +632,7 @@ export const workspace = router({
347
632
  .from(file.bucket)
348
633
  .remove([file.objectKey])
349
634
  .catch((err: unknown) => {
350
- console.error(`Error deleting file ${file.objectKey} from bucket ${file.bucket}:`, err);
635
+ logger.error(`Error deleting file ${file.objectKey} from bucket ${file.bucket}:`, err);
351
636
  });
352
637
  }
353
638
  }
@@ -369,6 +654,23 @@ export const workspace = router({
369
654
  size: z.number(),
370
655
  }))
371
656
  .query(async ({ ctx, input }) => {
657
+ // Check storage limit
658
+ const workspaces = await ctx.db.workspace.findMany({
659
+ where: { OR: [{ ownerId: ctx.session.user.id }, { sharedWith: { some: { id: ctx.session.user.id } } }] },
660
+ });
661
+ const spaceUsed = await ctx.db.fileAsset.aggregate({
662
+ where: { workspaceId: { in: workspaces.map((w: any) => w.id) }, userId: ctx.session.user.id },
663
+ _sum: { size: true },
664
+ });
665
+ const storageLimit = await getUserStorageLimit(ctx.session.user.id);
666
+ if ((spaceUsed._sum?.size ?? 0) + input.size > storageLimit) {
667
+ logger.warn(`Storage limit exceeded for user ${ctx.session.user.id}. Used: ${spaceUsed._sum?.size}, Tried to upload: ${input.size}, Limit: ${storageLimit}`);
668
+ throw new TRPCError({
669
+ code: 'FORBIDDEN',
670
+ message: `Storage limit exceeded. Maximum allowed storage is ${(storageLimit / (1024 * 1024 * 1024)).toFixed(1)}GB.`
671
+ });
672
+ }
673
+
372
674
  const objectKey = `workspace_${ctx.session.user.id}/${input.workspaceId}-file_${input.filename}`;
373
675
  const fileAsset = await ctx.db.fileAsset.create({
374
676
  data: {
@@ -383,9 +685,10 @@ export const workspace = router({
383
685
  });
384
686
  const { data: signedUrlData, error: signedUrlError } = await supabaseClient.storage
385
687
  .from('media')
386
- .createSignedUploadUrl(objectKey); // 5 minutes
688
+ .createSignedUploadUrl(objectKey, { upsert: true });
387
689
  if (signedUrlError) {
388
- throw new TRPCError({ code: 'INTERNAL_SERVER_ERROR', message: `Failed to generate upload URL: ${signedUrlError.message}` });
690
+ logger.error('Signed upload URL error:', signedUrlError);
691
+ throw new TRPCError({ code: 'INTERNAL_SERVER_ERROR', message: `Failed to create upload URL: ${signedUrlError.message}` });
389
692
  }
390
693
 
391
694
  await ctx.db.workspace.update({
@@ -398,7 +701,7 @@ export const workspace = router({
398
701
  uploadUrl: signedUrlData.signedUrl,
399
702
  };
400
703
  }),
401
- uploadAndAnalyzeMedia: authedProcedure
704
+ uploadAndAnalyzeMedia: verifiedProcedure
402
705
  .input(z.object({
403
706
  workspaceId: z.string(),
404
707
  files: z.array(z.object({
@@ -414,7 +717,7 @@ export const workspace = router({
414
717
  where: { id: input.workspaceId, ownerId: ctx.session.user.id }
415
718
  });
416
719
  if (!workspace) {
417
- console.error('Workspace not found', { workspaceId: input.workspaceId, userId: ctx.session.user.id });
720
+ logger.error('Workspace not found', { workspaceId: input.workspaceId, userId: ctx.session.user.id });
418
721
  throw new TRPCError({ code: 'NOT_FOUND' });
419
722
  }
420
723
 
@@ -462,43 +765,18 @@ export const workspace = router({
462
765
  data: { fileBeingAnalyzed: true },
463
766
  });
464
767
 
465
- PusherService.emitAnalysisProgress(input.workspaceId, {
466
- status: 'starting',
467
- filename: primaryFile.name,
468
- fileType,
469
- startedAt: new Date().toISOString(),
470
- steps: {
471
- fileUpload: { order: 1, status: 'pending' },
472
- },
473
- });
768
+ const genConfig = { generateStudyGuide: input.generateStudyGuide, generateFlashcards: input.generateFlashcards };
769
+
770
+ PusherService.emitAnalysisProgress(input.workspaceId,
771
+ buildProgress('starting', primaryFile.name, fileType, 'fileUpload', 'pending', genConfig)
772
+ );
474
773
 
475
774
  try {
476
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
477
- status: 'starting',
478
- filename: primaryFile.name,
479
- fileType,
480
- startedAt: new Date().toISOString(),
481
- steps: {
482
- fileUpload: {
483
- order: 1,
484
- status: 'pending',
485
- },
486
- fileAnalysis: {
487
- order: 2,
488
- status: 'pending',
489
- },
490
- studyGuide: {
491
- order: 3,
492
- status: input.generateStudyGuide ? 'pending' : 'skipped',
493
- },
494
- flashcards: {
495
- order: 4,
496
- status: input.generateFlashcards ? 'pending' : 'skipped',
497
- },
498
- }
499
- });
775
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
776
+ buildProgress('starting', primaryFile.name, fileType, 'fileUpload', 'pending', genConfig)
777
+ );
500
778
  } catch (error) {
501
- console.error('Failed to update analysis progress:', error);
779
+ logger.error('Failed to update analysis progress:', error);
502
780
  await ctx.db.workspace.update({
503
781
  where: { id: input.workspaceId },
504
782
  data: { fileBeingAnalyzed: false },
@@ -507,30 +785,9 @@ export const workspace = router({
507
785
  throw error;
508
786
  }
509
787
 
510
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
511
- status: 'uploading',
512
- filename: primaryFile.name,
513
- fileType,
514
- startedAt: new Date().toISOString(),
515
- steps: {
516
- fileUpload: {
517
- order: 1,
518
- status: 'in_progress',
519
- },
520
- fileAnalysis: {
521
- order: 2,
522
- status: 'pending',
523
- },
524
- studyGuide: {
525
- order: 3,
526
- status: input.generateStudyGuide ? 'pending' : 'skipped',
527
- },
528
- flashcards: {
529
- order: 4,
530
- status: input.generateFlashcards ? 'pending' : 'skipped',
531
- },
532
- }
533
- });
788
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
789
+ buildProgress('uploading', primaryFile.name, fileType, 'fileUpload', 'in_progress', genConfig)
790
+ );
534
791
 
535
792
  // Process all files using the new process_file endpoint
536
793
  for (const file of files) {
@@ -550,7 +807,7 @@ export const workspace = router({
550
807
  });
551
808
  throw new TRPCError({
552
809
  code: 'INTERNAL_SERVER_ERROR',
553
- message: `Failed to generate signed URL for file ${file.name}: ${signedUrlError.message}`
810
+ message: `Failed to upload file`
554
811
  });
555
812
  }
556
813
 
@@ -567,7 +824,7 @@ export const workspace = router({
567
824
  currentFileType,
568
825
  maxPages
569
826
  );
570
-
827
+
571
828
  if (processResult.status === 'error') {
572
829
  logger.error(`Failed to process file ${file.name}:`, processResult.error);
573
830
  // Continue processing other files even if one fails
@@ -589,30 +846,9 @@ export const workspace = router({
589
846
  }
590
847
  }
591
848
 
592
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
593
- status: 'analyzing',
594
- filename: primaryFile.name,
595
- fileType,
596
- startedAt: new Date().toISOString(),
597
- steps: {
598
- fileUpload: {
599
- order: 1,
600
- status: 'completed',
601
- },
602
- fileAnalysis: {
603
- order: 2,
604
- status: 'in_progress',
605
- },
606
- studyGuide: {
607
- order: 3,
608
- status: input.generateStudyGuide ? 'pending' : 'skipped',
609
- },
610
- flashcards: {
611
- order: 4,
612
- status: input.generateFlashcards ? 'pending' : 'skipped',
613
- },
614
- }
615
- });
849
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
850
+ buildProgress('analyzing', primaryFile.name, fileType, 'fileAnalysis', 'in_progress', genConfig)
851
+ );
616
852
 
617
853
  try {
618
854
  // Analyze all files - use PDF analysis if any file is a PDF, otherwise use image analysis
@@ -626,57 +862,17 @@ export const workspace = router({
626
862
  // }
627
863
  // }
628
864
 
629
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
630
- status: 'generating_artifacts',
631
- filename: primaryFile.name,
632
- fileType,
633
- startedAt: new Date().toISOString(),
634
- steps: {
635
- fileUpload: {
636
- order: 1,
637
- status: 'completed',
638
- },
639
- fileAnalysis: {
640
- order: 2,
641
- status: 'completed',
642
- },
643
- studyGuide: {
644
- order: 3,
645
- status: input.generateStudyGuide ? 'pending' : 'skipped',
646
- },
647
- flashcards: {
648
- order: 4,
649
- status: input.generateFlashcards ? 'pending' : 'skipped',
650
- },
651
- }
652
- });
865
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
866
+ buildProgress('generating_artifacts', primaryFile.name, fileType, 'studyGuide', 'pending', genConfig)
867
+ );
653
868
  } catch (error) {
654
- console.error('Failed to analyze files:', error);
655
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
656
- status: 'error',
657
- filename: primaryFile.name,
658
- fileType,
659
- error: `Failed to analyze ${fileType}: ${error}`,
660
- startedAt: new Date().toISOString(),
661
- steps: {
662
- fileUpload: {
663
- order: 1,
664
- status: 'completed',
665
- },
666
- fileAnalysis: {
667
- order: 2,
668
- status: 'error',
669
- },
670
- studyGuide: {
671
- order: 3,
672
- status: 'skipped',
673
- },
674
- flashcards: {
675
- order: 4,
676
- status: 'skipped',
677
- },
678
- }
679
- });
869
+ logger.error('Failed to analyze files:', error);
870
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
871
+ buildProgress('error', primaryFile.name, fileType, 'fileAnalysis', 'error', genConfig, {
872
+ error: `Failed to analyze ${fileType}: ${error}`,
873
+ studyGuide: 'skipped', flashcards: 'skipped',
874
+ })
875
+ );
680
876
  await ctx.db.workspace.update({
681
877
  where: { id: input.workspaceId },
682
878
  data: { fileBeingAnalyzed: false },
@@ -700,142 +896,180 @@ export const workspace = router({
700
896
  }
701
897
  };
702
898
 
703
- // Generate artifacts
899
+ // Ensure AI session is initialized before generating artifacts
900
+ try {
901
+ await aiSessionService.initSession(input.workspaceId, ctx.session.user.id);
902
+ } catch (initError) {
903
+ logger.error('Failed to init AI session (continuing with workspace context):', initError);
904
+ }
905
+
906
+ // Fetch current usage and limits to enforce plan restrictions for auto-generation
907
+ const [usage, limits] = await Promise.all([
908
+ getUserUsage(ctx.session.user.id),
909
+ getUserPlanLimits(ctx.session.user.id)
910
+ ]);
911
+
912
+ // Generate artifacts - each step is isolated so failures don't block subsequent steps
704
913
  if (input.generateStudyGuide) {
705
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
706
- status: 'generating_study_guide',
707
- filename: primaryFile.name,
708
- fileType,
709
- startedAt: new Date().toISOString(),
710
- steps: {
711
- fileUpload: {
712
- order: 1,
713
- status: 'completed',
714
- },
715
- fileAnalysis: {
716
- order: 2,
717
- status: 'completed',
718
- },
719
- studyGuide: {
720
- order: 3,
721
- status: 'in_progress',
722
- },
723
- flashcards: {
724
- order: 4,
725
- status: input.generateFlashcards ? 'pending' : 'skipped',
726
- },
727
- }
728
- });
914
+ // Enforcement: Skip if limit reached
915
+ if (limits && usage.studyGuides >= limits.maxStudyGuides) {
916
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
917
+ buildProgress('skipped', primaryFile.name, fileType, 'studyGuide', 'skipped', genConfig)
918
+ );
919
+ await PusherService.emitError(input.workspaceId, 'Study guide skipped: Limit reached.', 'study_guide');
920
+ await notifyArtifactFailed(ctx.db, {
921
+ userId: ctx.session.user.id,
922
+ workspaceId: input.workspaceId,
923
+ artifactType: ArtifactType.STUDY_GUIDE,
924
+ message: 'Study guide was skipped because your plan limit was reached.',
925
+ }).catch(() => {});
926
+ } else {
927
+ try {
928
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
929
+ buildProgress('generating_study_guide', primaryFile.name, fileType, 'studyGuide', 'in_progress', genConfig)
930
+ );
729
931
 
730
- const content = await aiSessionService.generateStudyGuide(input.workspaceId, ctx.session.user.id);
932
+ const content = await aiSessionService.generateStudyGuide(input.workspaceId, ctx.session.user.id);
731
933
 
732
- let artifact = await ctx.db.artifact.findFirst({
733
- where: { workspaceId: input.workspaceId, type: ArtifactType.STUDY_GUIDE },
734
- });
735
- if (!artifact) {
736
- const fileNames = files.map(f => f.name).join(', ');
737
- artifact = await ctx.db.artifact.create({
738
- data: {
739
- workspaceId: input.workspaceId,
740
- type: ArtifactType.STUDY_GUIDE,
741
- title: files.length === 1 ? `Study Guide - ${primaryFile.name}` : `Study Guide - ${files.length} files`,
742
- createdById: ctx.session.user.id,
743
- },
744
- });
745
- }
934
+ let artifact = await ctx.db.artifact.findFirst({
935
+ where: { workspaceId: input.workspaceId, type: ArtifactType.STUDY_GUIDE },
936
+ });
937
+ if (!artifact) {
938
+ artifact = await ctx.db.artifact.create({
939
+ data: {
940
+ workspaceId: input.workspaceId,
941
+ type: ArtifactType.STUDY_GUIDE,
942
+ title: files.length === 1 ? `Study Guide - ${primaryFile.name}` : `Study Guide - ${files.length} files`,
943
+ createdById: ctx.session.user.id,
944
+ },
945
+ });
946
+ }
746
947
 
747
- const lastVersion = await ctx.db.artifactVersion.findFirst({
748
- where: { artifact: { workspaceId: input.workspaceId, type: ArtifactType.STUDY_GUIDE } },
749
- orderBy: { version: 'desc' },
750
- });
948
+ const lastVersion = await ctx.db.artifactVersion.findFirst({
949
+ where: { artifact: { workspaceId: input.workspaceId, type: ArtifactType.STUDY_GUIDE } },
950
+ orderBy: { version: 'desc' },
951
+ });
751
952
 
752
- await ctx.db.artifactVersion.create({
753
- data: { artifactId: artifact.id, version: lastVersion ? lastVersion.version + 1 : 1, content: content, createdById: ctx.session.user.id },
754
- });
953
+ await ctx.db.artifactVersion.create({
954
+ data: { artifactId: artifact.id, version: lastVersion ? lastVersion.version + 1 : 1, content: content, createdById: ctx.session.user.id },
955
+ });
755
956
 
756
- results.artifacts.studyGuide = artifact;
957
+ results.artifacts.studyGuide = artifact;
958
+ await PusherService.emitStudyGuideComplete(input.workspaceId, artifact);
959
+ await notifyArtifactReady(ctx.db, {
960
+ userId: ctx.session.user.id,
961
+ workspaceId: input.workspaceId,
962
+ artifactId: artifact.id,
963
+ artifactType: ArtifactType.STUDY_GUIDE,
964
+ title: artifact.title,
965
+ }).catch(() => {});
966
+ } catch (sgError) {
967
+ logger.error('Study guide generation failed after retries:', sgError);
968
+ await PusherService.emitError(input.workspaceId, 'Study guide generation failed. Please try regenerating later.', 'study_guide');
969
+ await notifyArtifactFailed(ctx.db, {
970
+ userId: ctx.session.user.id,
971
+ workspaceId: input.workspaceId,
972
+ artifactType: ArtifactType.STUDY_GUIDE,
973
+ message: 'Study guide generation failed. Please try regenerating later.',
974
+ }).catch(() => {});
975
+ // Continue to flashcards - don't abort the whole pipeline
976
+ }
977
+ }
757
978
  }
758
979
 
759
980
  if (input.generateFlashcards) {
760
- await updateAnalysisProgress(ctx.db, input.workspaceId, {
761
- status: 'generating_flashcards',
762
- filename: primaryFile.name,
763
- fileType,
764
- startedAt: new Date().toISOString(),
765
- steps: {
766
- fileUpload: {
767
- order: 1,
768
- status: 'completed',
769
- },
770
- fileAnalysis: {
771
- order: 2,
772
- status: 'completed',
773
- },
774
- studyGuide: {
775
- order: 3,
776
- status: input.generateStudyGuide ? 'completed' : 'skipped',
777
- },
778
- flashcards: {
779
- order: 4,
780
- status: 'in_progress',
781
- },
782
- }
783
- });
784
-
785
- const content = await aiSessionService.generateFlashcardQuestions(input.workspaceId, ctx.session.user.id, 10, 'medium');
786
-
787
- const artifact = await ctx.db.artifact.create({
788
- data: {
981
+ // Enforcement: Skip if limit reached
982
+ if (limits && usage.flashcards >= limits.maxFlashcards) {
983
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
984
+ buildProgress('skipped', primaryFile.name, fileType, 'flashcards', 'skipped', genConfig)
985
+ );
986
+ await PusherService.emitError(input.workspaceId, 'Flashcards skipped: Limit reached.', 'flashcards');
987
+ await notifyArtifactFailed(ctx.db, {
988
+ userId: ctx.session.user.id,
789
989
  workspaceId: input.workspaceId,
790
- type: ArtifactType.FLASHCARD_SET,
791
- title: files.length === 1 ? `Flashcards - ${primaryFile.name}` : `Flashcards - ${files.length} files`,
792
- createdById: ctx.session.user.id,
793
- },
794
- });
795
-
796
- // Parse JSON flashcard content
797
- try {
798
- const flashcardData: any = content;
990
+ artifactType: ArtifactType.FLASHCARD_SET,
991
+ message: 'Flashcards were skipped because your plan limit was reached.',
992
+ }).catch(() => {});
993
+ } else {
994
+ try {
995
+ const sgStatus = input.generateStudyGuide ? (results.artifacts.studyGuide ? 'completed' : 'error') : 'skipped';
996
+ await updateAnalysisProgress(ctx.db, input.workspaceId,
997
+ buildProgress('generating_flashcards', primaryFile.name, fileType, 'flashcards', 'in_progress', genConfig,
998
+ { studyGuide: sgStatus } as any)
999
+ );
799
1000
 
800
- let createdCards = 0;
801
- for (let i = 0; i < Math.min(flashcardData.length, 10); i++) {
802
- const card = flashcardData[i];
803
- const front = card.term || card.front || card.question || card.prompt || `Question ${i + 1}`;
804
- const back = card.definition || card.back || card.answer || card.solution || `Answer ${i + 1}`;
1001
+ const content = await aiSessionService.generateFlashcardQuestions(input.workspaceId, ctx.session.user.id, 10, 'medium');
805
1002
 
806
- await ctx.db.flashcard.create({
1003
+ const artifact = await ctx.db.artifact.create({
807
1004
  data: {
808
- artifactId: artifact.id,
809
- front: front,
810
- back: back,
811
- order: i,
812
- tags: ['ai-generated', 'medium'],
1005
+ workspaceId: input.workspaceId,
1006
+ type: ArtifactType.FLASHCARD_SET,
1007
+ title: files.length === 1 ? `Flashcards - ${primaryFile.name}` : `Flashcards - ${files.length} files`,
1008
+ createdById: ctx.session.user.id,
813
1009
  },
814
1010
  });
815
- createdCards++;
816
- }
817
1011
 
818
- } catch (parseError) {
819
- // Fallback to text parsing if JSON fails
820
- const lines = content.split('\n').filter(line => line.trim());
821
- for (let i = 0; i < Math.min(lines.length, 10); i++) {
822
- const line = lines[i];
823
- if (line.includes(' - ')) {
824
- const [front, back] = line.split(' - ');
825
- await ctx.db.flashcard.create({
826
- data: {
827
- artifactId: artifact.id,
828
- front: front.trim(),
829
- back: back.trim(),
830
- order: i,
831
- tags: ['ai-generated', 'medium'],
832
- },
833
- });
1012
+ // Parse JSON flashcard content
1013
+ try {
1014
+ const parsed = typeof content === 'string' ? JSON.parse(content) : content;
1015
+ const flashcardData = Array.isArray(parsed) ? parsed : (parsed.flashcards || []);
1016
+
1017
+ for (let i = 0; i < Math.min(flashcardData.length, 10); i++) {
1018
+ const card = flashcardData[i];
1019
+ const front = card.term || card.front || card.question || card.prompt || `Question ${i + 1}`;
1020
+ const back = card.definition || card.back || card.answer || card.solution || `Answer ${i + 1}`;
1021
+
1022
+ await ctx.db.flashcard.create({
1023
+ data: {
1024
+ artifactId: artifact.id,
1025
+ front: front,
1026
+ back: back,
1027
+ order: i,
1028
+ tags: ['ai-generated', 'medium'],
1029
+ },
1030
+ });
1031
+ }
1032
+ } catch (parseError) {
1033
+ console.error("Failed to parse flashcard JSON or create cards in workspace router:", parseError);
1034
+ // Fallback to text parsing if JSON fails
1035
+ const lines = content.split('\n').filter((line: string) => line.trim());
1036
+ for (let i = 0; i < Math.min(lines.length, 10); i++) {
1037
+ const line = lines[i];
1038
+ if (line.includes(' - ')) {
1039
+ const [front, back] = line.split(' - ');
1040
+ await ctx.db.flashcard.create({
1041
+ data: {
1042
+ artifactId: artifact.id,
1043
+ front: front.trim(),
1044
+ back: back.trim(),
1045
+ order: i,
1046
+ tags: ['ai-generated', 'medium'],
1047
+ },
1048
+ });
1049
+ }
1050
+ }
834
1051
  }
1052
+
1053
+ results.artifacts.flashcards = artifact;
1054
+ await PusherService.emitFlashcardComplete(input.workspaceId, artifact);
1055
+ await notifyArtifactReady(ctx.db, {
1056
+ userId: ctx.session.user.id,
1057
+ workspaceId: input.workspaceId,
1058
+ artifactId: artifact.id,
1059
+ artifactType: ArtifactType.FLASHCARD_SET,
1060
+ title: artifact.title,
1061
+ }).catch(() => {});
1062
+ } catch (fcError) {
1063
+ logger.error('Flashcard generation failed after retries:', fcError);
1064
+ await PusherService.emitError(input.workspaceId, 'Flashcard generation failed. Please try regenerating later.', 'flashcards');
1065
+ await notifyArtifactFailed(ctx.db, {
1066
+ userId: ctx.session.user.id,
1067
+ workspaceId: input.workspaceId,
1068
+ artifactType: ArtifactType.FLASHCARD_SET,
1069
+ message: 'Flashcard generation failed. Please try regenerating later.',
1070
+ }).catch(() => {});
835
1071
  }
836
1072
  }
837
-
838
- results.artifacts.flashcards = artifact;
839
1073
  }
840
1074
 
841
1075
  await ctx.db.workspace.update({
@@ -844,34 +1078,12 @@ export const workspace = router({
844
1078
  });
845
1079
 
846
1080
  await updateAnalysisProgress(ctx.db, input.workspaceId, {
847
- status: 'completed',
848
- filename: primaryFile.name,
849
- fileType,
850
- startedAt: new Date().toISOString(),
1081
+ ...buildProgress('completed', primaryFile.name, fileType, 'flashcards', 'completed', genConfig),
851
1082
  completedAt: new Date().toISOString(),
852
- steps: {
853
- fileUpload: {
854
- order: 1,
855
- status: 'completed',
856
- },
857
- fileAnalysis: {
858
- order: 2,
859
- status: 'completed',
860
- },
861
- studyGuide: {
862
- order: 3,
863
- status: input.generateStudyGuide ? 'completed' : 'skipped',
864
- },
865
- flashcards: {
866
- order: 4,
867
- status: input.generateFlashcards ? 'completed' : 'skipped',
868
- },
869
- }
870
-
871
1083
  });
872
1084
  return results;
873
1085
  } catch (error) {
874
- console.error('Failed to update analysis progress:', error);
1086
+ logger.error('Failed to update analysis progress:', error);
875
1087
  await ctx.db.workspace.update({
876
1088
  where: { id: input.workspaceId },
877
1089
  data: { fileBeingAnalyzed: false },
@@ -883,51 +1095,49 @@ export const workspace = router({
883
1095
  search: authedProcedure
884
1096
  .input(z.object({
885
1097
  query: z.string(),
1098
+ color: z.string().optional(),
886
1099
  limit: z.number().min(1).max(100).default(20),
887
1100
  }))
888
1101
  .query(async ({ ctx, input }) => {
889
- const { query } = input;
1102
+ const { query, color } = input;
1103
+
1104
+ // 1. Search Workspaces
890
1105
  const workspaces = await ctx.db.workspace.findMany({
891
1106
  where: {
892
1107
  ownerId: ctx.session.user.id,
893
- OR: [
894
- {
895
- title: {
896
- contains: query,
897
- mode: 'insensitive',
898
- },
899
- },
900
- {
901
- description: {
902
- contains: query,
903
- mode: 'insensitive',
904
- },
905
- },
906
- ],
907
- },
908
- orderBy: {
909
- updatedAt: 'desc',
1108
+ markerColor: color || undefined,
1109
+ ...(query ? {
1110
+ OR: [
1111
+ { title: { contains: query, mode: 'insensitive' } },
1112
+ { description: { contains: query, mode: 'insensitive' } },
1113
+ ],
1114
+ } : {}),
910
1115
  },
1116
+ orderBy: { updatedAt: 'desc' },
911
1117
  take: input.limit,
912
1118
  });
913
1119
 
914
- // Update analysisProgress for each workspace with search metadata
915
- const workspaceUpdates = workspaces.map(ws =>
916
- ctx.db.workspace.update({
917
- where: { id: ws.id },
918
- data: {
919
- analysisProgress: {
920
- lastSearched: new Date().toISOString(),
921
- searchQuery: query,
922
- matchedIn: ws.title.toLowerCase().includes(query.toLowerCase()) ? 'title' : 'description',
923
- }
924
- }
925
- })
926
- );
1120
+ // 2. Search Folders
1121
+ const folders = await ctx.db.folder.findMany({
1122
+ where: {
1123
+ ownerId: ctx.session.user.id,
1124
+ markerColor: color || undefined,
1125
+ ...(query ? {
1126
+ name: { contains: query, mode: 'insensitive' },
1127
+ } : {}),
1128
+ },
1129
+ orderBy: { updatedAt: 'desc' },
1130
+ take: input.limit,
1131
+ });
927
1132
 
928
- await Promise.all(workspaceUpdates);
1133
+ // Combined results with type discriminator
1134
+ const results = [
1135
+ ...workspaces.map((w: any) => ({ ...w, type: 'workspace' as const })),
1136
+ ...folders.map((f: any) => ({ ...f, type: 'folder' as const, title: f.name })), // normalize name to title
1137
+ ].sort((a, b) => b.updatedAt.getTime() - a.updatedAt.getTime())
1138
+ .slice(0, input.limit);
929
1139
 
930
- return workspaces;
1140
+ return results;
931
1141
  }),
932
1142
 
933
1143
  // Members sub-router