@m5kdev/backend 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +621 -0
- package/README.md +22 -0
- package/package.json +205 -0
- package/src/lib/posthog.ts +5 -0
- package/src/lib/sentry.ts +8 -0
- package/src/modules/access/access.repository.ts +36 -0
- package/src/modules/access/access.service.ts +81 -0
- package/src/modules/access/access.test.ts +216 -0
- package/src/modules/access/access.utils.ts +46 -0
- package/src/modules/ai/ai.db.ts +38 -0
- package/src/modules/ai/ai.prompt.ts +47 -0
- package/src/modules/ai/ai.repository.ts +53 -0
- package/src/modules/ai/ai.router.ts +148 -0
- package/src/modules/ai/ai.service.ts +310 -0
- package/src/modules/ai/ai.trpc.ts +22 -0
- package/src/modules/ai/ideogram/ideogram.constants.ts +170 -0
- package/src/modules/ai/ideogram/ideogram.dto.ts +64 -0
- package/src/modules/ai/ideogram/ideogram.prompt.ts +858 -0
- package/src/modules/ai/ideogram/ideogram.repository.ts +39 -0
- package/src/modules/ai/ideogram/ideogram.service.ts +14 -0
- package/src/modules/auth/auth.db.ts +224 -0
- package/src/modules/auth/auth.dto.ts +47 -0
- package/src/modules/auth/auth.lib.ts +349 -0
- package/src/modules/auth/auth.middleware.ts +62 -0
- package/src/modules/auth/auth.repository.ts +672 -0
- package/src/modules/auth/auth.service.ts +261 -0
- package/src/modules/auth/auth.trpc.ts +208 -0
- package/src/modules/auth/auth.utils.ts +117 -0
- package/src/modules/base/base.abstract.ts +62 -0
- package/src/modules/base/base.dto.ts +206 -0
- package/src/modules/base/base.grants.test.ts +861 -0
- package/src/modules/base/base.grants.ts +199 -0
- package/src/modules/base/base.repository.ts +433 -0
- package/src/modules/base/base.service.ts +154 -0
- package/src/modules/base/base.types.ts +7 -0
- package/src/modules/billing/billing.db.ts +27 -0
- package/src/modules/billing/billing.repository.ts +328 -0
- package/src/modules/billing/billing.router.ts +77 -0
- package/src/modules/billing/billing.service.ts +177 -0
- package/src/modules/billing/billing.trpc.ts +17 -0
- package/src/modules/clay/clay.repository.ts +29 -0
- package/src/modules/clay/clay.service.ts +61 -0
- package/src/modules/connect/connect.db.ts +32 -0
- package/src/modules/connect/connect.dto.ts +44 -0
- package/src/modules/connect/connect.linkedin.ts +70 -0
- package/src/modules/connect/connect.oauth.ts +288 -0
- package/src/modules/connect/connect.repository.ts +65 -0
- package/src/modules/connect/connect.router.ts +76 -0
- package/src/modules/connect/connect.service.ts +171 -0
- package/src/modules/connect/connect.trpc.ts +26 -0
- package/src/modules/connect/connect.types.ts +27 -0
- package/src/modules/crypto/crypto.db.ts +15 -0
- package/src/modules/crypto/crypto.repository.ts +13 -0
- package/src/modules/crypto/crypto.service.ts +57 -0
- package/src/modules/email/email.service.ts +222 -0
- package/src/modules/file/file.repository.ts +95 -0
- package/src/modules/file/file.router.ts +108 -0
- package/src/modules/file/file.service.ts +186 -0
- package/src/modules/recurrence/recurrence.db.ts +79 -0
- package/src/modules/recurrence/recurrence.repository.ts +70 -0
- package/src/modules/recurrence/recurrence.service.ts +105 -0
- package/src/modules/recurrence/recurrence.trpc.ts +82 -0
- package/src/modules/social/social.dto.ts +22 -0
- package/src/modules/social/social.linkedin.test.ts +277 -0
- package/src/modules/social/social.linkedin.ts +593 -0
- package/src/modules/social/social.service.ts +112 -0
- package/src/modules/social/social.types.ts +43 -0
- package/src/modules/tag/tag.db.ts +41 -0
- package/src/modules/tag/tag.dto.ts +18 -0
- package/src/modules/tag/tag.repository.ts +222 -0
- package/src/modules/tag/tag.service.ts +48 -0
- package/src/modules/tag/tag.trpc.ts +62 -0
- package/src/modules/uploads/0581796b-8845-420d-bd95-cd7de79f6d37.webm +0 -0
- package/src/modules/uploads/33b1e649-6727-4bd0-94d0-a0b363646865.webm +0 -0
- package/src/modules/uploads/49a8c4c0-54d7-4c94-bef4-c93c029f9ed0.webm +0 -0
- package/src/modules/uploads/50e31e38-a2f0-47ca-8b7d-2d7fcad9267d.webm +0 -0
- package/src/modules/uploads/72ac8cf9-c3a7-4cd8-8a78-6d8e137a4c7e.webm +0 -0
- package/src/modules/uploads/75293649-d966-46cd-a675-67518958ae9c.png +0 -0
- package/src/modules/uploads/88b7b867-ce15-4891-bf73-81305a7de1f7.wav +0 -0
- package/src/modules/uploads/a5d6fee8-6a59-42c6-9d4a-ac8a3c5e7245.webm +0 -0
- package/src/modules/uploads/c13a9785-ca5a-4983-af30-b338ed76d370.webm +0 -0
- package/src/modules/uploads/caa1a5a7-71ba-4381-902d-7e2cafdf6dcb.webm +0 -0
- package/src/modules/uploads/cbeb0b81-374d-445b-914b-40ace7c8e031.webm +0 -0
- package/src/modules/uploads/d626aa82-b10f-493f-aee7-87bfb3361dfc.webm +0 -0
- package/src/modules/uploads/d7de4c16-de0c-495d-9612-e72260a6ecca.png +0 -0
- package/src/modules/uploads/e532e38a-6421-400e-8a5f-8e7bc8ce411b.wav +0 -0
- package/src/modules/uploads/e86ec867-6adf-4c51-84e0-00b0836625e8.webm +0 -0
- package/src/modules/utils/applyPagination.ts +13 -0
- package/src/modules/utils/applySorting.ts +21 -0
- package/src/modules/utils/getConditionsFromFilters.ts +216 -0
- package/src/modules/video/video.service.ts +89 -0
- package/src/modules/webhook/webhook.constants.ts +9 -0
- package/src/modules/webhook/webhook.db.ts +15 -0
- package/src/modules/webhook/webhook.dto.ts +9 -0
- package/src/modules/webhook/webhook.repository.ts +68 -0
- package/src/modules/webhook/webhook.router.ts +29 -0
- package/src/modules/webhook/webhook.service.ts +78 -0
- package/src/modules/workflow/workflow.db.ts +29 -0
- package/src/modules/workflow/workflow.repository.ts +171 -0
- package/src/modules/workflow/workflow.service.ts +56 -0
- package/src/modules/workflow/workflow.trpc.ts +26 -0
- package/src/modules/workflow/workflow.types.ts +30 -0
- package/src/modules/workflow/workflow.utils.ts +259 -0
- package/src/test/stubs/utils.ts +2 -0
- package/src/trpc/context.ts +21 -0
- package/src/trpc/index.ts +3 -0
- package/src/trpc/procedures.ts +43 -0
- package/src/trpc/utils.ts +20 -0
- package/src/types.ts +22 -0
- package/src/utils/errors.ts +148 -0
- package/src/utils/logger.ts +8 -0
- package/src/utils/posthog.ts +43 -0
- package/src/utils/types.ts +5 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { AiModel } from "@m5kdev/commons/modules/ai/ai.constants";
|
|
2
|
+
import mustache from "mustache";
|
|
3
|
+
import { logger } from "#utils/logger";
|
|
4
|
+
|
|
5
|
+
export class Prompt<C extends Record<string, string>> {
|
|
6
|
+
public prompt: string;
|
|
7
|
+
public name?: string;
|
|
8
|
+
public type: "text" | "chat";
|
|
9
|
+
public config?: {
|
|
10
|
+
model?: AiModel;
|
|
11
|
+
temperature?: number;
|
|
12
|
+
supported_languages?: string[];
|
|
13
|
+
};
|
|
14
|
+
public version?: number;
|
|
15
|
+
public labels?: string[];
|
|
16
|
+
public tags?: string[];
|
|
17
|
+
|
|
18
|
+
constructor(
|
|
19
|
+
prompt: string,
|
|
20
|
+
settings?: {
|
|
21
|
+
name?: string;
|
|
22
|
+
type?: "text" | "chat";
|
|
23
|
+
config?: {
|
|
24
|
+
model?: AiModel;
|
|
25
|
+
temperature?: number;
|
|
26
|
+
supported_languages?: string[];
|
|
27
|
+
};
|
|
28
|
+
version?: number;
|
|
29
|
+
labels?: string[];
|
|
30
|
+
tags?: string[];
|
|
31
|
+
}
|
|
32
|
+
) {
|
|
33
|
+
this.prompt = prompt;
|
|
34
|
+
this.name = settings?.name;
|
|
35
|
+
this.type = settings?.type ?? "text";
|
|
36
|
+
this.config = settings?.config;
|
|
37
|
+
this.version = settings?.version;
|
|
38
|
+
this.labels = settings?.labels;
|
|
39
|
+
this.tags = settings?.tags;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
compile(context: C): string {
|
|
43
|
+
const result = mustache.render(this.prompt.trim(), context);
|
|
44
|
+
logger.debug(`[PROMPT]: ${result.trim()}`);
|
|
45
|
+
return result.trim();
|
|
46
|
+
}
|
|
47
|
+
}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import type { InferInsertModel, InferSelectModel } from "drizzle-orm";
|
|
2
|
+
import { eq, sql } from "drizzle-orm";
|
|
3
|
+
import type { LibSQLDatabase } from "drizzle-orm/libsql";
|
|
4
|
+
import { ok } from "neverthrow";
|
|
5
|
+
import * as ai from "#modules/ai/ai.db";
|
|
6
|
+
import type { ServerResultAsync } from "#modules/base/base.dto";
|
|
7
|
+
import { BaseTableRepository } from "#modules/base/base.repository";
|
|
8
|
+
|
|
9
|
+
// Schema bundle handed to drizzle so queries are typed against the ai tables.
const schema = { ...ai };
type Schema = typeof schema;
type Orm = LibSQLDatabase<Schema>;

// Select/insert row shapes derived from the aiUsage table definition.
export type AiUsageRow = InferSelectModel<Schema["aiUsage"]>;
export type AiUsageInsert = InferInsertModel<Schema["aiUsage"]>;

// Payload accepted when recording one AI call's usage.
// Ownership fields (user/team/organization) are all optional so usage can be
// attributed at whichever level the caller knows about.
export interface CreateAiUsageInput {
  userId?: string;
  teamId?: string;
  organizationId?: string;
  // Feature or agent name that triggered the call (see AIService.agentUse).
  feature: string;
  // Upstream provider identifier, e.g. "openrouter".
  provider: string;
  model: string;
  inputTokens?: number;
  outputTokens?: number;
  totalTokens?: number;
  // Monetary cost as reported by the provider; units depend on the provider — TODO confirm.
  cost?: number;
  traceId?: string;
  metadata?: unknown;
}
|
|
30
|
+
|
|
31
|
+
export class AiUsageRepository extends BaseTableRepository<
|
|
32
|
+
Orm,
|
|
33
|
+
Schema,
|
|
34
|
+
Record<string, never>,
|
|
35
|
+
Schema["aiUsage"]
|
|
36
|
+
> {
|
|
37
|
+
getUsage(
|
|
38
|
+
userId: string
|
|
39
|
+
): ServerResultAsync<Pick<AiUsageRow, "inputTokens" | "outputTokens" | "totalTokens" | "cost">> {
|
|
40
|
+
return this.throwableAsync(async () => {
|
|
41
|
+
const [usage] = await this.orm
|
|
42
|
+
.select({
|
|
43
|
+
inputTokens: sql<number>`SUM(${this.table.inputTokens})`,
|
|
44
|
+
outputTokens: sql<number>`SUM(${this.table.outputTokens})`,
|
|
45
|
+
totalTokens: sql<number>`SUM(${this.table.totalTokens})`,
|
|
46
|
+
cost: sql<number>`SUM(${this.table.cost})`,
|
|
47
|
+
})
|
|
48
|
+
.from(this.table)
|
|
49
|
+
.where(eq(this.table.userId, userId));
|
|
50
|
+
return ok(usage);
|
|
51
|
+
});
|
|
52
|
+
}
|
|
53
|
+
}
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
// import { openai } from "@ai-sdk/openai";
|
|
2
|
+
// import { open}
|
|
3
|
+
// import { withTracing } from "@posthog/ai";
|
|
4
|
+
// import {
|
|
5
|
+
// appendClientMessage,
|
|
6
|
+
// appendResponseMessages,
|
|
7
|
+
// type Message,
|
|
8
|
+
// streamText,
|
|
9
|
+
// type Tool,
|
|
10
|
+
// } from "ai";
|
|
11
|
+
// import bodyParser from "body-parser";
|
|
12
|
+
// import { and, eq } from "drizzle-orm";
|
|
13
|
+
// import express, { type Response, type Router } from "express";
|
|
14
|
+
// import { v4 as uuidv4 } from "uuid";
|
|
15
|
+
// import type { AuthRequest, createAuthMiddleware } from "../auth/auth.middleware";
|
|
16
|
+
// import { type Orm, schema } from "../db";
|
|
17
|
+
// import { logger } from "../logger";
|
|
18
|
+
// import { posthogClient } from "../posthog";
|
|
19
|
+
|
|
20
|
+
// export type Tooling = Record<
|
|
21
|
+
// string,
|
|
22
|
+
// {
|
|
23
|
+
// getMesseges: (chatId: string) => Promise<Message[]>;
|
|
24
|
+
// getTools: (chatId: string) => Promise<
|
|
25
|
+
// Record<
|
|
26
|
+
// string,
|
|
27
|
+
// {
|
|
28
|
+
// tool: Tool;
|
|
29
|
+
// handler?: (
|
|
30
|
+
// chatId: string,
|
|
31
|
+
// args: unknown,
|
|
32
|
+
// user: NonNullable<AuthRequest["user"]>
|
|
33
|
+
// ) => Promise<void>;
|
|
34
|
+
// }
|
|
35
|
+
// >
|
|
36
|
+
// >;
|
|
37
|
+
// }
|
|
38
|
+
// >;
|
|
39
|
+
|
|
40
|
+
// export function tracedOpenAiModel(
|
|
41
|
+
// model: Parameters<typeof openai>[0],
|
|
42
|
+
// clientOptions: Parameters<typeof withTracing>[2]
|
|
43
|
+
// ) {
|
|
44
|
+
// return withTracing(openai(model), posthogClient, clientOptions);
|
|
45
|
+
// }
|
|
46
|
+
|
|
47
|
+
// async function getRouteSettings(id: string, name: string, settings: Tooling) {
|
|
48
|
+
// const entity = settings[name as keyof typeof settings];
|
|
49
|
+
// if (!entity) return { tools: {}, entityMesseges: [], entityTools: {} };
|
|
50
|
+
|
|
51
|
+
// const entityTools = await entity.getTools(id);
|
|
52
|
+
// const entityMesseges = await entity.getMesseges(id);
|
|
53
|
+
|
|
54
|
+
// const tools = Object.entries(entityTools).reduce<
|
|
55
|
+
// Record<string, (typeof entityTools)[keyof typeof entityTools]["tool"]>
|
|
56
|
+
// >((acc, [key, value]) => {
|
|
57
|
+
// acc[key] = value.tool as (typeof entityTools)[keyof typeof entityTools]["tool"];
|
|
58
|
+
// return acc;
|
|
59
|
+
// }, {});
|
|
60
|
+
|
|
61
|
+
// return { tools, entityMesseges, entityTools };
|
|
62
|
+
// }
|
|
63
|
+
|
|
64
|
+
// export function createAiRouter(
|
|
65
|
+
// orm: Orm,
|
|
66
|
+
// authMiddleware: ReturnType<typeof createAuthMiddleware>,
|
|
67
|
+
// settings: Tooling
|
|
68
|
+
// ) {
|
|
69
|
+
// const aiRouter: Router = express.Router();
|
|
70
|
+
|
|
71
|
+
// aiRouter.use(bodyParser.json());
|
|
72
|
+
|
|
73
|
+
// aiRouter.post("/completion/:name", authMiddleware, async (req: AuthRequest, res: Response) => {
|
|
74
|
+
// try {
|
|
75
|
+
// const { id, message } = req.body as {
|
|
76
|
+
// id: string;
|
|
77
|
+
// message: Message;
|
|
78
|
+
// };
|
|
79
|
+
// logger.info(req.body, "body");
|
|
80
|
+
// const { name } = req.params;
|
|
81
|
+
// const user = req.user!;
|
|
82
|
+
// logger.info(message, "Received message:");
|
|
83
|
+
|
|
84
|
+
// const { tools, entityMesseges, entityTools } = await getRouteSettings(id, name, settings);
|
|
85
|
+
|
|
86
|
+
// const [chat] = await orm
|
|
87
|
+
// .select()
|
|
88
|
+
// .from(schema.chats)
|
|
89
|
+
// .where(and(eq(schema.chats.id, id), eq(schema.chats.userId, user.id)));
|
|
90
|
+
// if (!chat) throw new Error("Chat not found");
|
|
91
|
+
|
|
92
|
+
// const messages = appendClientMessage({
|
|
93
|
+
// messages: (chat.conversation || []) as Message[],
|
|
94
|
+
// message,
|
|
95
|
+
// });
|
|
96
|
+
|
|
97
|
+
// // Process any tool invocations in the message
|
|
98
|
+
// const toolInvocation = message?.parts?.find((p) => p.type === "tool-invocation");
|
|
99
|
+
|
|
100
|
+
// if (toolInvocation) {
|
|
101
|
+
// logger.info(toolInvocation, "Processing tool invocation:");
|
|
102
|
+
// const tool =
|
|
103
|
+
// entityTools[toolInvocation.toolInvocation.toolName as keyof typeof entityTools];
|
|
104
|
+
// if (tool?.handler) {
|
|
105
|
+
// const result = await tool.handler(id, toolInvocation.toolInvocation.args, user);
|
|
106
|
+
// logger.info({ result }, "Tool handler result:");
|
|
107
|
+
// }
|
|
108
|
+
// }
|
|
109
|
+
|
|
110
|
+
// logger.info([...entityMesseges, ...messages], "Processed messages");
|
|
111
|
+
|
|
112
|
+
// const result = streamText({
|
|
113
|
+
// model: tracedOpenAiModel("gpt-4o", {
|
|
114
|
+
// posthogDistinctId: user.id,
|
|
115
|
+
// posthogProperties: { conversation_id: id, paid: true },
|
|
116
|
+
// posthogPrivacyMode: false,
|
|
117
|
+
// }),
|
|
118
|
+
// experimental_generateMessageId: uuidv4,
|
|
119
|
+
// messages: [...entityMesseges, ...messages],
|
|
120
|
+
// async onFinish({ response }) {
|
|
121
|
+
// logger.info(response, "Final response:");
|
|
122
|
+
// try {
|
|
123
|
+
// await orm
|
|
124
|
+
// .update(schema.chats)
|
|
125
|
+
// .set({
|
|
126
|
+
// conversation: appendResponseMessages({
|
|
127
|
+
// messages,
|
|
128
|
+
// responseMessages: response.messages,
|
|
129
|
+
// }),
|
|
130
|
+
// })
|
|
131
|
+
// .where(eq(schema.chats.id, id));
|
|
132
|
+
// } catch (error) {
|
|
133
|
+
// logger.error("Error in onFinish handler:", error);
|
|
134
|
+
// }
|
|
135
|
+
// },
|
|
136
|
+
// tools,
|
|
137
|
+
// });
|
|
138
|
+
|
|
139
|
+
// result.consumeStream();
|
|
140
|
+
// result.pipeDataStreamToResponse(res);
|
|
141
|
+
// } catch (error) {
|
|
142
|
+
// logger.error(error, "Error in ai handler");
|
|
143
|
+
// res.status(500).send({ error: "Internal Server Error" });
|
|
144
|
+
// }
|
|
145
|
+
// });
|
|
146
|
+
|
|
147
|
+
// return aiRouter;
|
|
148
|
+
// }
|
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
import {
|
|
2
|
+
type AiEmbeddingModel,
|
|
3
|
+
type AiModel,
|
|
4
|
+
OPENAI_TEXT_EMBEDDING_3_SMALL,
|
|
5
|
+
} from "@m5kdev/commons/modules/ai/ai.constants";
|
|
6
|
+
import { arrayToPseudoXML } from "@m5kdev/commons/modules/ai/ai.utils";
|
|
7
|
+
import type { Mastra } from "@mastra/core";
|
|
8
|
+
import { RequestContext } from "@mastra/core/request-context";
|
|
9
|
+
import type { FullOutput, MastraModelOutput } from "@mastra/core/stream";
|
|
10
|
+
import { MDocument } from "@mastra/rag";
|
|
11
|
+
import type { OpenRouterProvider } from "@openrouter/ai-sdk-provider";
|
|
12
|
+
import { embed, embedMany, generateObject, generateText } from "ai";
|
|
13
|
+
import { err, ok } from "neverthrow";
|
|
14
|
+
import type Replicate from "replicate";
|
|
15
|
+
import type { ZodType, z } from "zod";
|
|
16
|
+
import type { AiUsageRepository, AiUsageRow } from "#modules/ai/ai.repository";
|
|
17
|
+
import type {
|
|
18
|
+
IdeogramV3GenerateInput,
|
|
19
|
+
IdeogramV3GenerateOutput,
|
|
20
|
+
} from "#modules/ai/ideogram/ideogram.dto";
|
|
21
|
+
import type { IdeogramService } from "#modules/ai/ideogram/ideogram.service";
|
|
22
|
+
import type { User } from "#modules/auth/auth.lib";
|
|
23
|
+
import type { ServerResultAsync } from "#modules/base/base.dto";
|
|
24
|
+
import { BaseService } from "#modules/base/base.service";
|
|
25
|
+
|
|
26
|
+
type MastraAgent = ReturnType<Mastra["getAgent"]>;
|
|
27
|
+
type MastraAgentGenerateOptions = Parameters<MastraAgent["generate"]>[1];
|
|
28
|
+
type MessageListInput = { role: "user" | "assistant" | "system"; content: string }[];
|
|
29
|
+
|
|
30
|
+
export class AIService<MastraInstance extends Mastra> extends BaseService<
|
|
31
|
+
{ aiUsage?: AiUsageRepository },
|
|
32
|
+
{ ideogram?: IdeogramService }
|
|
33
|
+
> {
|
|
34
|
+
helpers = {
|
|
35
|
+
arrayToPseudoXML,
|
|
36
|
+
};
|
|
37
|
+
|
|
38
|
+
mastra?: MastraInstance;
|
|
39
|
+
openrouter?: OpenRouterProvider;
|
|
40
|
+
replicate?: Replicate;
|
|
41
|
+
|
|
42
|
+
constructor(
|
|
43
|
+
repositories: { aiUsage?: AiUsageRepository },
|
|
44
|
+
services: { ideogram?: IdeogramService },
|
|
45
|
+
libs: { mastra?: MastraInstance; openrouter?: OpenRouterProvider; replicate?: Replicate }
|
|
46
|
+
) {
|
|
47
|
+
super(repositories, services);
|
|
48
|
+
this.mastra = libs.mastra;
|
|
49
|
+
this.openrouter = libs.openrouter;
|
|
50
|
+
this.replicate = libs.replicate;
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
getMastra(): MastraInstance {
|
|
54
|
+
if (!this.mastra) {
|
|
55
|
+
throw new Error("Mastra is not available");
|
|
56
|
+
}
|
|
57
|
+
return this.mastra;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
prepareModel(model: AiModel): any {
|
|
61
|
+
if (!this.openrouter) {
|
|
62
|
+
throw new Error("OpenRouter is not configured");
|
|
63
|
+
}
|
|
64
|
+
const openrouterModel = this.openrouter.chat(model);
|
|
65
|
+
return openrouterModel;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
prepareEmbeddingModel(model: AiEmbeddingModel): any {
|
|
69
|
+
if (!this.openrouter) {
|
|
70
|
+
throw new Error("OpenRouter is not configured");
|
|
71
|
+
}
|
|
72
|
+
const openrouterModel = this.openrouter.textEmbeddingModel(model);
|
|
73
|
+
return openrouterModel;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
async agentUse(
|
|
77
|
+
agent: string,
|
|
78
|
+
options: MastraAgentGenerateOptions & { prompt?: string; messages?: MessageListInput },
|
|
79
|
+
ctx?: { user: User; model?: string }
|
|
80
|
+
): ServerResultAsync<Awaited<ReturnType<MastraModelOutput<any>["getFullOutput"]>>> {
|
|
81
|
+
return this.throwableAsync(async () => {
|
|
82
|
+
this.logger.info("AGENT USE");
|
|
83
|
+
const { prompt, messages, ...rest } = options;
|
|
84
|
+
const payload = messages || prompt;
|
|
85
|
+
if (!payload) return this.error("BAD_REQUEST", "No prompt or messages provided");
|
|
86
|
+
const requestContext = options.requestContext ?? new RequestContext();
|
|
87
|
+
|
|
88
|
+
if (ctx?.user) {
|
|
89
|
+
requestContext.set("userId", ctx.user.id);
|
|
90
|
+
}
|
|
91
|
+
if (ctx?.model) {
|
|
92
|
+
requestContext.set("model", ctx.model);
|
|
93
|
+
}
|
|
94
|
+
const mAgent = this.getMastra().getAgent(agent);
|
|
95
|
+
|
|
96
|
+
const result = await mAgent.generate(payload as any, {
|
|
97
|
+
...rest,
|
|
98
|
+
requestContext: rest.requestContext ?? requestContext,
|
|
99
|
+
});
|
|
100
|
+
this.logger.info("AGENT USE DONE");
|
|
101
|
+
if (this.repository.aiUsage) {
|
|
102
|
+
const createUsageResult = await this.repository.aiUsage.create({
|
|
103
|
+
userId: ctx?.user?.id,
|
|
104
|
+
model: ctx?.model ?? "unknown",
|
|
105
|
+
provider: "openrouter",
|
|
106
|
+
feature: agent,
|
|
107
|
+
traceId: result.traceId,
|
|
108
|
+
inputTokens: result.usage.inputTokens,
|
|
109
|
+
outputTokens: result.usage.outputTokens,
|
|
110
|
+
totalTokens: result.usage.totalTokens,
|
|
111
|
+
cost: (result?.providerMetadata?.openrouter?.usage as any)?.cost ?? 0,
|
|
112
|
+
});
|
|
113
|
+
if (createUsageResult.isErr()) return err(createUsageResult.error);
|
|
114
|
+
}
|
|
115
|
+
this.logger.info("AGENT USE CREATED USAGE");
|
|
116
|
+
return ok(result);
|
|
117
|
+
});
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
async agentText(
|
|
121
|
+
agent: string,
|
|
122
|
+
options: MastraAgentGenerateOptions & { prompt?: string; messages?: MessageListInput },
|
|
123
|
+
ctx?: { user: User; model?: string }
|
|
124
|
+
): ServerResultAsync<string> {
|
|
125
|
+
const result = await this.agentUse(agent, options, ctx);
|
|
126
|
+
if (result.isErr())
|
|
127
|
+
return this.error("SERVICE_UNAVAILABLE", "AI: Agent text failed", { cause: result.error });
|
|
128
|
+
return ok(result.value.text);
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
async agentTextResult(
|
|
132
|
+
agent: string,
|
|
133
|
+
options: MastraAgentGenerateOptions & { prompt?: string; messages?: MessageListInput },
|
|
134
|
+
ctx?: { user: User; model?: string }
|
|
135
|
+
): ServerResultAsync<FullOutput<any>> {
|
|
136
|
+
const result = await this.agentUse(agent, options, ctx);
|
|
137
|
+
if (result.isErr()) return err(result.error);
|
|
138
|
+
return ok(result.value);
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
async agentObject<T extends ZodType<any>>(
|
|
142
|
+
agent: string,
|
|
143
|
+
options: MastraAgentGenerateOptions & {
|
|
144
|
+
schema: T;
|
|
145
|
+
prompt?: string;
|
|
146
|
+
messages?: MessageListInput;
|
|
147
|
+
},
|
|
148
|
+
ctx?: { user: User; model?: string }
|
|
149
|
+
): ServerResultAsync<z.infer<T>> {
|
|
150
|
+
const { schema, ...rest } = options;
|
|
151
|
+
const result = await this.agentUse(agent, { ...rest, structuredOutput: { schema } }, ctx);
|
|
152
|
+
if (result.isErr())
|
|
153
|
+
return this.error("SERVICE_UNAVAILABLE", "AI: Agent object failed", { cause: result.error });
|
|
154
|
+
return ok(result.value.object as z.infer<T>);
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
async agentObjectResult<T extends ZodType<any>>(
|
|
158
|
+
agent: string,
|
|
159
|
+
options: MastraAgentGenerateOptions & {
|
|
160
|
+
schema: T;
|
|
161
|
+
prompt?: string;
|
|
162
|
+
messages?: MessageListInput;
|
|
163
|
+
},
|
|
164
|
+
ctx?: { user: User; model?: string }
|
|
165
|
+
): ServerResultAsync<FullOutput<any> & { object: z.infer<T> }> {
|
|
166
|
+
this.logger.info("AGENT OBJECT RESULT");
|
|
167
|
+
const { schema, ...rest } = options;
|
|
168
|
+
const result = await this.agentUse(agent, { ...rest, structuredOutput: { schema } }, ctx);
|
|
169
|
+
if (result.isErr()) return err(result.error);
|
|
170
|
+
this.logger.info("AGENT OBJECT RESULT DONE");
|
|
171
|
+
return ok({ ...result.value, object: result.value.object as z.infer<T> });
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
async embedDocument(
|
|
175
|
+
value: string,
|
|
176
|
+
options?: Parameters<ReturnType<typeof MDocument.fromText>["chunk"]>[0],
|
|
177
|
+
type: "text" | "markdown" | "html" | "json" = "text",
|
|
178
|
+
model: AiEmbeddingModel = OPENAI_TEXT_EMBEDDING_3_SMALL
|
|
179
|
+
): ServerResultAsync<{ embeddings: number[][]; chunks: { text: string }[] }> {
|
|
180
|
+
return this.throwableAsync(async () => {
|
|
181
|
+
if (type === "text") {
|
|
182
|
+
const doc = MDocument.fromText(value);
|
|
183
|
+
const chunks = await doc.chunk(
|
|
184
|
+
options ?? {
|
|
185
|
+
strategy: "recursive",
|
|
186
|
+
maxSize: 512,
|
|
187
|
+
overlap: 50,
|
|
188
|
+
separators: ["\n"],
|
|
189
|
+
}
|
|
190
|
+
);
|
|
191
|
+
const embeddings = await this.embedMany(chunks, model);
|
|
192
|
+
if (embeddings.isErr()) return err(embeddings.error);
|
|
193
|
+
return ok({ embeddings: embeddings.value.embeddings, chunks });
|
|
194
|
+
}
|
|
195
|
+
return this.error("BAD_REQUEST", "Unsupported document type");
|
|
196
|
+
});
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
async embed(
|
|
200
|
+
text: string,
|
|
201
|
+
model: AiEmbeddingModel = OPENAI_TEXT_EMBEDDING_3_SMALL
|
|
202
|
+
): ServerResultAsync<{ embedding: number[] }> {
|
|
203
|
+
return this.throwableAsync(async () => {
|
|
204
|
+
const result = await embed({
|
|
205
|
+
model: this.prepareEmbeddingModel(model),
|
|
206
|
+
value: text,
|
|
207
|
+
});
|
|
208
|
+
return ok(result);
|
|
209
|
+
});
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
async embedMany(
|
|
213
|
+
chunks: { text: string }[],
|
|
214
|
+
model: AiEmbeddingModel = OPENAI_TEXT_EMBEDDING_3_SMALL
|
|
215
|
+
): ServerResultAsync<{ embeddings: number[][] }> {
|
|
216
|
+
return this.throwableAsync(async () => {
|
|
217
|
+
const result = await embedMany({
|
|
218
|
+
model: this.prepareEmbeddingModel(model),
|
|
219
|
+
values: chunks.map((chunk) => chunk.text),
|
|
220
|
+
});
|
|
221
|
+
return ok(result);
|
|
222
|
+
});
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
async generateText(
|
|
226
|
+
params: Omit<Parameters<typeof generateText>[0], "model"> & {
|
|
227
|
+
model: AiModel;
|
|
228
|
+
removeMDash?: boolean;
|
|
229
|
+
}
|
|
230
|
+
): ServerResultAsync<string> {
|
|
231
|
+
return this.throwableAsync(async () => {
|
|
232
|
+
const { removeMDash = true, model, ...rest } = params;
|
|
233
|
+
const result = await generateText({ ...rest, model: this.prepareModel(model) });
|
|
234
|
+
return ok(removeMDash ? result.text.replace(/\u2013|\u2014/g, "-") : result.text);
|
|
235
|
+
});
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
async generateObject<T extends ZodType>(
|
|
239
|
+
params: Omit<Parameters<typeof generateObject<T>>[0], "model" | "schema"> & {
|
|
240
|
+
model: AiModel;
|
|
241
|
+
schema: T;
|
|
242
|
+
}
|
|
243
|
+
): ServerResultAsync<z.infer<T>> {
|
|
244
|
+
return this.throwableAsync(async () => {
|
|
245
|
+
const model = this.prepareModel(params.model);
|
|
246
|
+
const result = await generateObject({
|
|
247
|
+
...params,
|
|
248
|
+
model,
|
|
249
|
+
schema: params.schema,
|
|
250
|
+
});
|
|
251
|
+
return ok(result.object as z.infer<T>);
|
|
252
|
+
});
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
async generateReplicate(
|
|
256
|
+
model: Parameters<Replicate["run"]>[0],
|
|
257
|
+
options: Parameters<Replicate["run"]>[1]
|
|
258
|
+
): ServerResultAsync<object> {
|
|
259
|
+
return this.throwableAsync(async () => {
|
|
260
|
+
if (!this.replicate) {
|
|
261
|
+
return this.error("INTERNAL_SERVER_ERROR", "Replicate is not configured");
|
|
262
|
+
}
|
|
263
|
+
try {
|
|
264
|
+
return ok(await this.replicate.run(model, options));
|
|
265
|
+
} catch (error) {
|
|
266
|
+
return this.error("INTERNAL_SERVER_ERROR", undefined, { cause: error });
|
|
267
|
+
}
|
|
268
|
+
});
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
async generateTranscript(
|
|
272
|
+
file_url: string
|
|
273
|
+
): ServerResultAsync<{ text: string; metadata: unknown }> {
|
|
274
|
+
const output = await this.generateReplicate(
|
|
275
|
+
"thomasmol/whisper-diarization:1495a9cddc83b2203b0d8d3516e38b80fd1572ebc4bc5700ac1da56a9b3ed886",
|
|
276
|
+
{
|
|
277
|
+
input: {
|
|
278
|
+
file_url,
|
|
279
|
+
},
|
|
280
|
+
}
|
|
281
|
+
);
|
|
282
|
+
|
|
283
|
+
if (output.isErr()) return err(output.error);
|
|
284
|
+
|
|
285
|
+
try {
|
|
286
|
+
const { segments } = output.value as { segments: { text: string }[] };
|
|
287
|
+
return ok({ text: segments.map((segment) => segment.text).join(""), metadata: segments });
|
|
288
|
+
} catch (error) {
|
|
289
|
+
return this.error("INTERNAL_SERVER_ERROR", undefined, { cause: error });
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
async generateIdeogram(
|
|
294
|
+
input: IdeogramV3GenerateInput
|
|
295
|
+
): ServerResultAsync<IdeogramV3GenerateOutput> {
|
|
296
|
+
if (!this.service.ideogram) {
|
|
297
|
+
return this.error("INTERNAL_SERVER_ERROR", "Ideogram service is not available");
|
|
298
|
+
}
|
|
299
|
+
return this.service.ideogram.generate(input);
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
async getUsage(
|
|
303
|
+
userId: string
|
|
304
|
+
): ServerResultAsync<Pick<AiUsageRow, "inputTokens" | "outputTokens" | "totalTokens" | "cost">> {
|
|
305
|
+
if (!this.repository.aiUsage) {
|
|
306
|
+
return this.error("INTERNAL_SERVER_ERROR", "AI usage repository is not available");
|
|
307
|
+
}
|
|
308
|
+
return this.repository.aiUsage.getUsage(userId);
|
|
309
|
+
}
|
|
310
|
+
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import type { Mastra } from "@mastra/core";
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
import type { AIService } from "#modules/ai/ai.service";
|
|
4
|
+
import { adminProcedure, handleTRPCResult, router } from "#trpc";
|
|
5
|
+
|
|
6
|
+
export function createAITRPC<MastraInstance extends Mastra>(aiService: AIService<MastraInstance>) {
|
|
7
|
+
return router({
|
|
8
|
+
getUserUsage: adminProcedure
|
|
9
|
+
.input(z.object({ userId: z.string() }))
|
|
10
|
+
.output(
|
|
11
|
+
z.object({
|
|
12
|
+
inputTokens: z.number().nullable(),
|
|
13
|
+
outputTokens: z.number().nullable(),
|
|
14
|
+
totalTokens: z.number().nullable(),
|
|
15
|
+
cost: z.number().nullable(),
|
|
16
|
+
})
|
|
17
|
+
)
|
|
18
|
+
.query(async ({ input }) => {
|
|
19
|
+
return handleTRPCResult(await aiService.getUsage(input.userId));
|
|
20
|
+
}),
|
|
21
|
+
});
|
|
22
|
+
}
|