@mzhub/mem-ts 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +335 -0
- package/dist/BaseAdapter-BoRh1T7O.d.mts +75 -0
- package/dist/BaseAdapter-CQVX-gcA.d.ts +75 -0
- package/dist/BaseProvider-CEoiLGj5.d.ts +34 -0
- package/dist/BaseProvider-edMh_R9t.d.mts +34 -0
- package/dist/adapters/index.d.mts +259 -0
- package/dist/adapters/index.d.ts +259 -0
- package/dist/adapters/index.js +1570 -0
- package/dist/adapters/index.js.map +1 -0
- package/dist/adapters/index.mjs +1542 -0
- package/dist/adapters/index.mjs.map +1 -0
- package/dist/index-Ci5Q9G9H.d.mts +289 -0
- package/dist/index-Dl-Q2au9.d.ts +289 -0
- package/dist/index.d.mts +1206 -0
- package/dist/index.d.ts +1206 -0
- package/dist/index.js +5126 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +5058 -0
- package/dist/index.mjs.map +1 -0
- package/dist/middleware/index.d.mts +4 -0
- package/dist/middleware/index.d.ts +4 -0
- package/dist/middleware/index.js +63 -0
- package/dist/middleware/index.js.map +1 -0
- package/dist/middleware/index.mjs +59 -0
- package/dist/middleware/index.mjs.map +1 -0
- package/dist/providers/index.d.mts +96 -0
- package/dist/providers/index.d.ts +96 -0
- package/dist/providers/index.js +379 -0
- package/dist/providers/index.js.map +1 -0
- package/dist/providers/index.mjs +370 -0
- package/dist/providers/index.mjs.map +1 -0
- package/dist/types-G9qmfSeZ.d.mts +260 -0
- package/dist/types-G9qmfSeZ.d.ts +260 -0
- package/logo.png +0 -0
- package/package.json +114 -0
@@ -0,0 +1,4 @@
+export { b as MemoryMiddlewareOptions, e as MiddlewareRequest, f as MiddlewareResponse, N as NextFunction, c as createMemoryMiddleware, d as digestAfterResponse, w as withMemory } from '../index-Ci5Q9G9H.mjs';
+import '../types-G9qmfSeZ.mjs';
+import '../BaseAdapter-BoRh1T7O.mjs';
+import '../BaseProvider-edMh_R9t.mjs';
@@ -0,0 +1,4 @@
+export { b as MemoryMiddlewareOptions, e as MiddlewareRequest, f as MiddlewareResponse, N as NextFunction, c as createMemoryMiddleware, d as digestAfterResponse, w as withMemory } from '../index-Dl-Q2au9.js';
+import '../types-G9qmfSeZ.js';
+import '../BaseAdapter-CQVX-gcA.js';
+import '../BaseProvider-CEoiLGj5.js';
@@ -0,0 +1,63 @@
+'use strict';
+
+// src/middleware/index.ts
+function createMemoryMiddleware(memory, options = {}) {
+  const {
+    getUserId = (req) => req.user?.id || req.user?.userId || req.body?.userId,
+    getMessage = (req) => req.body?.message,
+    attachToRequest = true
+  } = options;
+  return async (req, res, next) => {
+    try {
+      const userId = getUserId(req);
+      const message = getMessage(req);
+      if (!userId || !message) {
+        return next();
+      }
+      const context = await memory.hydrate(userId, message);
+      if (attachToRequest) {
+        req.memoryContext = context;
+        if (res.locals) {
+          res.locals.memoryContext = context;
+        }
+      }
+      next();
+    } catch (error) {
+      next(error);
+    }
+  };
+}
+function digestAfterResponse(memory, userId, userMessage, assistantResponse) {
+  setImmediate(() => {
+    memory.digest(userId, userMessage, assistantResponse);
+  });
+}
+function withMemory(memory, handler, options = {}) {
+  return async (req) => {
+    try {
+      const body = await req.json();
+      const userId = options.getUserId?.(req) || body?.userId;
+      const message = options.getMessage?.(body) || body?.message;
+      if (!userId || !message) {
+        return new Response(
+          JSON.stringify({ error: "userId and message are required" }),
+          { status: 400, headers: { "Content-Type": "application/json" } }
+        );
+      }
+      const context = await memory.hydrate(userId, message);
+      return handler(req, context);
+    } catch (error) {
+      const message = error instanceof Error ? error.message : "Internal error";
+      return new Response(JSON.stringify({ error: message }), {
+        status: 500,
+        headers: { "Content-Type": "application/json" }
+      });
+    }
+  };
+}
+
+exports.createMemoryMiddleware = createMemoryMiddleware;
+exports.digestAfterResponse = digestAfterResponse;
+exports.withMemory = withMemory;
+//# sourceMappingURL=index.js.map
+//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/middleware/index.ts"],"names":[],"mappings":";;;AAiFO,SAAS,sBAAA,CACd,MAAA,EACA,OAAA,GAAmC,EAAC,EACpC;AACA,EAAA,MAAM;AAAA,IACJ,SAAA,GAAY,CAAC,GAAA,KAAQ,GAAA,CAAI,IAAA,EAAM,MAAM,GAAA,CAAI,IAAA,EAAM,MAAA,IAAU,GAAA,CAAI,IAAA,EAAM,MAAA;AAAA,IACnE,UAAA,GAAa,CAAC,GAAA,KAAQ,GAAA,CAAI,IAAA,EAAM,OAAA;AAAA,IAChC,eAAA,GAAkB;AAAA,GACpB,GAAI,OAAA;AAEJ,EAAA,OAAO,OACL,GAAA,EACA,GAAA,EACA,IAAA,KACkB;AAClB,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,UAAU,GAAG,CAAA;AAC5B,MAAA,MAAM,OAAA,GAAU,WAAW,GAAG,CAAA;AAE9B,MAAA,IAAI,CAAC,MAAA,IAAU,CAAC,OAAA,EAAS;AACvB,QAAA,OAAO,IAAA,EAAK;AAAA,MACd;AAGA,MAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,OAAA,CAAQ,QAAQ,OAAO,CAAA;AAGpD,MAAA,IAAI,eAAA,EAAiB;AACnB,QAAA,GAAA,CAAI,aAAA,GAAgB,OAAA;AACpB,QAAA,IAAI,IAAI,MAAA,EAAQ;AACd,UAAA,GAAA,CAAI,OAAO,aAAA,GAAgB,OAAA;AAAA,QAC7B;AAAA,MACF;AAEA,MAAA,IAAA,EAAK;AAAA,IACP,SAAS,KAAA,EAAO;AACd,MAAA,IAAA,CAAK,KAAK,CAAA;AAAA,IACZ;AAAA,EACF,CAAA;AACF;AAiBO,SAAS,mBAAA,CACd,MAAA,EACA,MAAA,EACA,WAAA,EACA,iBAAA,EACM;AAEN,EAAA,YAAA,CAAa,MAAM;AACjB,IAAA,MAAA,CAAO,MAAA,CAAO,MAAA,EAAQ,WAAA,EAAa,iBAAiB,CAAA;AAAA,EACtD,CAAC,CAAA;AACH;AAmBO,SAAS,UAAA,CAGd,MAAA,EACA,OAAA,EACA,OAAA,GAGI,EAAC,EACL;AACA,EAAA,OAAO,OAAO,GAAA,KAA8B;AAC1C,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,GAAA,CAAI,IAAA,EAAK;AAC5B,MAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,SAAA,GAAY,GAAG,KAAK,IAAA,EAAM,MAAA;AACjD,MAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,UAAA,GAAa,IAAI,KAAK,IAAA,EAAM,OAAA;AAEpD,MAAA,IAAI,CAAC,MAAA,IAAU,CAAC,OAAA,EAAS;AACvB,QAAA,OAAO,IAAI,QAAA;AAAA,UACT,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,mCAAmC,CAAA;AAAA,UAC3D,EAAE,MAAA,EAAQ,GAAA,EAAK,SAAS,EAAE,cAAA,EAAgB,oBAAmB;AAAE,SACjE;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,OAAA,CAAQ,QAAQ,OAAO,CAAA;AACpD,MAAA,OAAO,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,IAC7B,SAAS,KAAA,EAAO;AACd,MAAA,MAAM,OAAA,GAAU,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,gBAAA;AACzD,MAAA,OAAO,IAAI,SAAS,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,OAAA,EAAS,CAAA,EAAG;AAAA,QACtD,MAAA,EAAQ,GAAA;AAAA,QACR,OAAA,EAAS,EAAE,cAAA,EAAgB,kBAAA;AAAmB,OAC/C,CAAA;AAAA,IACH;AAAA,EACF,CAAA;AACF","file":"index.js","sourcesContent":["import type { MemoryOS } from \"../MemoryOS\";\r\nimport type { HydratedContext } from \"../types\";\r\n\r\n/**\r\n * Express/Connect-style request object\r\n */\r\nexport interface MiddlewareRequest {\r\n body?: { userId?: string; message?: string; [key: string]: unknown };\r\n params?: Record<string, string>;\r\n query?: Record<string, string>;\r\n headers?: Record<string, string | string[] | undefined>;\r\n user?: { id?: string; userId?: string; [key: string]: unknown };\r\n memoryContext?: HydratedContext;\r\n}\r\n\r\n/**\r\n * Express/Connect-style response object\r\n */\r\nexport interface MiddlewareResponse {\r\n locals?: Record<string, unknown>;\r\n json?: (body: unknown) => void;\r\n on?: (event: string, callback: () => void) => void;\r\n}\r\n\r\n/**\r\n * Next function to call the next middleware\r\n */\r\nexport type NextFunction = (error?: unknown) => void;\r\n\r\n/**\r\n * Options for the memory middleware\r\n */\r\nexport interface MemoryMiddlewareOptions {\r\n /** Function to extract userId from request */\r\n getUserId?: (req: MiddlewareRequest) => string | undefined;\r\n /** Function to extract user message from request */\r\n getMessage?: (req: MiddlewareRequest) => string | undefined;\r\n /** Attach context to request object */\r\n attachToRequest?: boolean;\r\n /** Auto-digest on response finish (requires response body capture) */\r\n autoDigest?: boolean;\r\n}\r\n\r\n/**\r\n * Result attached to request/response\r\n */\r\ndeclare global {\r\n // 
eslint-disable-next-line @typescript-eslint/no-namespace\r\n namespace Express {\r\n interface Request {\r\n memoryContext?: HydratedContext;\r\n }\r\n interface Locals {\r\n memoryContext?: HydratedContext;\r\n }\r\n }\r\n}\r\n\r\n/**\r\n * Create Express middleware for automatic context hydration.\r\n *\r\n * @example\r\n * ```typescript\r\n * import express from 'express';\r\n * import { MemoryOS } from 'mem-ts';\r\n * import { createMemoryMiddleware } from 'mem-ts/middleware';\r\n *\r\n * const app = express();\r\n * const memory = new MemoryOS({ ... });\r\n *\r\n * app.use('/chat', createMemoryMiddleware(memory, {\r\n * getUserId: (req) => req.user?.id,\r\n * getMessage: (req) => req.body?.message,\r\n * }));\r\n *\r\n * app.post('/chat', (req, res) => {\r\n * const context = req.memoryContext;\r\n * // Use context.compiledPrompt in your LLM call\r\n * });\r\n * ```\r\n */\r\nexport function createMemoryMiddleware(\r\n memory: MemoryOS,\r\n options: MemoryMiddlewareOptions = {}\r\n) {\r\n const {\r\n getUserId = (req) => req.user?.id || req.user?.userId || req.body?.userId,\r\n getMessage = (req) => req.body?.message,\r\n attachToRequest = true,\r\n } = options;\r\n\r\n return async (\r\n req: MiddlewareRequest,\r\n res: MiddlewareResponse,\r\n next: NextFunction\r\n ): Promise<void> => {\r\n try {\r\n const userId = getUserId(req);\r\n const message = getMessage(req);\r\n\r\n if (!userId || !message) {\r\n return next();\r\n }\r\n\r\n // Hydrate context\r\n const context = await memory.hydrate(userId, message);\r\n\r\n // Attach to request\r\n if (attachToRequest) {\r\n req.memoryContext = context;\r\n if (res.locals) {\r\n res.locals.memoryContext = context;\r\n }\r\n }\r\n\r\n next();\r\n } catch (error) {\r\n next(error);\r\n }\r\n };\r\n}\r\n\r\n/**\r\n * Helper function to digest after response in Express.\r\n * Call this after sending the response.\r\n *\r\n * @example\r\n * ```typescript\r\n * app.post('/chat', async (req, res) => {\r\n * const response = await callLLM(req.memoryContext, req.body.message);\r\n * res.json({ message: response });\r\n *\r\n * // Digest in background\r\n * digestAfterResponse(memory, req.user.id, req.body.message, response);\r\n * });\r\n * ```\r\n */\r\nexport function digestAfterResponse(\r\n memory: MemoryOS,\r\n userId: string,\r\n userMessage: string,\r\n assistantResponse: string\r\n): void {\r\n // Fire and forget\r\n setImmediate(() => {\r\n memory.digest(userId, userMessage, assistantResponse);\r\n });\r\n}\r\n\r\n/**\r\n * Create a Next.js API route handler wrapper.\r\n *\r\n * @example\r\n * ```typescript\r\n * // pages/api/chat.ts or app/api/chat/route.ts\r\n * import { withMemory } from 'mem-ts/middleware';\r\n *\r\n * export const POST = withMemory(memory, async (req, context) => {\r\n * const { message } = await req.json();\r\n * const response = await callLLM(context.compiledPrompt, message);\r\n * return Response.json({ message: response });\r\n * }, {\r\n * getUserId: (req) => req.headers.get('x-user-id'),\r\n * });\r\n * ```\r\n */\r\nexport function withMemory<\r\n T extends { json: () => Promise<{ message?: string; userId?: string }> }\r\n>(\r\n memory: MemoryOS,\r\n handler: (req: T, context: HydratedContext) => Promise<Response>,\r\n options: {\r\n getUserId?: (req: T) => string | null | undefined;\r\n getMessage?: (body: { message?: string }) => string | undefined;\r\n } = {}\r\n) {\r\n return async (req: T): Promise<Response> => {\r\n try {\r\n const body = await req.json();\r\n const userId = options.getUserId?.(req) || 
body?.userId;\r\n const message = options.getMessage?.(body) || body?.message;\r\n\r\n if (!userId || !message) {\r\n return new Response(\r\n JSON.stringify({ error: \"userId and message are required\" }),\r\n { status: 400, headers: { \"Content-Type\": \"application/json\" } }\r\n );\r\n }\r\n\r\n const context = await memory.hydrate(userId, message);\r\n return handler(req, context);\r\n } catch (error) {\r\n const message = error instanceof Error ? error.message : \"Internal error\";\r\n return new Response(JSON.stringify({ error: message }), {\r\n status: 500,\r\n headers: { \"Content-Type\": \"application/json\" },\r\n });\r\n }\r\n };\r\n}\r\n"]}
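The JSDoc embedded in the source map above documents the intended Express flow: `createMemoryMiddleware` hydrates memory onto the request, and `digestAfterResponse` writes the exchange back after the response is sent. Below is a minimal sketch assembled from those examples; the `@mzhub/mem-ts` import paths are assumed from the published package name (the JSDoc itself imports from `mem-ts`), the `MemoryOS` config is not shown in this diff, and `callLLM` is a placeholder for your own model call.

```typescript
// Sketch based on the JSDoc examples above; import paths and config are assumptions.
import express from 'express';
import { MemoryOS } from '@mzhub/mem-ts';
import { createMemoryMiddleware, digestAfterResponse } from '@mzhub/mem-ts/middleware';

// Placeholder LLM call -- not part of the package.
async function callLLM(context: unknown, message: string): Promise<string> {
  return `echo: ${message}`;
}

const app = express();
app.use(express.json());
// MemoryOS constructor options are not part of this hunk, hence the cast.
const memory = new MemoryOS({ /* provider + adapter config */ } as any);

// Hydrates memory for matching requests and attaches it as req.memoryContext.
app.use('/chat', createMemoryMiddleware(memory, {
  getUserId: (req) => req.body?.userId,
  getMessage: (req) => req.body?.message,
}));

app.post('/chat', async (req, res) => {
  const response = await callLLM(req.memoryContext, req.body.message);
  res.json({ message: response });

  // Fire-and-forget: digest the exchange after the response is sent.
  digestAfterResponse(memory, req.body.userId, req.body.message, response);
});
```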
@@ -0,0 +1,59 @@
+// src/middleware/index.ts
+function createMemoryMiddleware(memory, options = {}) {
+  const {
+    getUserId = (req) => req.user?.id || req.user?.userId || req.body?.userId,
+    getMessage = (req) => req.body?.message,
+    attachToRequest = true
+  } = options;
+  return async (req, res, next) => {
+    try {
+      const userId = getUserId(req);
+      const message = getMessage(req);
+      if (!userId || !message) {
+        return next();
+      }
+      const context = await memory.hydrate(userId, message);
+      if (attachToRequest) {
+        req.memoryContext = context;
+        if (res.locals) {
+          res.locals.memoryContext = context;
+        }
+      }
+      next();
+    } catch (error) {
+      next(error);
+    }
+  };
+}
+function digestAfterResponse(memory, userId, userMessage, assistantResponse) {
+  setImmediate(() => {
+    memory.digest(userId, userMessage, assistantResponse);
+  });
+}
+function withMemory(memory, handler, options = {}) {
+  return async (req) => {
+    try {
+      const body = await req.json();
+      const userId = options.getUserId?.(req) || body?.userId;
+      const message = options.getMessage?.(body) || body?.message;
+      if (!userId || !message) {
+        return new Response(
+          JSON.stringify({ error: "userId and message are required" }),
+          { status: 400, headers: { "Content-Type": "application/json" } }
+        );
+      }
+      const context = await memory.hydrate(userId, message);
+      return handler(req, context);
+    } catch (error) {
+      const message = error instanceof Error ? error.message : "Internal error";
+      return new Response(JSON.stringify({ error: message }), {
+        status: 500,
+        headers: { "Content-Type": "application/json" }
+      });
+    }
+  };
+}
+
+export { createMemoryMiddleware, digestAfterResponse, withMemory };
+//# sourceMappingURL=index.mjs.map
+//# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/middleware/index.ts"],"names":[],"mappings":";AAiFO,SAAS,sBAAA,CACd,MAAA,EACA,OAAA,GAAmC,EAAC,EACpC;AACA,EAAA,MAAM;AAAA,IACJ,SAAA,GAAY,CAAC,GAAA,KAAQ,GAAA,CAAI,IAAA,EAAM,MAAM,GAAA,CAAI,IAAA,EAAM,MAAA,IAAU,GAAA,CAAI,IAAA,EAAM,MAAA;AAAA,IACnE,UAAA,GAAa,CAAC,GAAA,KAAQ,GAAA,CAAI,IAAA,EAAM,OAAA;AAAA,IAChC,eAAA,GAAkB;AAAA,GACpB,GAAI,OAAA;AAEJ,EAAA,OAAO,OACL,GAAA,EACA,GAAA,EACA,IAAA,KACkB;AAClB,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,UAAU,GAAG,CAAA;AAC5B,MAAA,MAAM,OAAA,GAAU,WAAW,GAAG,CAAA;AAE9B,MAAA,IAAI,CAAC,MAAA,IAAU,CAAC,OAAA,EAAS;AACvB,QAAA,OAAO,IAAA,EAAK;AAAA,MACd;AAGA,MAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,OAAA,CAAQ,QAAQ,OAAO,CAAA;AAGpD,MAAA,IAAI,eAAA,EAAiB;AACnB,QAAA,GAAA,CAAI,aAAA,GAAgB,OAAA;AACpB,QAAA,IAAI,IAAI,MAAA,EAAQ;AACd,UAAA,GAAA,CAAI,OAAO,aAAA,GAAgB,OAAA;AAAA,QAC7B;AAAA,MACF;AAEA,MAAA,IAAA,EAAK;AAAA,IACP,SAAS,KAAA,EAAO;AACd,MAAA,IAAA,CAAK,KAAK,CAAA;AAAA,IACZ;AAAA,EACF,CAAA;AACF;AAiBO,SAAS,mBAAA,CACd,MAAA,EACA,MAAA,EACA,WAAA,EACA,iBAAA,EACM;AAEN,EAAA,YAAA,CAAa,MAAM;AACjB,IAAA,MAAA,CAAO,MAAA,CAAO,MAAA,EAAQ,WAAA,EAAa,iBAAiB,CAAA;AAAA,EACtD,CAAC,CAAA;AACH;AAmBO,SAAS,UAAA,CAGd,MAAA,EACA,OAAA,EACA,OAAA,GAGI,EAAC,EACL;AACA,EAAA,OAAO,OAAO,GAAA,KAA8B;AAC1C,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,MAAM,GAAA,CAAI,IAAA,EAAK;AAC5B,MAAA,MAAM,MAAA,GAAS,OAAA,CAAQ,SAAA,GAAY,GAAG,KAAK,IAAA,EAAM,MAAA;AACjD,MAAA,MAAM,OAAA,GAAU,OAAA,CAAQ,UAAA,GAAa,IAAI,KAAK,IAAA,EAAM,OAAA;AAEpD,MAAA,IAAI,CAAC,MAAA,IAAU,CAAC,OAAA,EAAS;AACvB,QAAA,OAAO,IAAI,QAAA;AAAA,UACT,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,mCAAmC,CAAA;AAAA,UAC3D,EAAE,MAAA,EAAQ,GAAA,EAAK,SAAS,EAAE,cAAA,EAAgB,oBAAmB;AAAE,SACjE;AAAA,MACF;AAEA,MAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,OAAA,CAAQ,QAAQ,OAAO,CAAA;AACpD,MAAA,OAAO,OAAA,CAAQ,KAAK,OAAO,CAAA;AAAA,IAC7B,SAAS,KAAA,EAAO;AACd,MAAA,MAAM,OAAA,GAAU,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,gBAAA;AACzD,MAAA,OAAO,IAAI,SAAS,IAAA,CAAK,SAAA,CAAU,EAAE,KAAA,EAAO,OAAA,EAAS,CAAA,EAAG;AAAA,QACtD,MAAA,EAAQ,GAAA;AAAA,QACR,OAAA,EAAS,EAAE,cAAA,EAAgB,kBAAA;AAAmB,OAC/C,CAAA;AAAA,IACH;AAAA,EACF,CAAA;AACF","file":"index.mjs","sourcesContent":["import type { MemoryOS } from \"../MemoryOS\";\r\nimport type { HydratedContext } from \"../types\";\r\n\r\n/**\r\n * Express/Connect-style request object\r\n */\r\nexport interface MiddlewareRequest {\r\n body?: { userId?: string; message?: string; [key: string]: unknown };\r\n params?: Record<string, string>;\r\n query?: Record<string, string>;\r\n headers?: Record<string, string | string[] | undefined>;\r\n user?: { id?: string; userId?: string; [key: string]: unknown };\r\n memoryContext?: HydratedContext;\r\n}\r\n\r\n/**\r\n * Express/Connect-style response object\r\n */\r\nexport interface MiddlewareResponse {\r\n locals?: Record<string, unknown>;\r\n json?: (body: unknown) => void;\r\n on?: (event: string, callback: () => void) => void;\r\n}\r\n\r\n/**\r\n * Next function to call the next middleware\r\n */\r\nexport type NextFunction = (error?: unknown) => void;\r\n\r\n/**\r\n * Options for the memory middleware\r\n */\r\nexport interface MemoryMiddlewareOptions {\r\n /** Function to extract userId from request */\r\n getUserId?: (req: MiddlewareRequest) => string | undefined;\r\n /** Function to extract user message from request */\r\n getMessage?: (req: MiddlewareRequest) => string | undefined;\r\n /** Attach context to request object */\r\n attachToRequest?: boolean;\r\n /** Auto-digest on response finish (requires response body capture) */\r\n autoDigest?: boolean;\r\n}\r\n\r\n/**\r\n * Result attached to request/response\r\n */\r\ndeclare global {\r\n // 
eslint-disable-next-line @typescript-eslint/no-namespace\r\n namespace Express {\r\n interface Request {\r\n memoryContext?: HydratedContext;\r\n }\r\n interface Locals {\r\n memoryContext?: HydratedContext;\r\n }\r\n }\r\n}\r\n\r\n/**\r\n * Create Express middleware for automatic context hydration.\r\n *\r\n * @example\r\n * ```typescript\r\n * import express from 'express';\r\n * import { MemoryOS } from 'mem-ts';\r\n * import { createMemoryMiddleware } from 'mem-ts/middleware';\r\n *\r\n * const app = express();\r\n * const memory = new MemoryOS({ ... });\r\n *\r\n * app.use('/chat', createMemoryMiddleware(memory, {\r\n * getUserId: (req) => req.user?.id,\r\n * getMessage: (req) => req.body?.message,\r\n * }));\r\n *\r\n * app.post('/chat', (req, res) => {\r\n * const context = req.memoryContext;\r\n * // Use context.compiledPrompt in your LLM call\r\n * });\r\n * ```\r\n */\r\nexport function createMemoryMiddleware(\r\n memory: MemoryOS,\r\n options: MemoryMiddlewareOptions = {}\r\n) {\r\n const {\r\n getUserId = (req) => req.user?.id || req.user?.userId || req.body?.userId,\r\n getMessage = (req) => req.body?.message,\r\n attachToRequest = true,\r\n } = options;\r\n\r\n return async (\r\n req: MiddlewareRequest,\r\n res: MiddlewareResponse,\r\n next: NextFunction\r\n ): Promise<void> => {\r\n try {\r\n const userId = getUserId(req);\r\n const message = getMessage(req);\r\n\r\n if (!userId || !message) {\r\n return next();\r\n }\r\n\r\n // Hydrate context\r\n const context = await memory.hydrate(userId, message);\r\n\r\n // Attach to request\r\n if (attachToRequest) {\r\n req.memoryContext = context;\r\n if (res.locals) {\r\n res.locals.memoryContext = context;\r\n }\r\n }\r\n\r\n next();\r\n } catch (error) {\r\n next(error);\r\n }\r\n };\r\n}\r\n\r\n/**\r\n * Helper function to digest after response in Express.\r\n * Call this after sending the response.\r\n *\r\n * @example\r\n * ```typescript\r\n * app.post('/chat', async (req, res) => {\r\n * const response = await callLLM(req.memoryContext, req.body.message);\r\n * res.json({ message: response });\r\n *\r\n * // Digest in background\r\n * digestAfterResponse(memory, req.user.id, req.body.message, response);\r\n * });\r\n * ```\r\n */\r\nexport function digestAfterResponse(\r\n memory: MemoryOS,\r\n userId: string,\r\n userMessage: string,\r\n assistantResponse: string\r\n): void {\r\n // Fire and forget\r\n setImmediate(() => {\r\n memory.digest(userId, userMessage, assistantResponse);\r\n });\r\n}\r\n\r\n/**\r\n * Create a Next.js API route handler wrapper.\r\n *\r\n * @example\r\n * ```typescript\r\n * // pages/api/chat.ts or app/api/chat/route.ts\r\n * import { withMemory } from 'mem-ts/middleware';\r\n *\r\n * export const POST = withMemory(memory, async (req, context) => {\r\n * const { message } = await req.json();\r\n * const response = await callLLM(context.compiledPrompt, message);\r\n * return Response.json({ message: response });\r\n * }, {\r\n * getUserId: (req) => req.headers.get('x-user-id'),\r\n * });\r\n * ```\r\n */\r\nexport function withMemory<\r\n T extends { json: () => Promise<{ message?: string; userId?: string }> }\r\n>(\r\n memory: MemoryOS,\r\n handler: (req: T, context: HydratedContext) => Promise<Response>,\r\n options: {\r\n getUserId?: (req: T) => string | null | undefined;\r\n getMessage?: (body: { message?: string }) => string | undefined;\r\n } = {}\r\n) {\r\n return async (req: T): Promise<Response> => {\r\n try {\r\n const body = await req.json();\r\n const userId = options.getUserId?.(req) || 
body?.userId;\r\n const message = options.getMessage?.(body) || body?.message;\r\n\r\n if (!userId || !message) {\r\n return new Response(\r\n JSON.stringify({ error: \"userId and message are required\" }),\r\n { status: 400, headers: { \"Content-Type\": \"application/json\" } }\r\n );\r\n }\r\n\r\n const context = await memory.hydrate(userId, message);\r\n return handler(req, context);\r\n } catch (error) {\r\n const message = error instanceof Error ? error.message : \"Internal error\";\r\n return new Response(JSON.stringify({ error: message }), {\r\n status: 500,\r\n headers: { \"Content-Type\": \"application/json\" },\r\n });\r\n }\r\n };\r\n}\r\n"]}
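For fetch-style route handlers (Next.js), the `withMemory` wrapper hydrates the context and passes it to the handler; the sketch below mirrors the JSDoc example visible in the source map above, with the same caveats as the previous sketch (assumed import path, and `memory`/`callLLM` defined as in the Express example).

```typescript
// app/api/chat/route.ts -- illustrative location taken from the JSDoc example.
import { withMemory } from '@mzhub/mem-ts/middleware';

export const POST = withMemory(memory, async (req, context) => {
  const { message } = await req.json();
  // context.compiledPrompt is the field the package's own JSDoc example uses.
  const response = await callLLM(context.compiledPrompt, message);
  return Response.json({ message: response });
}, {
  // withMemory falls back to body.userId when this returns null/undefined.
  getUserId: (req) => req.headers.get('x-user-id'),
});
```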
@@ -0,0 +1,96 @@
+import { B as BaseProvider } from '../BaseProvider-edMh_R9t.mjs';
+import { a as CompletionOptions, b as CompletionResult, P as ProviderConfig, e as ProviderName } from '../types-G9qmfSeZ.mjs';
+
+/**
+ * OpenAI provider using native fetch (no SDK required)
+ */
+declare class OpenAIProvider extends BaseProvider {
+    private endpoint;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    getDefaultModel(): string;
+    getName(): string;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Anthropic provider using the official @anthropic-ai/sdk package
+ */
+declare class AnthropicProvider extends BaseProvider {
+    private client;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Google Gemini provider using the official @google/generative-ai package
+ */
+declare class GeminiProvider extends BaseProvider {
+    private genAI;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Groq provider using the official groq-sdk package
+ */
+declare class GroqProvider extends BaseProvider {
+    private client;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Cerebras provider using the official @cerebras/cerebras_cloud_sdk package
+ */
+declare class CerebrasProvider extends BaseProvider {
+    private client;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Create a provider instance from configuration
+ */
+declare function createProvider(config: ProviderConfig): BaseProvider;
+/**
+ * Check which providers are available (have their SDKs installed)
+ */
+declare function getAvailableProviders(): ProviderName[];
+
+export { AnthropicProvider, BaseProvider, CerebrasProvider, GeminiProvider, GroqProvider, OpenAIProvider, createProvider, getAvailableProviders };
@@ -0,0 +1,96 @@
+import { B as BaseProvider } from '../BaseProvider-CEoiLGj5.js';
+import { a as CompletionOptions, b as CompletionResult, P as ProviderConfig, e as ProviderName } from '../types-G9qmfSeZ.js';
+
+/**
+ * OpenAI provider using native fetch (no SDK required)
+ */
+declare class OpenAIProvider extends BaseProvider {
+    private endpoint;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    getDefaultModel(): string;
+    getName(): string;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Anthropic provider using the official @anthropic-ai/sdk package
+ */
+declare class AnthropicProvider extends BaseProvider {
+    private client;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Google Gemini provider using the official @google/generative-ai package
+ */
+declare class GeminiProvider extends BaseProvider {
+    private genAI;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Groq provider using the official groq-sdk package
+ */
+declare class GroqProvider extends BaseProvider {
+    private client;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Cerebras provider using the official @cerebras/cerebras_cloud_sdk package
+ */
+declare class CerebrasProvider extends BaseProvider {
+    private client;
+    constructor(config: {
+        apiKey: string;
+        model?: string;
+        baseUrl?: string;
+    });
+    private initClient;
+    getDefaultModel(): string;
+    getName(): string;
+    static isAvailable(): boolean;
+    complete(options: CompletionOptions): Promise<CompletionResult>;
+}
+
+/**
+ * Create a provider instance from configuration
+ */
+declare function createProvider(config: ProviderConfig): BaseProvider;
+/**
+ * Check which providers are available (have their SDKs installed)
+ */
+declare function getAvailableProviders(): ProviderName[];
+
+export { AnthropicProvider, BaseProvider, CerebrasProvider, GeminiProvider, GroqProvider, OpenAIProvider, createProvider, getAvailableProviders };
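The declarations above expose one class per provider plus `createProvider` and `getAvailableProviders`. A minimal sketch of direct use, taking only the shapes shown in this hunk: the `@mzhub/mem-ts/providers` subpath is an assumption (the package.json exports are not expanded here), and `CompletionOptions`/`CompletionResult` live in the shared types chunk, so no completion call is shown.

```typescript
// Assumed subpath export; the concrete shapes below come from the declarations in this hunk.
import { OpenAIProvider, getAvailableProviders } from '@mzhub/mem-ts/providers';

// Reports which providers can be used in this environment; per the comments
// above, OpenAI needs no SDK while the others require their official packages.
console.log(getAvailableProviders());

// Constructor shape copied from the declaration: apiKey plus optional model/baseUrl.
const provider = new OpenAIProvider({
  apiKey: process.env.OPENAI_API_KEY ?? '',
  // model is optional; getDefaultModel() supplies the fallback.
});

console.log(provider.getName(), provider.getDefaultModel());

// provider.complete(options) resolves to a CompletionResult; the exact
// CompletionOptions fields are defined in types-G9qmfSeZ and not shown in this diff.
```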