@lelemondev/sdk 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/README.md +536 -93
  2. package/dist/express-Dt5wT6_n.d.mts +67 -0
  3. package/dist/express-Dt5wT6_n.d.ts +67 -0
  4. package/dist/express.d.mts +1 -0
  5. package/dist/express.d.ts +1 -0
  6. package/dist/express.js +21 -0
  7. package/dist/express.js.map +1 -0
  8. package/dist/express.mjs +19 -0
  9. package/dist/express.mjs.map +1 -0
  10. package/dist/hono-Dzmu77iW.d.mts +80 -0
  11. package/dist/hono-Dzmu77iW.d.ts +80 -0
  12. package/dist/hono.d.mts +1 -0
  13. package/dist/hono.d.ts +1 -0
  14. package/dist/hono.js +23 -0
  15. package/dist/hono.js.map +1 -0
  16. package/dist/hono.mjs +21 -0
  17. package/dist/hono.mjs.map +1 -0
  18. package/dist/index.d.mts +2 -2
  19. package/dist/index.d.ts +2 -2
  20. package/dist/index.js +949 -3
  21. package/dist/index.js.map +1 -1
  22. package/dist/index.mjs +949 -3
  23. package/dist/index.mjs.map +1 -1
  24. package/dist/integrations.d.mts +4 -0
  25. package/dist/integrations.d.ts +4 -0
  26. package/dist/integrations.js +93 -0
  27. package/dist/integrations.js.map +1 -0
  28. package/dist/integrations.mjs +88 -0
  29. package/dist/integrations.mjs.map +1 -0
  30. package/dist/lambda-CAuiF9dH.d.mts +79 -0
  31. package/dist/lambda-CAuiF9dH.d.ts +79 -0
  32. package/dist/lambda.d.mts +1 -0
  33. package/dist/lambda.d.ts +1 -0
  34. package/dist/lambda.js +21 -0
  35. package/dist/lambda.js.map +1 -0
  36. package/dist/lambda.mjs +19 -0
  37. package/dist/lambda.mjs.map +1 -0
  38. package/dist/next-BC9PmEho.d.mts +100 -0
  39. package/dist/next-BC9PmEho.d.ts +100 -0
  40. package/dist/next.d.mts +1 -0
  41. package/dist/next.d.ts +1 -0
  42. package/dist/next.js +33 -0
  43. package/dist/next.js.map +1 -0
  44. package/dist/next.mjs +30 -0
  45. package/dist/next.mjs.map +1 -0
  46. package/package.json +59 -11
@@ -0,0 +1,100 @@
+ /**
+  * Next.js App Router Integration
+  *
+  * Wraps route handlers to automatically flush traces.
+  * Supports Next.js 15+ `after()` and Vercel's `waitUntil()`.
+  *
+  * @example
+  * import { withObserve } from '@lelemondev/sdk/next';
+  *
+  * export const POST = withObserve(async (req) => {
+  *   const openai = observe(new OpenAI());
+  *   const result = await openai.chat.completions.create({...});
+  *   return Response.json(result);
+  * });
+  */
+ /**
+  * Next.js App Router handler type
+  *
+  * @typeParam TContext - Optional context type for dynamic route parameters
+  */
+ type NextRouteHandler<TContext = unknown> = (request: Request, context?: TContext) => Response | Promise<Response>;
+ /**
+  * Options for the Next.js wrapper
+  */
+ interface NextObserveOptions {
+     /**
+      * Next.js 15+ after() function from 'next/server'
+      * Preferred method - runs after response without blocking
+      *
+      * @example
+      * import { after } from 'next/server';
+      * export const POST = withObserve(handler, { after });
+      */
+     after?: (callback: () => void | Promise<void>) => void;
+     /**
+      * Vercel's waitUntil() from '@vercel/functions'
+      * Alternative for Vercel deployments
+      *
+      * @example
+      * import { waitUntil } from '@vercel/functions';
+      * export const POST = withObserve(handler, { waitUntil });
+      */
+     waitUntil?: (promise: Promise<unknown>) => void;
+ }
+ /**
+  * Wrap a Next.js App Router handler with automatic trace flushing
+  *
+  * @param handler - Your route handler function
+  * @param options - Optional: pass `after` (Next.js 15+) or `waitUntil` (Vercel)
+  * @returns Wrapped handler that auto-flushes traces
+  *
+  * @example
+  * // Basic usage (blocking flush)
+  * export const POST = withObserve(async (req) => {
+  *   return Response.json({ ok: true });
+  * });
+  *
+  * @example
+  * // Next.js 15+ with after() - non-blocking (recommended)
+  * import { after } from 'next/server';
+  *
+  * export const POST = withObserve(
+  *   async (req) => Response.json({ ok: true }),
+  *   { after }
+  * );
+  *
+  * @example
+  * // Vercel with waitUntil() - non-blocking
+  * import { waitUntil } from '@vercel/functions';
+  *
+  * export const POST = withObserve(
+  *   async (req) => Response.json({ ok: true }),
+  *   { waitUntil }
+  * );
+  */
+ declare function withObserve<TContext = unknown>(handler: NextRouteHandler<TContext>, options?: NextObserveOptions): NextRouteHandler<TContext>;
+ /**
+  * Create a pre-configured wrapper with default options
+  *
+  * @example
+  * import { after } from 'next/server';
+  * import { createWrapper } from '@lelemondev/sdk/next';
+  *
+  * const withObserve = createWrapper({ after });
+  *
+  * export const POST = withObserve(async (req) => {
+  *   return Response.json({ ok: true });
+  * });
+  */
+ declare function createWrapper(defaultOptions: NextObserveOptions): <TContext = unknown>(handler: NextRouteHandler<TContext>, options?: NextObserveOptions) => NextRouteHandler<TContext>;
+
+ type next_NextObserveOptions = NextObserveOptions;
+ type next_NextRouteHandler<TContext = unknown> = NextRouteHandler<TContext>;
+ declare const next_createWrapper: typeof createWrapper;
+ declare const next_withObserve: typeof withObserve;
+ declare namespace next {
+     export { type next_NextObserveOptions as NextObserveOptions, type next_NextRouteHandler as NextRouteHandler, next_createWrapper as createWrapper, next_withObserve as withObserve };
+ }
+
+ export { type NextRouteHandler as N, type NextObserveOptions as a, createWrapper as c, next as n, withObserve as w };
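Taken together, this declaration chunk is the public surface of the `/next` entry point: wrap a route handler with `withObserve` (or a pre-configured `createWrapper`), optionally passing `after` or `waitUntil` so the flush happens off the response path. A minimal sketch of a consuming route handler, assuming `observe` is exported from the package root as the JSDoc example above implies; the route path and model name are illustrative:

// app/api/chat/route.ts (hypothetical path)
import { after } from 'next/server';
import OpenAI from 'openai';
import { observe } from '@lelemondev/sdk';
import { withObserve } from '@lelemondev/sdk/next';

export const POST = withObserve(async (req: Request) => {
  const openai = observe(new OpenAI());
  const completion = await openai.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: await req.text() }],
  });
  return Response.json(completion);
}, { after }); // traces flush after the response is sent, without blocking it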
package/dist/next.d.mts ADDED
@@ -0,0 +1 @@
+ export { a as NextObserveOptions, N as NextRouteHandler, c as createWrapper, w as withObserve } from './next-BC9PmEho.mjs';
package/dist/next.d.ts ADDED
@@ -0,0 +1 @@
+ export { a as NextObserveOptions, N as NextRouteHandler, c as createWrapper, w as withObserve } from './next-BC9PmEho.js';
package/dist/next.js ADDED
@@ -0,0 +1,33 @@
+ 'use strict';
+
+ /* @lelemondev/sdk - LLM Observability */
+
+ async function flush() {
+ }
+
+ // src/integrations/next.ts
+ function withObserve(handler, options) {
+   return async (request, context) => {
+     try {
+       return await handler(request, context);
+     } finally {
+       if (options?.after) {
+         options.after(() => flush());
+       } else if (options?.waitUntil) {
+         options.waitUntil(flush());
+       } else {
+         await flush();
+       }
+     }
+   };
+ }
+ function createWrapper(defaultOptions) {
+   return function(handler, options) {
+     return withObserve(handler, { ...defaultOptions, ...options });
+   };
+ }
+
+ exports.createWrapper = createWrapper;
+ exports.withObserve = withObserve;
+ //# sourceMappingURL=next.js.map
+ //# sourceMappingURL=next.js.map
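The compiled wrapper makes the flush policy explicit: `after` is preferred, `waitUntil` is the fallback, and with neither option the flush is awaited before the handler returns. A small sketch of that ordering with stand-in schedulers; the fake callbacks and URLs are illustrative, not part of the package:

import { withObserve } from '@lelemondev/sdk/next';

const handler = async () => Response.json({ ok: true });

// `after` wins even when both schedulers are supplied.
const deferred: Array<() => void | Promise<void>> = [];
const scheduled = withObserve(handler, {
  after: (cb) => { deferred.push(cb); },     // stand-in for next/server's after()
  waitUntil: (promise) => { void promise; }, // never consulted while `after` is set
});

// With no options at all, the finally block awaits flush() inline.
const blocking = withObserve(handler);

await scheduled(new Request('https://example.test/api')); // flush deferred into `deferred`
await blocking(new Request('https://example.test/api'));  // flush awaited before returning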
package/dist/next.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/core/config.ts","../src/integrations/next.ts"],"names":[],"mappings":";;;;AAuEA,eAAsB,KAAA,GAAuB;AAI7C;;;ACiBO,SAAS,WAAA,CACd,SACA,OAAA,EAC4B;AAC5B,EAAA,OAAO,OAAO,SAAkB,OAAA,KAA0C;AACxE,IAAA,IAAI;AACF,MAAA,OAAO,MAAM,OAAA,CAAQ,OAAA,EAAS,OAAO,CAAA;AAAA,IACvC,CAAA,SAAE;AAEA,MAAA,IAAI,SAAS,KAAA,EAAO;AAElB,QAAA,OAAA,CAAQ,KAAA,CAAM,MAAM,KAAA,EAAO,CAAA;AAAA,MAC7B,CAAA,MAAA,IAAW,SAAS,SAAA,EAAW;AAE7B,QAAA,OAAA,CAAQ,SAAA,CAAU,OAAO,CAAA;AAAA,MAC3B,CAAA,MAAO;AAEL,QAAA,MAAM,KAAA,EAAM;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAA;AACF;AAeO,SAAS,cAAc,cAAA,EAAoC;AAChE,EAAA,OAAO,SACL,SACA,OAAA,EAC4B;AAC5B,IAAA,OAAO,YAAY,OAAA,EAAS,EAAE,GAAG,cAAA,EAAgB,GAAG,SAAS,CAAA;AAAA,EAC/D,CAAA;AACF","file":"next.js","sourcesContent":["/**\n * Global Configuration\n *\n * Manages SDK configuration and transport instance.\n */\n\nimport type { LelemonConfig } from './types';\nimport { Transport } from './transport';\n\n// ─────────────────────────────────────────────────────────────\n// Global State\n// ─────────────────────────────────────────────────────────────\n\nlet globalConfig: LelemonConfig = {};\nlet globalTransport: Transport | null = null;\nlet initialized = false;\n\n// ─────────────────────────────────────────────────────────────\n// Configuration\n// ─────────────────────────────────────────────────────────────\n\nconst DEFAULT_ENDPOINT = 'https://api.lelemon.dev';\n\n/**\n * Initialize the SDK\n * Call once at app startup\n */\nexport function init(config: LelemonConfig = {}): void {\n globalConfig = config;\n globalTransport = createTransport(config);\n initialized = true;\n}\n\n/**\n * Get current config\n */\nexport function getConfig(): LelemonConfig {\n return globalConfig;\n}\n\n/**\n * Check if SDK is initialized\n */\nexport function isInitialized(): boolean {\n return initialized;\n}\n\n/**\n * Check if SDK is enabled\n */\nexport function isEnabled(): boolean {\n return getTransport().isEnabled();\n}\n\n// ─────────────────────────────────────────────────────────────\n// Transport\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Get or create transport instance\n */\nexport function getTransport(): Transport {\n if (!globalTransport) {\n globalTransport = createTransport(globalConfig);\n }\n return globalTransport;\n}\n\n/**\n * Flush all pending traces\n */\nexport async function flush(): Promise<void> {\n if (globalTransport) {\n await globalTransport.flush();\n }\n}\n\n/**\n * Create transport instance\n */\nfunction createTransport(config: LelemonConfig): Transport {\n const apiKey = config.apiKey ?? getEnvVar('LELEMON_API_KEY');\n\n if (!apiKey && !config.disabled) {\n console.warn(\n '[Lelemon] No API key provided. Set apiKey in init() or LELEMON_API_KEY env var. Tracing disabled.'\n );\n }\n\n return new Transport({\n apiKey: apiKey ?? '',\n endpoint: config.endpoint ?? DEFAULT_ENDPOINT,\n debug: config.debug ?? false,\n disabled: config.disabled ?? 
!apiKey,\n batchSize: config.batchSize,\n flushIntervalMs: config.flushIntervalMs,\n requestTimeoutMs: config.requestTimeoutMs,\n });\n}\n\n/**\n * Get environment variable (works in Node and edge)\n */\nfunction getEnvVar(name: string): string | undefined {\n if (typeof process !== 'undefined' && process.env) {\n return process.env[name];\n }\n return undefined;\n}\n","/**\n * Next.js App Router Integration\n *\n * Wraps route handlers to automatically flush traces.\n * Supports Next.js 15+ `after()` and Vercel's `waitUntil()`.\n *\n * @example\n * import { withObserve } from '@lelemondev/sdk/next';\n *\n * export const POST = withObserve(async (req) => {\n * const openai = observe(new OpenAI());\n * const result = await openai.chat.completions.create({...});\n * return Response.json(result);\n * });\n */\n\nimport { flush } from '../core/config';\n\n// ─────────────────────────────────────────────────────────────\n// Types\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Next.js App Router handler type\n *\n * @typeParam TContext - Optional context type for dynamic route parameters\n */\nexport type NextRouteHandler<TContext = unknown> = (\n request: Request,\n context?: TContext\n) => Response | Promise<Response>;\n\n/**\n * Options for the Next.js wrapper\n */\nexport interface NextObserveOptions {\n /**\n * Next.js 15+ after() function from 'next/server'\n * Preferred method - runs after response without blocking\n *\n * @example\n * import { after } from 'next/server';\n * export const POST = withObserve(handler, { after });\n */\n after?: (callback: () => void | Promise<void>) => void;\n\n /**\n * Vercel's waitUntil() from '@vercel/functions'\n * Alternative for Vercel deployments\n *\n * @example\n * import { waitUntil } from '@vercel/functions';\n * export const POST = withObserve(handler, { waitUntil });\n */\n waitUntil?: (promise: Promise<unknown>) => void;\n}\n\n// ─────────────────────────────────────────────────────────────\n// Wrapper\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Wrap a Next.js App Router handler with automatic trace flushing\n *\n * @param handler - Your route handler function\n * @param options - Optional: pass `after` (Next.js 15+) or `waitUntil` (Vercel)\n * @returns Wrapped handler that auto-flushes traces\n *\n * @example\n * // Basic usage (blocking flush)\n * export const POST = withObserve(async (req) => {\n * return Response.json({ ok: true });\n * });\n *\n * @example\n * // Next.js 15+ with after() - non-blocking (recommended)\n * import { after } from 'next/server';\n *\n * export const POST = withObserve(\n * async (req) => Response.json({ ok: true }),\n * { after }\n * );\n *\n * @example\n * // Vercel with waitUntil() - non-blocking\n * import { waitUntil } from '@vercel/functions';\n *\n * export const POST = withObserve(\n * async (req) => Response.json({ ok: true }),\n * { waitUntil }\n * );\n */\nexport function withObserve<TContext = unknown>(\n handler: NextRouteHandler<TContext>,\n options?: NextObserveOptions\n): NextRouteHandler<TContext> {\n return async (request: Request, context?: TContext): Promise<Response> => {\n try {\n return await handler(request, context);\n } finally {\n // Priority: after() > waitUntil() > blocking flush\n if (options?.after) {\n // Next.js 15+ native - best option\n options.after(() => flush());\n } else if (options?.waitUntil) {\n // Vercel platform\n options.waitUntil(flush());\n } else {\n // Fallback: blocking flush\n await flush();\n }\n 
}\n };\n}\n\n/**\n * Create a pre-configured wrapper with default options\n *\n * @example\n * import { after } from 'next/server';\n * import { createWrapper } from '@lelemondev/sdk/next';\n *\n * const withObserve = createWrapper({ after });\n *\n * export const POST = withObserve(async (req) => {\n * return Response.json({ ok: true });\n * });\n */\nexport function createWrapper(defaultOptions: NextObserveOptions) {\n return function <TContext = unknown>(\n handler: NextRouteHandler<TContext>,\n options?: NextObserveOptions\n ): NextRouteHandler<TContext> {\n return withObserve(handler, { ...defaultOptions, ...options });\n };\n}\n"]}
package/dist/next.mjs ADDED
@@ -0,0 +1,30 @@
+ /* @lelemondev/sdk - LLM Observability */
+
+ async function flush() {
+ }
+
+ // src/integrations/next.ts
+ function withObserve(handler, options) {
+   return async (request, context) => {
+     try {
+       return await handler(request, context);
+     } finally {
+       if (options?.after) {
+         options.after(() => flush());
+       } else if (options?.waitUntil) {
+         options.waitUntil(flush());
+       } else {
+         await flush();
+       }
+     }
+   };
+ }
+ function createWrapper(defaultOptions) {
+   return function(handler, options) {
+     return withObserve(handler, { ...defaultOptions, ...options });
+   };
+ }
+
+ export { createWrapper, withObserve };
+ //# sourceMappingURL=next.mjs.map
+ //# sourceMappingURL=next.mjs.map
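`createWrapper` merges options with a plain object spread, so per-call options are layered over the defaults rather than replacing them. A sketch of that layering, assuming the `@vercel/functions` import named in the JSDoc; mixing both schedulers is shown only to illustrate the merge:

import { after } from 'next/server';
import { waitUntil } from '@vercel/functions';
import { createWrapper } from '@lelemondev/sdk/next';

// Project-wide default: schedule flushes with Next.js after().
const withObserve = createWrapper({ after });

// Per-route call: { ...{ after }, ...{ waitUntil } } keeps `after` and adds
// `waitUntil`; at runtime the wrapper still prefers `after`.
export const POST = withObserve(
  async () => Response.json({ ok: true }),
  { waitUntil }
);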
package/dist/next.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/core/config.ts","../src/integrations/next.ts"],"names":[],"mappings":";;AAuEA,eAAsB,KAAA,GAAuB;AAI7C;;;ACiBO,SAAS,WAAA,CACd,SACA,OAAA,EAC4B;AAC5B,EAAA,OAAO,OAAO,SAAkB,OAAA,KAA0C;AACxE,IAAA,IAAI;AACF,MAAA,OAAO,MAAM,OAAA,CAAQ,OAAA,EAAS,OAAO,CAAA;AAAA,IACvC,CAAA,SAAE;AAEA,MAAA,IAAI,SAAS,KAAA,EAAO;AAElB,QAAA,OAAA,CAAQ,KAAA,CAAM,MAAM,KAAA,EAAO,CAAA;AAAA,MAC7B,CAAA,MAAA,IAAW,SAAS,SAAA,EAAW;AAE7B,QAAA,OAAA,CAAQ,SAAA,CAAU,OAAO,CAAA;AAAA,MAC3B,CAAA,MAAO;AAEL,QAAA,MAAM,KAAA,EAAM;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAA;AACF;AAeO,SAAS,cAAc,cAAA,EAAoC;AAChE,EAAA,OAAO,SACL,SACA,OAAA,EAC4B;AAC5B,IAAA,OAAO,YAAY,OAAA,EAAS,EAAE,GAAG,cAAA,EAAgB,GAAG,SAAS,CAAA;AAAA,EAC/D,CAAA;AACF","file":"next.mjs","sourcesContent":["/**\n * Global Configuration\n *\n * Manages SDK configuration and transport instance.\n */\n\nimport type { LelemonConfig } from './types';\nimport { Transport } from './transport';\n\n// ─────────────────────────────────────────────────────────────\n// Global State\n// ─────────────────────────────────────────────────────────────\n\nlet globalConfig: LelemonConfig = {};\nlet globalTransport: Transport | null = null;\nlet initialized = false;\n\n// ─────────────────────────────────────────────────────────────\n// Configuration\n// ─────────────────────────────────────────────────────────────\n\nconst DEFAULT_ENDPOINT = 'https://api.lelemon.dev';\n\n/**\n * Initialize the SDK\n * Call once at app startup\n */\nexport function init(config: LelemonConfig = {}): void {\n globalConfig = config;\n globalTransport = createTransport(config);\n initialized = true;\n}\n\n/**\n * Get current config\n */\nexport function getConfig(): LelemonConfig {\n return globalConfig;\n}\n\n/**\n * Check if SDK is initialized\n */\nexport function isInitialized(): boolean {\n return initialized;\n}\n\n/**\n * Check if SDK is enabled\n */\nexport function isEnabled(): boolean {\n return getTransport().isEnabled();\n}\n\n// ─────────────────────────────────────────────────────────────\n// Transport\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Get or create transport instance\n */\nexport function getTransport(): Transport {\n if (!globalTransport) {\n globalTransport = createTransport(globalConfig);\n }\n return globalTransport;\n}\n\n/**\n * Flush all pending traces\n */\nexport async function flush(): Promise<void> {\n if (globalTransport) {\n await globalTransport.flush();\n }\n}\n\n/**\n * Create transport instance\n */\nfunction createTransport(config: LelemonConfig): Transport {\n const apiKey = config.apiKey ?? getEnvVar('LELEMON_API_KEY');\n\n if (!apiKey && !config.disabled) {\n console.warn(\n '[Lelemon] No API key provided. Set apiKey in init() or LELEMON_API_KEY env var. Tracing disabled.'\n );\n }\n\n return new Transport({\n apiKey: apiKey ?? '',\n endpoint: config.endpoint ?? DEFAULT_ENDPOINT,\n debug: config.debug ?? false,\n disabled: config.disabled ?? 
!apiKey,\n batchSize: config.batchSize,\n flushIntervalMs: config.flushIntervalMs,\n requestTimeoutMs: config.requestTimeoutMs,\n });\n}\n\n/**\n * Get environment variable (works in Node and edge)\n */\nfunction getEnvVar(name: string): string | undefined {\n if (typeof process !== 'undefined' && process.env) {\n return process.env[name];\n }\n return undefined;\n}\n","/**\n * Next.js App Router Integration\n *\n * Wraps route handlers to automatically flush traces.\n * Supports Next.js 15+ `after()` and Vercel's `waitUntil()`.\n *\n * @example\n * import { withObserve } from '@lelemondev/sdk/next';\n *\n * export const POST = withObserve(async (req) => {\n * const openai = observe(new OpenAI());\n * const result = await openai.chat.completions.create({...});\n * return Response.json(result);\n * });\n */\n\nimport { flush } from '../core/config';\n\n// ─────────────────────────────────────────────────────────────\n// Types\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Next.js App Router handler type\n *\n * @typeParam TContext - Optional context type for dynamic route parameters\n */\nexport type NextRouteHandler<TContext = unknown> = (\n request: Request,\n context?: TContext\n) => Response | Promise<Response>;\n\n/**\n * Options for the Next.js wrapper\n */\nexport interface NextObserveOptions {\n /**\n * Next.js 15+ after() function from 'next/server'\n * Preferred method - runs after response without blocking\n *\n * @example\n * import { after } from 'next/server';\n * export const POST = withObserve(handler, { after });\n */\n after?: (callback: () => void | Promise<void>) => void;\n\n /**\n * Vercel's waitUntil() from '@vercel/functions'\n * Alternative for Vercel deployments\n *\n * @example\n * import { waitUntil } from '@vercel/functions';\n * export const POST = withObserve(handler, { waitUntil });\n */\n waitUntil?: (promise: Promise<unknown>) => void;\n}\n\n// ─────────────────────────────────────────────────────────────\n// Wrapper\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Wrap a Next.js App Router handler with automatic trace flushing\n *\n * @param handler - Your route handler function\n * @param options - Optional: pass `after` (Next.js 15+) or `waitUntil` (Vercel)\n * @returns Wrapped handler that auto-flushes traces\n *\n * @example\n * // Basic usage (blocking flush)\n * export const POST = withObserve(async (req) => {\n * return Response.json({ ok: true });\n * });\n *\n * @example\n * // Next.js 15+ with after() - non-blocking (recommended)\n * import { after } from 'next/server';\n *\n * export const POST = withObserve(\n * async (req) => Response.json({ ok: true }),\n * { after }\n * );\n *\n * @example\n * // Vercel with waitUntil() - non-blocking\n * import { waitUntil } from '@vercel/functions';\n *\n * export const POST = withObserve(\n * async (req) => Response.json({ ok: true }),\n * { waitUntil }\n * );\n */\nexport function withObserve<TContext = unknown>(\n handler: NextRouteHandler<TContext>,\n options?: NextObserveOptions\n): NextRouteHandler<TContext> {\n return async (request: Request, context?: TContext): Promise<Response> => {\n try {\n return await handler(request, context);\n } finally {\n // Priority: after() > waitUntil() > blocking flush\n if (options?.after) {\n // Next.js 15+ native - best option\n options.after(() => flush());\n } else if (options?.waitUntil) {\n // Vercel platform\n options.waitUntil(flush());\n } else {\n // Fallback: blocking flush\n await flush();\n }\n 
}\n };\n}\n\n/**\n * Create a pre-configured wrapper with default options\n *\n * @example\n * import { after } from 'next/server';\n * import { createWrapper } from '@lelemondev/sdk/next';\n *\n * const withObserve = createWrapper({ after });\n *\n * export const POST = withObserve(async (req) => {\n * return Response.json({ ok: true });\n * });\n */\nexport function createWrapper(defaultOptions: NextObserveOptions) {\n return function <TContext = unknown>(\n handler: NextRouteHandler<TContext>,\n options?: NextObserveOptions\n ): NextRouteHandler<TContext> {\n return withObserve(handler, { ...defaultOptions, ...options });\n };\n}\n"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@lelemondev/sdk",
-   "version": "0.3.0",
+   "version": "0.5.0",
    "description": "Automatic LLM observability. Wrap your client, everything is traced.",
    "author": "Lelemon <info@lelemon.dev>",
    "license": "MIT",
@@ -18,15 +18,15 @@
      "tracing",
      "openai",
      "anthropic",
-     "gemini",
-     "bedrock",
+     "nextjs",
+     "lambda",
+     "express",
+     "hono",
      "claude",
      "gpt",
      "ai",
      "monitoring",
-     "analytics",
-     "langchain",
-     "agent"
+     "serverless"
    ],
    "main": "./dist/index.js",
    "module": "./dist/index.mjs",
@@ -34,16 +34,53 @@
    "exports": {
      ".": {
        "types": "./dist/index.d.ts",
-       "bun": "./dist/index.mjs",
-       "deno": "./dist/index.mjs",
        "import": "./dist/index.mjs",
-       "require": "./dist/index.js",
-       "default": "./dist/index.mjs"
+       "require": "./dist/index.js"
+     },
+     "./next": {
+       "types": "./dist/next.d.ts",
+       "import": "./dist/next.mjs",
+       "require": "./dist/next.js"
+     },
+     "./lambda": {
+       "types": "./dist/lambda.d.ts",
+       "import": "./dist/lambda.mjs",
+       "require": "./dist/lambda.js"
+     },
+     "./express": {
+       "types": "./dist/express.d.ts",
+       "import": "./dist/express.mjs",
+       "require": "./dist/express.js"
+     },
+     "./hono": {
+       "types": "./dist/hono.d.ts",
+       "import": "./dist/hono.mjs",
+       "require": "./dist/hono.js"
+     },
+     "./integrations": {
+       "types": "./dist/integrations.d.ts",
+       "import": "./dist/integrations.mjs",
+       "require": "./dist/integrations.js"
      },
      "./package.json": "./package.json"
    },
    "typesVersions": {
      "*": {
+       "next": [
+         "./dist/next.d.ts"
+       ],
+       "lambda": [
+         "./dist/lambda.d.ts"
+       ],
+       "express": [
+         "./dist/express.d.ts"
+       ],
+       "hono": [
+         "./dist/hono.d.ts"
+       ],
+       "integrations": [
+         "./dist/integrations.d.ts"
+       ],
        "*": [
          "./dist/index.d.ts"
        ]
@@ -60,14 +97,25 @@
    "scripts": {
      "build": "tsup",
      "dev": "tsup --watch",
+     "docs": "typedoc && node scripts/generate-llms-txt.mjs",
      "prepublishOnly": "npm run build",
      "lint": "eslint src/",
      "test": "vitest",
+     "test:run": "vitest run",
+     "test:coverage": "vitest run --coverage",
+     "test:e2e": "vitest run tests/e2e",
      "typecheck": "tsc --noEmit"
    },
    "devDependencies": {
+     "@aws-sdk/client-bedrock-runtime": "^3.962.0",
+     "@google/generative-ai": "^0.24.1",
      "@types/node": "^20.0.0",
+     "@vitest/coverage-v8": "^2.0.0",
+     "dotenv": "^17.2.3",
+     "openai": "^6.15.0",
      "tsup": "^8.5.1",
-     "typescript": "^5.9.3"
+     "typedoc": "^0.28.15",
+     "typescript": "^5.9.3",
+     "vitest": "^2.0.0"
    }
  }
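The reworked `exports` map is the user-facing change behind all the new `dist` files: each integration is reachable as its own subpath, with `import`/`require` conditions pointing at the matching `.mjs`/`.js` build and `typesVersions` covering resolvers that ignore `exports`. A sketch of how the `/next` subpath resolves for an ESM consumer; the other subpaths follow the same pattern, but their exported symbols are not shown in this diff:

// Resolves via the "./next" entry's "import" condition to ./dist/next.mjs;
// types come from ./dist/next.d.ts via the "types" condition, or via
// typesVersions for older TypeScript resolution modes.
import { withObserve, type NextObserveOptions } from '@lelemondev/sdk/next';

const options: NextObserveOptions = {};
export const GET = withObserve(async () => Response.json({ ok: true }), options);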