@lelemondev/sdk 0.2.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +164 -98
- package/dist/express-Cmb_A4sI.d.mts +47 -0
- package/dist/express-Cmb_A4sI.d.ts +47 -0
- package/dist/express.d.mts +1 -0
- package/dist/express.d.ts +1 -0
- package/dist/express.js +21 -0
- package/dist/express.js.map +1 -0
- package/dist/express.mjs +19 -0
- package/dist/express.mjs.map +1 -0
- package/dist/hono-ChTmQk_V.d.mts +61 -0
- package/dist/hono-ChTmQk_V.d.ts +61 -0
- package/dist/hono.d.mts +1 -0
- package/dist/hono.d.ts +1 -0
- package/dist/hono.js +23 -0
- package/dist/hono.js.map +1 -0
- package/dist/hono.mjs +21 -0
- package/dist/hono.mjs.map +1 -0
- package/dist/index.d.mts +50 -278
- package/dist/index.d.ts +50 -278
- package/dist/index.js +725 -525
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +724 -521
- package/dist/index.mjs.map +1 -1
- package/dist/integrations.d.mts +4 -0
- package/dist/integrations.d.ts +4 -0
- package/dist/integrations.js +93 -0
- package/dist/integrations.js.map +1 -0
- package/dist/integrations.mjs +88 -0
- package/dist/integrations.mjs.map +1 -0
- package/dist/lambda-DQmEfWXC.d.mts +75 -0
- package/dist/lambda-DQmEfWXC.d.ts +75 -0
- package/dist/lambda.d.mts +1 -0
- package/dist/lambda.d.ts +1 -0
- package/dist/lambda.js +21 -0
- package/dist/lambda.js.map +1 -0
- package/dist/lambda.mjs +19 -0
- package/dist/lambda.mjs.map +1 -0
- package/dist/next-0nso_zEN.d.mts +94 -0
- package/dist/next-0nso_zEN.d.ts +94 -0
- package/dist/next.d.mts +1 -0
- package/dist/next.d.ts +1 -0
- package/dist/next.js +33 -0
- package/dist/next.js.map +1 -0
- package/dist/next.mjs +30 -0
- package/dist/next.mjs.map +1 -0
- package/package.json +39 -14
package/dist/lambda-DQmEfWXC.d.mts
ADDED
@@ -0,0 +1,75 @@
+/**
+ * AWS Lambda Integration
+ *
+ * Wraps Lambda handlers to automatically flush traces before the function exits.
+ *
+ * @example
+ * import { withObserve } from '@lelemondev/sdk/lambda';
+ *
+ * export const handler = withObserve(async (event) => {
+ *   const openai = observe(new OpenAI());
+ *   const result = await openai.chat.completions.create({...});
+ *   return { statusCode: 200, body: JSON.stringify(result) };
+ * });
+ */
+/**
+ * AWS Lambda Context object
+ */
+interface LambdaContext {
+  functionName: string;
+  functionVersion: string;
+  invokedFunctionArn: string;
+  memoryLimitInMB: string;
+  awsRequestId: string;
+  logGroupName: string;
+  logStreamName: string;
+  getRemainingTimeInMillis(): number;
+  [key: string]: unknown;
+}
+/**
+ * Generic Lambda handler type
+ */
+type LambdaHandler<TEvent = unknown, TResult = unknown> = (event: TEvent, context: LambdaContext) => Promise<TResult>;
+/**
+ * Wrap an AWS Lambda handler with automatic trace flushing
+ *
+ * Always flushes before returning - Lambda freezes the container
+ * immediately after the handler returns, so this is required.
+ *
+ * @param handler - Your Lambda handler function
+ * @returns Wrapped handler that auto-flushes traces
+ *
+ * @example
+ * // API Gateway event
+ * export const handler = withObserve(async (event) => {
+ *   const body = JSON.parse(event.body);
+ *   const openai = observe(new OpenAI());
+ *   const result = await openai.chat.completions.create({
+ *     model: 'gpt-4',
+ *     messages: [{ role: 'user', content: body.message }],
+ *   });
+ *   return {
+ *     statusCode: 200,
+ *     body: JSON.stringify(result.choices[0].message),
+ *   };
+ * });
+ *
+ * @example
+ * // With typed events
+ * import type { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
+ *
+ * export const handler = withObserve<APIGatewayProxyEvent, APIGatewayProxyResult>(
+ *   async (event, context) => {
+ *     return { statusCode: 200, body: 'OK' };
+ *   }
+ * );
+ */
+declare function withObserve<TEvent = unknown, TResult = unknown>(handler: LambdaHandler<TEvent, TResult>): LambdaHandler<TEvent, TResult>;
+
+type lambda_LambdaContext = LambdaContext;
+declare const lambda_withObserve: typeof withObserve;
+declare namespace lambda {
+  export { type lambda_LambdaContext as LambdaContext, lambda_withObserve as withObserve };
+}
+
+export { type LambdaContext as L, lambda as l, withObserve as w };
package/dist/lambda-DQmEfWXC.d.ts
ADDED
@@ -0,0 +1,75 @@
(content identical to package/dist/lambda-DQmEfWXC.d.mts above)
package/dist/lambda.d.mts
ADDED
@@ -0,0 +1 @@
+export { L as LambdaContext, w as withObserve } from './lambda-DQmEfWXC.mjs';
package/dist/lambda.d.ts
ADDED
@@ -0,0 +1 @@
+export { L as LambdaContext, w as withObserve } from './lambda-DQmEfWXC.js';
package/dist/lambda.js
ADDED
@@ -0,0 +1,21 @@
+'use strict';
+
+/* @lelemondev/sdk - LLM Observability */
+
+async function flush() {
+}
+
+// src/integrations/lambda.ts
+function withObserve(handler) {
+  return async (event, context) => {
+    try {
+      return await handler(event, context);
+    } finally {
+      await flush();
+    }
+  };
+}
+
+exports.withObserve = withObserve;
+//# sourceMappingURL=lambda.js.map
+//# sourceMappingURL=lambda.js.map
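The wrapper above is a plain try/finally around the handler: traces flush even when the handler throws, and before Lambda freezes the container on return. A minimal sketch of the equivalent manual pattern, assuming the package root re-exports the flush() shown in the bundled source (the handler body is hypothetical):

import { flush } from '@lelemondev/sdk'; // assumption: root entry re-exports flush()

// Hypothetical handler doing by hand what withObserve automates.
export const handler = async (event: { message?: string }) => {
  try {
    // ...traced LLM calls would go here...
    return { statusCode: 200, body: 'OK' };
  } finally {
    // Lambda freezes the container as soon as the handler settles,
    // so the flush has to be awaited before returning.
    await flush();
  }
};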
package/dist/lambda.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/core/config.ts","../src/integrations/lambda.ts"],"names":[],"mappings":";;;;AAuEA,eAAsB,KAAA,GAAuB;AAI7C;;;ACOO,SAAS,YACd,OAAA,EACgC;AAChC,EAAA,OAAO,OAAO,OAAe,OAAA,KAA6C;AACxE,IAAA,IAAI;AACF,MAAA,OAAO,MAAM,OAAA,CAAQ,KAAA,EAAO,OAAO,CAAA;AAAA,IACrC,CAAA,SAAE;AAEA,MAAA,MAAM,KAAA,EAAM;AAAA,IACd;AAAA,EACF,CAAA;AACF","file":"lambda.js","sourcesContent":["/**\n * Global Configuration\n *\n * Manages SDK configuration and transport instance.\n */\n\nimport type { LelemonConfig } from './types';\nimport { Transport } from './transport';\n\n// ─────────────────────────────────────────────────────────────\n// Global State\n// ─────────────────────────────────────────────────────────────\n\nlet globalConfig: LelemonConfig = {};\nlet globalTransport: Transport | null = null;\nlet initialized = false;\n\n// ─────────────────────────────────────────────────────────────\n// Configuration\n// ─────────────────────────────────────────────────────────────\n\nconst DEFAULT_ENDPOINT = 'https://api.lelemon.dev';\n\n/**\n * Initialize the SDK\n * Call once at app startup\n */\nexport function init(config: LelemonConfig = {}): void {\n globalConfig = config;\n globalTransport = createTransport(config);\n initialized = true;\n}\n\n/**\n * Get current config\n */\nexport function getConfig(): LelemonConfig {\n return globalConfig;\n}\n\n/**\n * Check if SDK is initialized\n */\nexport function isInitialized(): boolean {\n return initialized;\n}\n\n/**\n * Check if SDK is enabled\n */\nexport function isEnabled(): boolean {\n return getTransport().isEnabled();\n}\n\n// ─────────────────────────────────────────────────────────────\n// Transport\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Get or create transport instance\n */\nexport function getTransport(): Transport {\n if (!globalTransport) {\n globalTransport = createTransport(globalConfig);\n }\n return globalTransport;\n}\n\n/**\n * Flush all pending traces\n */\nexport async function flush(): Promise<void> {\n if (globalTransport) {\n await globalTransport.flush();\n }\n}\n\n/**\n * Create transport instance\n */\nfunction createTransport(config: LelemonConfig): Transport {\n const apiKey = config.apiKey ?? getEnvVar('LELEMON_API_KEY');\n\n if (!apiKey && !config.disabled) {\n console.warn(\n '[Lelemon] No API key provided. Set apiKey in init() or LELEMON_API_KEY env var. Tracing disabled.'\n );\n }\n\n return new Transport({\n apiKey: apiKey ?? '',\n endpoint: config.endpoint ?? DEFAULT_ENDPOINT,\n debug: config.debug ?? false,\n disabled: config.disabled ?? 
!apiKey,\n batchSize: config.batchSize,\n flushIntervalMs: config.flushIntervalMs,\n requestTimeoutMs: config.requestTimeoutMs,\n });\n}\n\n/**\n * Get environment variable (works in Node and edge)\n */\nfunction getEnvVar(name: string): string | undefined {\n if (typeof process !== 'undefined' && process.env) {\n return process.env[name];\n }\n return undefined;\n}\n","/**\n * AWS Lambda Integration\n *\n * Wraps Lambda handlers to automatically flush traces before the function exits.\n *\n * @example\n * import { withObserve } from '@lelemondev/sdk/lambda';\n *\n * export const handler = withObserve(async (event) => {\n * const openai = observe(new OpenAI());\n * const result = await openai.chat.completions.create({...});\n * return { statusCode: 200, body: JSON.stringify(result) };\n * });\n */\n\nimport { flush } from '../core/config';\n\n// ─────────────────────────────────────────────────────────────\n// Types (minimal to avoid requiring @types/aws-lambda)\n// ─────────────────────────────────────────────────────────────\n\n/**\n * AWS Lambda Context object\n */\nexport interface LambdaContext {\n functionName: string;\n functionVersion: string;\n invokedFunctionArn: string;\n memoryLimitInMB: string;\n awsRequestId: string;\n logGroupName: string;\n logStreamName: string;\n getRemainingTimeInMillis(): number;\n [key: string]: unknown;\n}\n\n/**\n * Generic Lambda handler type\n */\ntype LambdaHandler<TEvent = unknown, TResult = unknown> = (\n event: TEvent,\n context: LambdaContext\n) => Promise<TResult>;\n\n// ─────────────────────────────────────────────────────────────\n// Wrapper\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Wrap an AWS Lambda handler with automatic trace flushing\n *\n * Always flushes before returning - Lambda freezes the container\n * immediately after the handler returns, so this is required.\n *\n * @param handler - Your Lambda handler function\n * @returns Wrapped handler that auto-flushes traces\n *\n * @example\n * // API Gateway event\n * export const handler = withObserve(async (event) => {\n * const body = JSON.parse(event.body);\n * const openai = observe(new OpenAI());\n * const result = await openai.chat.completions.create({\n * model: 'gpt-4',\n * messages: [{ role: 'user', content: body.message }],\n * });\n * return {\n * statusCode: 200,\n * body: JSON.stringify(result.choices[0].message),\n * };\n * });\n *\n * @example\n * // With typed events\n * import type { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\n *\n * export const handler = withObserve<APIGatewayProxyEvent, APIGatewayProxyResult>(\n * async (event, context) => {\n * return { statusCode: 200, body: 'OK' };\n * }\n * );\n */\nexport function withObserve<TEvent = unknown, TResult = unknown>(\n handler: LambdaHandler<TEvent, TResult>\n): LambdaHandler<TEvent, TResult> {\n return async (event: TEvent, context: LambdaContext): Promise<TResult> => {\n try {\n return await handler(event, context);\n } finally {\n // Always flush - Lambda freezes immediately after return\n await flush();\n }\n };\n}\n"]}
|
package/dist/lambda.mjs
ADDED
@@ -0,0 +1,19 @@
+/* @lelemondev/sdk - LLM Observability */
+
+async function flush() {
+}
+
+// src/integrations/lambda.ts
+function withObserve(handler) {
+  return async (event, context) => {
+    try {
+      return await handler(event, context);
+    } finally {
+      await flush();
+    }
+  };
+}
+
+export { withObserve };
+//# sourceMappingURL=lambda.mjs.map
+//# sourceMappingURL=lambda.mjs.map
package/dist/lambda.mjs.map
ADDED
@@ -0,0 +1 @@
(same as lambda.js.map above apart from "file":"lambda.mjs" and the ESM mappings string)
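The config.ts source embedded in these maps shows init() building the transport from an apiKey option or the LELEMON_API_KEY env var, and flush() draining it. A sketch of a Lambda entry point under that assumption (init re-exported from the package root, as in the embedded source):

import { init } from '@lelemondev/sdk'; // assumption: root re-exports init() from core/config
import { withObserve } from '@lelemondev/sdk/lambda';

// Module scope runs once per cold start, so the transport exists before
// the first invocation; the API key falls back to LELEMON_API_KEY.
init();

export const handler = withObserve(async (event: unknown) => {
  return { statusCode: 200, body: 'OK' };
});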
package/dist/next-0nso_zEN.d.mts
ADDED
@@ -0,0 +1,94 @@
+/**
+ * Next.js App Router Integration
+ *
+ * Wraps route handlers to automatically flush traces.
+ * Supports Next.js 15+ `after()` and Vercel's `waitUntil()`.
+ *
+ * @example
+ * import { withObserve } from '@lelemondev/sdk/next';
+ *
+ * export const POST = withObserve(async (req) => {
+ *   const openai = observe(new OpenAI());
+ *   const result = await openai.chat.completions.create({...});
+ *   return Response.json(result);
+ * });
+ */
+type NextRouteHandler<TContext = unknown> = (request: Request, context?: TContext) => Response | Promise<Response>;
+/**
+ * Options for the Next.js wrapper
+ */
+interface NextObserveOptions {
+  /**
+   * Next.js 15+ after() function from 'next/server'
+   * Preferred method - runs after response without blocking
+   *
+   * @example
+   * import { after } from 'next/server';
+   * export const POST = withObserve(handler, { after });
+   */
+  after?: (callback: () => void | Promise<void>) => void;
+  /**
+   * Vercel's waitUntil() from '@vercel/functions'
+   * Alternative for Vercel deployments
+   *
+   * @example
+   * import { waitUntil } from '@vercel/functions';
+   * export const POST = withObserve(handler, { waitUntil });
+   */
+  waitUntil?: (promise: Promise<unknown>) => void;
+}
+/**
+ * Wrap a Next.js App Router handler with automatic trace flushing
+ *
+ * @param handler - Your route handler function
+ * @param options - Optional: pass `after` (Next.js 15+) or `waitUntil` (Vercel)
+ * @returns Wrapped handler that auto-flushes traces
+ *
+ * @example
+ * // Basic usage (blocking flush)
+ * export const POST = withObserve(async (req) => {
+ *   return Response.json({ ok: true });
+ * });
+ *
+ * @example
+ * // Next.js 15+ with after() - non-blocking (recommended)
+ * import { after } from 'next/server';
+ *
+ * export const POST = withObserve(
+ *   async (req) => Response.json({ ok: true }),
+ *   { after }
+ * );
+ *
+ * @example
+ * // Vercel with waitUntil() - non-blocking
+ * import { waitUntil } from '@vercel/functions';
+ *
+ * export const POST = withObserve(
+ *   async (req) => Response.json({ ok: true }),
+ *   { waitUntil }
+ * );
+ */
+declare function withObserve<TContext = unknown>(handler: NextRouteHandler<TContext>, options?: NextObserveOptions): NextRouteHandler<TContext>;
+/**
+ * Create a pre-configured wrapper with default options
+ *
+ * @example
+ * import { after } from 'next/server';
+ * import { createWrapper } from '@lelemondev/sdk/next';
+ *
+ * const withObserve = createWrapper({ after });
+ *
+ * export const POST = withObserve(async (req) => {
+ *   return Response.json({ ok: true });
+ * });
+ */
+declare function createWrapper(defaultOptions: NextObserveOptions): <TContext = unknown>(handler: NextRouteHandler<TContext>, options?: NextObserveOptions) => NextRouteHandler<TContext>;
+
+type next_NextObserveOptions = NextObserveOptions;
+declare const next_createWrapper: typeof createWrapper;
+declare const next_withObserve: typeof withObserve;
+declare namespace next {
+  export { type next_NextObserveOptions as NextObserveOptions, next_createWrapper as createWrapper, next_withObserve as withObserve };
+}
+
+export { type NextObserveOptions as N, createWrapper as c, next as n, withObserve as w };
package/dist/next-0nso_zEN.d.ts
ADDED
@@ -0,0 +1,94 @@
(content identical to package/dist/next-0nso_zEN.d.mts above)
package/dist/next.d.mts
ADDED
@@ -0,0 +1 @@
+export { N as NextObserveOptions, c as createWrapper, w as withObserve } from './next-0nso_zEN.mjs';
package/dist/next.d.ts
ADDED
@@ -0,0 +1 @@
+export { N as NextObserveOptions, c as createWrapper, w as withObserve } from './next-0nso_zEN.js';
package/dist/next.js
ADDED
@@ -0,0 +1,33 @@
+'use strict';
+
+/* @lelemondev/sdk - LLM Observability */
+
+async function flush() {
+}
+
+// src/integrations/next.ts
+function withObserve(handler, options) {
+  return async (request, context) => {
+    try {
+      return await handler(request, context);
+    } finally {
+      if (options?.after) {
+        options.after(() => flush());
+      } else if (options?.waitUntil) {
+        options.waitUntil(flush());
+      } else {
+        await flush();
+      }
+    }
+  };
+}
+function createWrapper(defaultOptions) {
+  return function(handler, options) {
+    return withObserve(handler, { ...defaultOptions, ...options });
+  };
+}
+
+exports.createWrapper = createWrapper;
+exports.withObserve = withObserve;
+//# sourceMappingURL=next.js.map
+//# sourceMappingURL=next.js.map
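The finally block above encodes the priority after() > waitUntil() > blocking await flush(). A sketch of the non-blocking setup the JSDoc recommends for Next.js 15+, in a hypothetical app/api/chat/route.ts:

// app/api/chat/route.ts (hypothetical path)
import { after } from 'next/server';
import { createWrapper } from '@lelemondev/sdk/next';

// Pre-configured wrapper: every route using it flushes via after(),
// i.e. once the response has been sent, instead of on the request path.
const withObserve = createWrapper({ after });

export const POST = withObserve(async (req: Request) => {
  const { message } = await req.json();
  return Response.json({ ok: true, received: message });
});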
package/dist/next.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/core/config.ts","../src/integrations/next.ts"],"names":[],"mappings":";;;;AAuEA,eAAsB,KAAA,GAAuB;AAI7C;;;ACYO,SAAS,WAAA,CACd,SACA,OAAA,EAC4B;AAC5B,EAAA,OAAO,OAAO,SAAkB,OAAA,KAA0C;AACxE,IAAA,IAAI;AACF,MAAA,OAAO,MAAM,OAAA,CAAQ,OAAA,EAAS,OAAO,CAAA;AAAA,IACvC,CAAA,SAAE;AAEA,MAAA,IAAI,SAAS,KAAA,EAAO;AAElB,QAAA,OAAA,CAAQ,KAAA,CAAM,MAAM,KAAA,EAAO,CAAA;AAAA,MAC7B,CAAA,MAAA,IAAW,SAAS,SAAA,EAAW;AAE7B,QAAA,OAAA,CAAQ,SAAA,CAAU,OAAO,CAAA;AAAA,MAC3B,CAAA,MAAO;AAEL,QAAA,MAAM,KAAA,EAAM;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAA;AACF;AAeO,SAAS,cAAc,cAAA,EAAoC;AAChE,EAAA,OAAO,SACL,SACA,OAAA,EAC4B;AAC5B,IAAA,OAAO,YAAY,OAAA,EAAS,EAAE,GAAG,cAAA,EAAgB,GAAG,SAAS,CAAA;AAAA,EAC/D,CAAA;AACF","file":"next.js","sourcesContent":["/**\n * Global Configuration\n *\n * Manages SDK configuration and transport instance.\n */\n\nimport type { LelemonConfig } from './types';\nimport { Transport } from './transport';\n\n// ─────────────────────────────────────────────────────────────\n// Global State\n// ─────────────────────────────────────────────────────────────\n\nlet globalConfig: LelemonConfig = {};\nlet globalTransport: Transport | null = null;\nlet initialized = false;\n\n// ─────────────────────────────────────────────────────────────\n// Configuration\n// ─────────────────────────────────────────────────────────────\n\nconst DEFAULT_ENDPOINT = 'https://api.lelemon.dev';\n\n/**\n * Initialize the SDK\n * Call once at app startup\n */\nexport function init(config: LelemonConfig = {}): void {\n globalConfig = config;\n globalTransport = createTransport(config);\n initialized = true;\n}\n\n/**\n * Get current config\n */\nexport function getConfig(): LelemonConfig {\n return globalConfig;\n}\n\n/**\n * Check if SDK is initialized\n */\nexport function isInitialized(): boolean {\n return initialized;\n}\n\n/**\n * Check if SDK is enabled\n */\nexport function isEnabled(): boolean {\n return getTransport().isEnabled();\n}\n\n// ─────────────────────────────────────────────────────────────\n// Transport\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Get or create transport instance\n */\nexport function getTransport(): Transport {\n if (!globalTransport) {\n globalTransport = createTransport(globalConfig);\n }\n return globalTransport;\n}\n\n/**\n * Flush all pending traces\n */\nexport async function flush(): Promise<void> {\n if (globalTransport) {\n await globalTransport.flush();\n }\n}\n\n/**\n * Create transport instance\n */\nfunction createTransport(config: LelemonConfig): Transport {\n const apiKey = config.apiKey ?? getEnvVar('LELEMON_API_KEY');\n\n if (!apiKey && !config.disabled) {\n console.warn(\n '[Lelemon] No API key provided. Set apiKey in init() or LELEMON_API_KEY env var. Tracing disabled.'\n );\n }\n\n return new Transport({\n apiKey: apiKey ?? '',\n endpoint: config.endpoint ?? DEFAULT_ENDPOINT,\n debug: config.debug ?? false,\n disabled: config.disabled ?? 
!apiKey,\n batchSize: config.batchSize,\n flushIntervalMs: config.flushIntervalMs,\n requestTimeoutMs: config.requestTimeoutMs,\n });\n}\n\n/**\n * Get environment variable (works in Node and edge)\n */\nfunction getEnvVar(name: string): string | undefined {\n if (typeof process !== 'undefined' && process.env) {\n return process.env[name];\n }\n return undefined;\n}\n","/**\n * Next.js App Router Integration\n *\n * Wraps route handlers to automatically flush traces.\n * Supports Next.js 15+ `after()` and Vercel's `waitUntil()`.\n *\n * @example\n * import { withObserve } from '@lelemondev/sdk/next';\n *\n * export const POST = withObserve(async (req) => {\n * const openai = observe(new OpenAI());\n * const result = await openai.chat.completions.create({...});\n * return Response.json(result);\n * });\n */\n\nimport { flush } from '../core/config';\n\n// ─────────────────────────────────────────────────────────────\n// Types\n// ─────────────────────────────────────────────────────────────\n\ntype NextRouteHandler<TContext = unknown> = (\n request: Request,\n context?: TContext\n) => Response | Promise<Response>;\n\n/**\n * Options for the Next.js wrapper\n */\nexport interface NextObserveOptions {\n /**\n * Next.js 15+ after() function from 'next/server'\n * Preferred method - runs after response without blocking\n *\n * @example\n * import { after } from 'next/server';\n * export const POST = withObserve(handler, { after });\n */\n after?: (callback: () => void | Promise<void>) => void;\n\n /**\n * Vercel's waitUntil() from '@vercel/functions'\n * Alternative for Vercel deployments\n *\n * @example\n * import { waitUntil } from '@vercel/functions';\n * export const POST = withObserve(handler, { waitUntil });\n */\n waitUntil?: (promise: Promise<unknown>) => void;\n}\n\n// ─────────────────────────────────────────────────────────────\n// Wrapper\n// ─────────────────────────────────────────────────────────────\n\n/**\n * Wrap a Next.js App Router handler with automatic trace flushing\n *\n * @param handler - Your route handler function\n * @param options - Optional: pass `after` (Next.js 15+) or `waitUntil` (Vercel)\n * @returns Wrapped handler that auto-flushes traces\n *\n * @example\n * // Basic usage (blocking flush)\n * export const POST = withObserve(async (req) => {\n * return Response.json({ ok: true });\n * });\n *\n * @example\n * // Next.js 15+ with after() - non-blocking (recommended)\n * import { after } from 'next/server';\n *\n * export const POST = withObserve(\n * async (req) => Response.json({ ok: true }),\n * { after }\n * );\n *\n * @example\n * // Vercel with waitUntil() - non-blocking\n * import { waitUntil } from '@vercel/functions';\n *\n * export const POST = withObserve(\n * async (req) => Response.json({ ok: true }),\n * { waitUntil }\n * );\n */\nexport function withObserve<TContext = unknown>(\n handler: NextRouteHandler<TContext>,\n options?: NextObserveOptions\n): NextRouteHandler<TContext> {\n return async (request: Request, context?: TContext): Promise<Response> => {\n try {\n return await handler(request, context);\n } finally {\n // Priority: after() > waitUntil() > blocking flush\n if (options?.after) {\n // Next.js 15+ native - best option\n options.after(() => flush());\n } else if (options?.waitUntil) {\n // Vercel platform\n options.waitUntil(flush());\n } else {\n // Fallback: blocking flush\n await flush();\n }\n }\n };\n}\n\n/**\n * Create a pre-configured wrapper with default options\n *\n * @example\n * import { after } from 'next/server';\n * 
import { createWrapper } from '@lelemondev/sdk/next';\n *\n * const withObserve = createWrapper({ after });\n *\n * export const POST = withObserve(async (req) => {\n * return Response.json({ ok: true });\n * });\n */\nexport function createWrapper(defaultOptions: NextObserveOptions) {\n return function <TContext = unknown>(\n handler: NextRouteHandler<TContext>,\n options?: NextObserveOptions\n ): NextRouteHandler<TContext> {\n return withObserve(handler, { ...defaultOptions, ...options });\n };\n}\n"]}
|
package/dist/next.mjs
ADDED
@@ -0,0 +1,30 @@
+/* @lelemondev/sdk - LLM Observability */
+
+async function flush() {
+}
+
+// src/integrations/next.ts
+function withObserve(handler, options) {
+  return async (request, context) => {
+    try {
+      return await handler(request, context);
+    } finally {
+      if (options?.after) {
+        options.after(() => flush());
+      } else if (options?.waitUntil) {
+        options.waitUntil(flush());
+      } else {
+        await flush();
+      }
+    }
+  };
+}
+function createWrapper(defaultOptions) {
+  return function(handler, options) {
+    return withObserve(handler, { ...defaultOptions, ...options });
+  };
+}
+
+export { createWrapper, withObserve };
+//# sourceMappingURL=next.mjs.map
+//# sourceMappingURL=next.mjs.map
package/dist/next.mjs.map
ADDED
@@ -0,0 +1 @@
(same as next.js.map above apart from "file":"next.mjs" and the ESM mappings string)
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@lelemondev/sdk",
-  "version": "0.2.1",
-  "description": "
+  "version": "0.4.0",
+  "description": "Automatic LLM observability. Wrap your client, everything is traced.",
   "author": "Lelemon <info@lelemon.dev>",
   "license": "MIT",
   "repository": {
@@ -18,15 +18,15 @@
     "tracing",
     "openai",
     "anthropic",
-    "
-    "
+    "nextjs",
+    "lambda",
+    "express",
+    "hono",
     "claude",
     "gpt",
     "ai",
     "monitoring",
-    "
-    "langchain",
-    "agent"
+    "serverless"
   ],
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",
@@ -34,19 +34,44 @@
   "exports": {
     ".": {
       "types": "./dist/index.d.ts",
-      "bun": "./dist/index.mjs",
-      "deno": "./dist/index.mjs",
       "import": "./dist/index.mjs",
-      "require": "./dist/index.js"
-
+      "require": "./dist/index.js"
+    },
+    "./next": {
+      "types": "./dist/next.d.ts",
+      "import": "./dist/next.mjs",
+      "require": "./dist/next.js"
+    },
+    "./lambda": {
+      "types": "./dist/lambda.d.ts",
+      "import": "./dist/lambda.mjs",
+      "require": "./dist/lambda.js"
+    },
+    "./express": {
+      "types": "./dist/express.d.ts",
+      "import": "./dist/express.mjs",
+      "require": "./dist/express.js"
+    },
+    "./hono": {
+      "types": "./dist/hono.d.ts",
+      "import": "./dist/hono.mjs",
+      "require": "./dist/hono.js"
+    },
+    "./integrations": {
+      "types": "./dist/integrations.d.ts",
+      "import": "./dist/integrations.mjs",
+      "require": "./dist/integrations.js"
     },
     "./package.json": "./package.json"
   },
   "typesVersions": {
     "*": {
-      "
-
-      ]
+      "next": ["./dist/next.d.ts"],
+      "lambda": ["./dist/lambda.d.ts"],
+      "express": ["./dist/express.d.ts"],
+      "hono": ["./dist/hono.d.ts"],
+      "integrations": ["./dist/integrations.d.ts"],
+      "*": ["./dist/index.d.ts"]
     }
   },
   "files": [