tokenmeter 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +346 -0
- package/dist/__tests__/context.test.d.ts +2 -0
- package/dist/__tests__/context.test.d.ts.map +1 -0
- package/dist/__tests__/context.test.js +94 -0
- package/dist/__tests__/context.test.js.map +1 -0
- package/dist/__tests__/elevenlabs.test.d.ts +2 -0
- package/dist/__tests__/elevenlabs.test.d.ts.map +1 -0
- package/dist/__tests__/elevenlabs.test.js +108 -0
- package/dist/__tests__/elevenlabs.test.js.map +1 -0
- package/dist/__tests__/fal.test.d.ts +2 -0
- package/dist/__tests__/fal.test.d.ts.map +1 -0
- package/dist/__tests__/fal.test.js +153 -0
- package/dist/__tests__/fal.test.js.map +1 -0
- package/dist/__tests__/pricing.test.d.ts +2 -0
- package/dist/__tests__/pricing.test.d.ts.map +1 -0
- package/dist/__tests__/pricing.test.js +76 -0
- package/dist/__tests__/pricing.test.js.map +1 -0
- package/dist/__tests__/recorder.test.d.ts +2 -0
- package/dist/__tests__/recorder.test.d.ts.map +1 -0
- package/dist/__tests__/recorder.test.js +133 -0
- package/dist/__tests__/recorder.test.js.map +1 -0
- package/dist/__tests__/storage.test.d.ts +2 -0
- package/dist/__tests__/storage.test.d.ts.map +1 -0
- package/dist/__tests__/storage.test.js +106 -0
- package/dist/__tests__/storage.test.js.map +1 -0
- package/dist/client/index.d.ts +8 -0
- package/dist/client/index.d.ts.map +1 -0
- package/dist/client/index.js +7 -0
- package/dist/client/index.js.map +1 -0
- package/dist/config.d.ts +92 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +166 -0
- package/dist/config.js.map +1 -0
- package/dist/context.d.ts +80 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/context.js +131 -0
- package/dist/context.js.map +1 -0
- package/dist/exporter/PostgresExporter.d.ts +82 -0
- package/dist/exporter/PostgresExporter.d.ts.map +1 -0
- package/dist/exporter/PostgresExporter.js +237 -0
- package/dist/exporter/PostgresExporter.js.map +1 -0
- package/dist/exporter/index.d.ts +8 -0
- package/dist/exporter/index.d.ts.map +1 -0
- package/dist/exporter/index.js +7 -0
- package/dist/exporter/index.js.map +1 -0
- package/dist/index.d.ts +31 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +37 -0
- package/dist/index.js.map +1 -0
- package/dist/instrumentation/proxy.d.ts +26 -0
- package/dist/instrumentation/proxy.d.ts.map +1 -0
- package/dist/instrumentation/proxy.js +337 -0
- package/dist/instrumentation/proxy.js.map +1 -0
- package/dist/instrumentation/strategies/index.d.ts +55 -0
- package/dist/instrumentation/strategies/index.d.ts.map +1 -0
- package/dist/instrumentation/strategies/index.js +429 -0
- package/dist/instrumentation/strategies/index.js.map +1 -0
- package/dist/integrations/express/index.d.ts +137 -0
- package/dist/integrations/express/index.d.ts.map +1 -0
- package/dist/integrations/express/index.js +186 -0
- package/dist/integrations/express/index.js.map +1 -0
- package/dist/integrations/inngest/index.d.ts +222 -0
- package/dist/integrations/inngest/index.d.ts.map +1 -0
- package/dist/integrations/inngest/index.js +223 -0
- package/dist/integrations/inngest/index.js.map +1 -0
- package/dist/integrations/langfuse/index.d.ts +170 -0
- package/dist/integrations/langfuse/index.d.ts.map +1 -0
- package/dist/integrations/langfuse/index.js +225 -0
- package/dist/integrations/langfuse/index.js.map +1 -0
- package/dist/integrations/next/index.d.ts +138 -0
- package/dist/integrations/next/index.d.ts.map +1 -0
- package/dist/integrations/next/index.js +170 -0
- package/dist/integrations/next/index.js.map +1 -0
- package/dist/integrations/nextjs/index.d.ts +198 -0
- package/dist/integrations/nextjs/index.d.ts.map +1 -0
- package/dist/integrations/nextjs/index.js +181 -0
- package/dist/integrations/nextjs/index.js.map +1 -0
- package/dist/integrations/vercel-ai/index.d.ts +288 -0
- package/dist/integrations/vercel-ai/index.d.ts.map +1 -0
- package/dist/integrations/vercel-ai/index.js +260 -0
- package/dist/integrations/vercel-ai/index.js.map +1 -0
- package/dist/logger.d.ts +58 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +89 -0
- package/dist/logger.js.map +1 -0
- package/dist/pricing/catalog.d.ts +10 -0
- package/dist/pricing/catalog.d.ts.map +1 -0
- package/dist/pricing/catalog.js +297 -0
- package/dist/pricing/catalog.js.map +1 -0
- package/dist/pricing/index.d.ts +77 -0
- package/dist/pricing/index.d.ts.map +1 -0
- package/dist/pricing/index.js +251 -0
- package/dist/pricing/index.js.map +1 -0
- package/dist/pricing/manifest.d.ts +156 -0
- package/dist/pricing/manifest.d.ts.map +1 -0
- package/dist/pricing/manifest.js +381 -0
- package/dist/pricing/manifest.js.map +1 -0
- package/dist/pricing/manifest.json +12786 -0
- package/dist/pricing/providers/anthropic.json +253 -0
- package/dist/pricing/providers/bedrock.json +341 -0
- package/dist/pricing/providers/bfl.json +220 -0
- package/dist/pricing/providers/elevenlabs.json +142 -0
- package/dist/pricing/providers/fal.json +15866 -0
- package/dist/pricing/providers/google.json +346 -0
- package/dist/pricing/providers/openai.json +1035 -0
- package/dist/pricing/schema.d.ts +102 -0
- package/dist/pricing/schema.d.ts.map +1 -0
- package/dist/pricing/schema.js +56 -0
- package/dist/pricing/schema.js.map +1 -0
- package/dist/processor/TokenMeterProcessor.d.ts +55 -0
- package/dist/processor/TokenMeterProcessor.d.ts.map +1 -0
- package/dist/processor/TokenMeterProcessor.js +132 -0
- package/dist/processor/TokenMeterProcessor.js.map +1 -0
- package/dist/query/client.d.ts +61 -0
- package/dist/query/client.d.ts.map +1 -0
- package/dist/query/client.js +206 -0
- package/dist/query/client.js.map +1 -0
- package/dist/query/index.d.ts +8 -0
- package/dist/query/index.d.ts.map +1 -0
- package/dist/query/index.js +7 -0
- package/dist/query/index.js.map +1 -0
- package/dist/recorder.d.ts +74 -0
- package/dist/recorder.d.ts.map +1 -0
- package/dist/recorder.js +227 -0
- package/dist/recorder.js.map +1 -0
- package/dist/sdks/anthropic.d.ts +21 -0
- package/dist/sdks/anthropic.d.ts.map +1 -0
- package/dist/sdks/anthropic.js +258 -0
- package/dist/sdks/anthropic.js.map +1 -0
- package/dist/sdks/elevenlabs.d.ts +59 -0
- package/dist/sdks/elevenlabs.d.ts.map +1 -0
- package/dist/sdks/elevenlabs.js +192 -0
- package/dist/sdks/elevenlabs.js.map +1 -0
- package/dist/sdks/fal.d.ts +102 -0
- package/dist/sdks/fal.d.ts.map +1 -0
- package/dist/sdks/fal.js +306 -0
- package/dist/sdks/fal.js.map +1 -0
- package/dist/sdks/openai.d.ts +17 -0
- package/dist/sdks/openai.d.ts.map +1 -0
- package/dist/sdks/openai.js +191 -0
- package/dist/sdks/openai.js.map +1 -0
- package/dist/storage/interface.d.ts +15 -0
- package/dist/storage/interface.d.ts.map +1 -0
- package/dist/storage/interface.js +53 -0
- package/dist/storage/interface.js.map +1 -0
- package/dist/storage/prisma.d.ts +15 -0
- package/dist/storage/prisma.d.ts.map +1 -0
- package/dist/storage/prisma.js +135 -0
- package/dist/storage/prisma.js.map +1 -0
- package/dist/types.d.ts +206 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +45 -0
- package/dist/types.js.map +1 -0
- package/dist/vercel-ai/index.d.ts +89 -0
- package/dist/vercel-ai/index.d.ts.map +1 -0
- package/dist/vercel-ai/index.js +298 -0
- package/dist/vercel-ai/index.js.map +1 -0
- package/package.json +119 -0
package/dist/integrations/vercel-ai/index.d.ts
ADDED
@@ -0,0 +1,288 @@
/**
 * Vercel AI SDK Integration
 *
 * Provides a non-invasive integration with the Vercel AI SDK using
 * the built-in experimental_telemetry feature. No import changes required.
 *
 * @example
 * ```typescript
 * import { generateText } from 'ai';
 * import { openai } from '@ai-sdk/openai';
 * import { telemetry } from 'tokenmeter/vercel-ai';
 *
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry({
 *     userId: 'user_123',
 *     orgId: 'org_456',
 *   }),
 * });
 * ```
 */
import { type Tracer } from "@opentelemetry/api";
/**
 * Telemetry settings expected by Vercel AI SDK
 */
export interface TelemetrySettings {
    /**
     * Enable telemetry collection.
     * @default true
     */
    isEnabled?: boolean;
    /**
     * Identifier for this function call (appears in spans).
     */
    functionId?: string;
    /**
     * Custom metadata to include in telemetry.
     * These become span attributes prefixed with `ai.telemetry.metadata.`
     */
    metadata?: Record<string, string | number | boolean>;
    /**
     * Whether to record input values (prompts, messages).
     * Disable for privacy or performance.
     * @default true
     */
    recordInputs?: boolean;
    /**
     * Whether to record output values (responses).
     * Disable for privacy or performance.
     * @default true
     */
    recordOutputs?: boolean;
    /**
     * Custom OpenTelemetry tracer to use.
     * If not provided, uses the default tracer.
     */
    tracer?: Tracer;
}
/**
 * Options for tokenmeter telemetry configuration
 */
export interface TokenMeterTelemetryOptions {
    /**
     * User ID for cost attribution.
     * Will be added to span metadata as `userId`.
     */
    userId?: string;
    /**
     * Organization ID for cost attribution.
     * Will be added to span metadata as `orgId`.
     */
    orgId?: string;
    /**
     * Workflow ID for grouping related calls.
     * Will be added to span metadata as `workflowId`.
     */
    workflowId?: string;
    /**
     * Function identifier for the telemetry span.
     */
    functionId?: string;
    /**
     * Additional metadata to include.
     */
    metadata?: Record<string, string | number | boolean | undefined>;
    /**
     * Whether to record inputs (prompts/messages).
     * @default true
     */
    recordInputs?: boolean;
    /**
     * Whether to record outputs (responses).
     * @default true
     */
    recordOutputs?: boolean;
}
/**
 * Create telemetry configuration for Vercel AI SDK with tokenmeter attributes.
 *
 * This is the recommended way to integrate tokenmeter with the Vercel AI SDK.
 * It uses the SDK's built-in telemetry feature, requiring no import changes.
 *
 * The SDK will emit OpenTelemetry spans with usage data that can be processed
 * by the TokenMeterProcessor to calculate costs.
 *
 * @example Basic usage
 * ```typescript
 * import { generateText } from 'ai';
 * import { openai } from '@ai-sdk/openai';
 * import { telemetry } from 'tokenmeter/vercel-ai';
 *
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry(),
 * });
 * ```
 *
 * @example With user attribution
 * ```typescript
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry({
 *     userId: 'user_123',
 *     orgId: 'org_456',
 *     workflowId: 'chat-session-789',
 *   }),
 * });
 * ```
 *
 * @example With custom metadata
 * ```typescript
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry({
 *     userId: currentUser.id,
 *     metadata: {
 *       feature: 'chat',
 *       tier: 'premium',
 *     },
 *   }),
 * });
 * ```
 *
 * @example Disable input/output recording for privacy
 * ```typescript
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: sensitivePrompt,
 *   experimental_telemetry: telemetry({
 *     userId: 'user_123',
 *     recordInputs: false,
 *     recordOutputs: false,
 *   }),
 * });
 * ```
 */
export declare function telemetry(options?: TokenMeterTelemetryOptions): TelemetrySettings;
/**
 * Create a reusable telemetry configuration factory.
 *
 * Useful when you have common attributes across multiple calls.
 *
 * @example
 * ```typescript
 * import { createTelemetry } from 'tokenmeter/vercel-ai';
 *
 * // Create once with common options
 * const withTelemetry = createTelemetry({
 *   orgId: 'org_456',
 *   recordInputs: false, // Privacy setting
 * });
 *
 * // Use in multiple calls
 * await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: withTelemetry({ userId: 'user_123' }),
 * });
 *
 * await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Goodbye!',
 *   experimental_telemetry: withTelemetry({ userId: 'user_456' }),
 * });
 * ```
 */
export declare function createTelemetry(defaultOptions?: TokenMeterTelemetryOptions): (options?: TokenMeterTelemetryOptions) => TelemetrySettings;
/**
 * Vercel AI SDK span attribute names.
 *
 * These are the attributes emitted by the Vercel AI SDK when telemetry is enabled.
 * Use these constants when building custom processors or exporters.
 */
export declare const VERCEL_AI_ATTRIBUTES: {
    /** Model identifier (e.g., "gpt-4o") */
    readonly MODEL_ID: "ai.model.id";
    /** Provider name (e.g., "openai") */
    readonly MODEL_PROVIDER: "ai.model.provider";
    /** Input/prompt tokens used */
    readonly USAGE_PROMPT_TOKENS: "ai.usage.promptTokens";
    /** Output/completion tokens used */
    readonly USAGE_COMPLETION_TOKENS: "ai.usage.completionTokens";
    /** Function identifier from telemetry config */
    readonly FUNCTION_ID: "ai.telemetry.functionId";
    /** Finish reason (e.g., "stop", "length") */
    readonly FINISH_REASON: "ai.response.finishReason";
    /** The prompt text (if recordInputs is true) */
    readonly PROMPT: "ai.prompt";
    /** The response text (if recordOutputs is true) */
    readonly RESPONSE_TEXT: "ai.response.text";
};
/**
 * Span names used by the Vercel AI SDK.
 *
 * Use these to filter/identify Vercel AI SDK spans in processors.
 */
export declare const VERCEL_AI_SPAN_NAMES: {
    /** Top-level generateText span */
    readonly GENERATE_TEXT: "ai.generateText";
    /** Provider-level doGenerate span */
    readonly GENERATE_TEXT_DO_GENERATE: "ai.generateText.doGenerate";
    /** Top-level streamText span */
    readonly STREAM_TEXT: "ai.streamText";
    /** Provider-level doStream span */
    readonly STREAM_TEXT_DO_STREAM: "ai.streamText.doStream";
    /** Top-level generateObject span */
    readonly GENERATE_OBJECT: "ai.generateObject";
    /** Provider-level doGenerate span for objects */
    readonly GENERATE_OBJECT_DO_GENERATE: "ai.generateObject.doGenerate";
    /** Top-level streamObject span */
    readonly STREAM_OBJECT: "ai.streamObject";
    /** Tool call span */
    readonly TOOL_CALL: "ai.toolCall";
    /** Embedding span */
    readonly EMBED: "ai.embed";
    /** Batch embedding span */
    readonly EMBED_MANY: "ai.embedMany";
};
/**
 * Check if a span name is from the Vercel AI SDK.
 *
 * Useful in custom SpanProcessors to identify Vercel AI spans.
 *
 * @example
 * ```typescript
 * import { isVercelAISpan } from 'tokenmeter/vercel-ai';
 *
 * class MyProcessor implements SpanProcessor {
 *   onEnd(span: ReadableSpan) {
 *     if (isVercelAISpan(span.name)) {
 *       // Process Vercel AI SDK span
 *     }
 *   }
 * }
 * ```
 */
export declare function isVercelAISpan(spanName: string): boolean;
/**
 * Extract provider and model from Vercel AI SDK span attributes.
 *
 * @example
 * ```typescript
 * const { provider, model } = extractModelInfo(span.attributes);
 * // provider: "openai", model: "gpt-4o"
 * ```
 */
export declare function extractModelInfo(attributes: Record<string, unknown>): {
    provider: string;
    model: string;
};
/**
 * Extract usage data from Vercel AI SDK span attributes.
 *
 * @example
 * ```typescript
 * const usage = extractUsage(span.attributes);
 * // { promptTokens: 100, completionTokens: 50 }
 * ```
 */
export declare function extractUsage(attributes: Record<string, unknown>): {
    promptTokens: number | undefined;
    completionTokens: number | undefined;
};
//# sourceMappingURL=index.d.ts.map
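Editor's note: the helpers declared above compose naturally inside a custom OpenTelemetry span processor. The sketch below is illustrative only, assuming the standard `SpanProcessor` interface from `@opentelemetry/sdk-trace-base`; the class name and the console logging are hypothetical, and tokenmeter's own `TokenMeterProcessor` (see `dist/processor/`) is the packaged equivalent.

```typescript
// Sketch: filter Vercel AI SDK spans and read their model/usage attributes
// using the helpers exported from tokenmeter/vercel-ai.
import type { Context } from "@opentelemetry/api";
import type { ReadableSpan, Span, SpanProcessor } from "@opentelemetry/sdk-trace-base";
import { extractModelInfo, extractUsage, isVercelAISpan } from "tokenmeter/vercel-ai";

class UsageLoggingProcessor implements SpanProcessor {
  onStart(_span: Span, _parentContext: Context): void {}

  onEnd(span: ReadableSpan): void {
    if (!isVercelAISpan(span.name)) return;

    const { provider, model } = extractModelInfo(span.attributes);
    const { promptTokens, completionTokens } = extractUsage(span.attributes);

    // Skip spans that carry no usage data (e.g., tool-call spans).
    if (promptTokens === undefined && completionTokens === undefined) return;

    console.log(`${provider}/${model}: in=${promptTokens ?? 0} out=${completionTokens ?? 0}`);
  }

  shutdown(): Promise<void> {
    return Promise.resolve();
  }

  forceFlush(): Promise<void> {
    return Promise.resolve();
  }
}
```

Registering such a processor on a `NodeTracerProvider` (or via the NodeSDK's `spanProcessors` option) is the usual way to put it in the pipeline.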
package/dist/integrations/vercel-ai/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/integrations/vercel-ai/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;GAqBG;AAEH,OAAO,EAAS,KAAK,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;;OAGG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC,CAAC;IAErD;;;;OAIG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;;OAIG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IAExB;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AACH,MAAM,WAAW,0BAA0B;IACzC;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,SAAS,CAAC,CAAC;IAEjE;;;OAGG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8DG;AACH,wBAAgB,SAAS,CACvB,OAAO,GAAE,0BAA+B,GACvC,iBAAiB,CAgCnB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAgB,eAAe,CAC7B,cAAc,GAAE,0BAA+B,GAC9C,CAAC,OAAO,CAAC,EAAE,0BAA0B,KAAK,iBAAiB,CAW7D;AAED;;;;;GAKG;AACH,eAAO,MAAM,oBAAoB;IAC/B,wCAAwC;;IAExC,qCAAqC;;IAErC,+BAA+B;;IAE/B,oCAAoC;;IAEpC,gDAAgD;;IAEhD,6CAA6C;;IAE7C,gDAAgD;;IAEhD,mDAAmD;;CAE3C,CAAC;AAEX;;;;GAIG;AACH,eAAO,MAAM,oBAAoB;IAC/B,kCAAkC;;IAElC,qCAAqC;;IAErC,gCAAgC;;IAEhC,mCAAmC;;IAEnC,oCAAoC;;IAEpC,iDAAiD;;IAEjD,kCAAkC;;IAElC,qBAAqB;;IAErB,qBAAqB;;IAErB,2BAA2B;;CAEnB,CAAC;AAEX;;;;;;;;;;;;;;;;;GAiBG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAExD;AAED;;;;;;;;GAQG;AACH,wBAAgB,gBAAgB,CAAC,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG;IACrE,QAAQ,EAAE,MAAM,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;CACf,CAMA;AAED;;;;;;;;GAQG;AACH,wBAAgB,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG;IACjE,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,gBAAgB,EAAE,MAAM,GAAG,SAAS,CAAC;CACtC,CASA"}
package/dist/integrations/vercel-ai/index.js
ADDED
@@ -0,0 +1,260 @@
/**
 * Vercel AI SDK Integration
 *
 * Provides a non-invasive integration with the Vercel AI SDK using
 * the built-in experimental_telemetry feature. No import changes required.
 *
 * @example
 * ```typescript
 * import { generateText } from 'ai';
 * import { openai } from '@ai-sdk/openai';
 * import { telemetry } from 'tokenmeter/vercel-ai';
 *
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry({
 *     userId: 'user_123',
 *     orgId: 'org_456',
 *   }),
 * });
 * ```
 */
import { trace } from "@opentelemetry/api";
/**
 * Create telemetry configuration for Vercel AI SDK with tokenmeter attributes.
 *
 * This is the recommended way to integrate tokenmeter with the Vercel AI SDK.
 * It uses the SDK's built-in telemetry feature, requiring no import changes.
 *
 * The SDK will emit OpenTelemetry spans with usage data that can be processed
 * by the TokenMeterProcessor to calculate costs.
 *
 * @example Basic usage
 * ```typescript
 * import { generateText } from 'ai';
 * import { openai } from '@ai-sdk/openai';
 * import { telemetry } from 'tokenmeter/vercel-ai';
 *
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry(),
 * });
 * ```
 *
 * @example With user attribution
 * ```typescript
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry({
 *     userId: 'user_123',
 *     orgId: 'org_456',
 *     workflowId: 'chat-session-789',
 *   }),
 * });
 * ```
 *
 * @example With custom metadata
 * ```typescript
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: telemetry({
 *     userId: currentUser.id,
 *     metadata: {
 *       feature: 'chat',
 *       tier: 'premium',
 *     },
 *   }),
 * });
 * ```
 *
 * @example Disable input/output recording for privacy
 * ```typescript
 * const result = await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: sensitivePrompt,
 *   experimental_telemetry: telemetry({
 *     userId: 'user_123',
 *     recordInputs: false,
 *     recordOutputs: false,
 *   }),
 * });
 * ```
 */
export function telemetry(options = {}) {
    // Build metadata object with tokenmeter attributes
    const metadata = {};
    // Copy user-provided metadata, filtering out undefined values
    if (options.metadata) {
        for (const [key, value] of Object.entries(options.metadata)) {
            if (value !== undefined) {
                metadata[key] = value;
            }
        }
    }
    // Add tokenmeter-specific attributes
    if (options.userId) {
        metadata["tokenmeter.user_id"] = options.userId;
    }
    if (options.orgId) {
        metadata["tokenmeter.org_id"] = options.orgId;
    }
    if (options.workflowId) {
        metadata["tokenmeter.workflow_id"] = options.workflowId;
    }
    return {
        isEnabled: true,
        functionId: options.functionId,
        metadata: Object.keys(metadata).length > 0 ? metadata : undefined,
        recordInputs: options.recordInputs ?? true,
        recordOutputs: options.recordOutputs ?? true,
        tracer: trace.getTracer("tokenmeter", "5.0.0"),
    };
}
/**
 * Create a reusable telemetry configuration factory.
 *
 * Useful when you have common attributes across multiple calls.
 *
 * @example
 * ```typescript
 * import { createTelemetry } from 'tokenmeter/vercel-ai';
 *
 * // Create once with common options
 * const withTelemetry = createTelemetry({
 *   orgId: 'org_456',
 *   recordInputs: false, // Privacy setting
 * });
 *
 * // Use in multiple calls
 * await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Hello!',
 *   experimental_telemetry: withTelemetry({ userId: 'user_123' }),
 * });
 *
 * await generateText({
 *   model: openai('gpt-4o'),
 *   prompt: 'Goodbye!',
 *   experimental_telemetry: withTelemetry({ userId: 'user_456' }),
 * });
 * ```
 */
export function createTelemetry(defaultOptions = {}) {
    return (options) => {
        return telemetry({
            ...defaultOptions,
            ...options,
            metadata: {
                ...defaultOptions.metadata,
                ...options?.metadata,
            },
        });
    };
}
/**
 * Vercel AI SDK span attribute names.
 *
 * These are the attributes emitted by the Vercel AI SDK when telemetry is enabled.
 * Use these constants when building custom processors or exporters.
 */
export const VERCEL_AI_ATTRIBUTES = {
    /** Model identifier (e.g., "gpt-4o") */
    MODEL_ID: "ai.model.id",
    /** Provider name (e.g., "openai") */
    MODEL_PROVIDER: "ai.model.provider",
    /** Input/prompt tokens used */
    USAGE_PROMPT_TOKENS: "ai.usage.promptTokens",
    /** Output/completion tokens used */
    USAGE_COMPLETION_TOKENS: "ai.usage.completionTokens",
    /** Function identifier from telemetry config */
    FUNCTION_ID: "ai.telemetry.functionId",
    /** Finish reason (e.g., "stop", "length") */
    FINISH_REASON: "ai.response.finishReason",
    /** The prompt text (if recordInputs is true) */
    PROMPT: "ai.prompt",
    /** The response text (if recordOutputs is true) */
    RESPONSE_TEXT: "ai.response.text",
};
/**
 * Span names used by the Vercel AI SDK.
 *
 * Use these to filter/identify Vercel AI SDK spans in processors.
 */
export const VERCEL_AI_SPAN_NAMES = {
    /** Top-level generateText span */
    GENERATE_TEXT: "ai.generateText",
    /** Provider-level doGenerate span */
    GENERATE_TEXT_DO_GENERATE: "ai.generateText.doGenerate",
    /** Top-level streamText span */
    STREAM_TEXT: "ai.streamText",
    /** Provider-level doStream span */
    STREAM_TEXT_DO_STREAM: "ai.streamText.doStream",
    /** Top-level generateObject span */
    GENERATE_OBJECT: "ai.generateObject",
    /** Provider-level doGenerate span for objects */
    GENERATE_OBJECT_DO_GENERATE: "ai.generateObject.doGenerate",
    /** Top-level streamObject span */
    STREAM_OBJECT: "ai.streamObject",
    /** Tool call span */
    TOOL_CALL: "ai.toolCall",
    /** Embedding span */
    EMBED: "ai.embed",
    /** Batch embedding span */
    EMBED_MANY: "ai.embedMany",
};
/**
 * Check if a span name is from the Vercel AI SDK.
 *
 * Useful in custom SpanProcessors to identify Vercel AI spans.
 *
 * @example
 * ```typescript
 * import { isVercelAISpan } from 'tokenmeter/vercel-ai';
 *
 * class MyProcessor implements SpanProcessor {
 *   onEnd(span: ReadableSpan) {
 *     if (isVercelAISpan(span.name)) {
 *       // Process Vercel AI SDK span
 *     }
 *   }
 * }
 * ```
 */
export function isVercelAISpan(spanName) {
    return spanName.startsWith("ai.");
}
/**
 * Extract provider and model from Vercel AI SDK span attributes.
 *
 * @example
 * ```typescript
 * const { provider, model } = extractModelInfo(span.attributes);
 * // provider: "openai", model: "gpt-4o"
 * ```
 */
export function extractModelInfo(attributes) {
    return {
        provider: attributes[VERCEL_AI_ATTRIBUTES.MODEL_PROVIDER] || "unknown",
        model: attributes[VERCEL_AI_ATTRIBUTES.MODEL_ID] || "unknown",
    };
}
/**
 * Extract usage data from Vercel AI SDK span attributes.
 *
 * @example
 * ```typescript
 * const usage = extractUsage(span.attributes);
 * // { promptTokens: 100, completionTokens: 50 }
 * ```
 */
export function extractUsage(attributes) {
    return {
        promptTokens: attributes[VERCEL_AI_ATTRIBUTES.USAGE_PROMPT_TOKENS],
        completionTokens: attributes[VERCEL_AI_ATTRIBUTES.USAGE_COMPLETION_TOKENS],
    };
}
//# sourceMappingURL=index.js.map
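Editor's note: given the implementation above, the shape of the returned TelemetrySettings can be pinned down concretely. A minimal sketch using node:assert (not one of the package's own tests) showing the metadata merging and the recording defaults:

```typescript
// Sketch: exercise telemetry() and check the documented behaviour of the
// implementation above with plain Node assertions.
import assert from "node:assert/strict";
import { telemetry } from "tokenmeter/vercel-ai";

const settings = telemetry({
  userId: "user_123",
  orgId: "org_456",
  metadata: { feature: "chat", skipped: undefined },
});

// Telemetry is switched on, and inputs/outputs are recorded unless disabled.
assert.equal(settings.isEnabled, true);
assert.equal(settings.recordInputs, true);
assert.equal(settings.recordOutputs, true);

// tokenmeter attribution keys are merged into the metadata map,
// and undefined user-supplied metadata values are dropped.
assert.deepEqual(settings.metadata, {
  feature: "chat",
  "tokenmeter.user_id": "user_123",
  "tokenmeter.org_id": "org_456",
});
```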
package/dist/integrations/vercel-ai/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/integrations/vercel-ai/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;GAqBG;AAEH,OAAO,EAAE,KAAK,EAAe,MAAM,oBAAoB,CAAC;AAyFxD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8DG;AACH,MAAM,UAAU,SAAS,CACvB,UAAsC,EAAE;IAExC,mDAAmD;IACnD,MAAM,QAAQ,GAA8C,EAAE,CAAC;IAE/D,8DAA8D;IAC9D,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;QACrB,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC5D,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;gBACxB,QAAQ,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;YACxB,CAAC;QACH,CAAC;IACH,CAAC;IAED,qCAAqC;IACrC,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QACnB,QAAQ,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC;IAClD,CAAC;IACD,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;QAClB,QAAQ,CAAC,mBAAmB,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;IAChD,CAAC;IACD,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC;QACvB,QAAQ,CAAC,wBAAwB,CAAC,GAAG,OAAO,CAAC,UAAU,CAAC;IAC1D,CAAC;IAED,OAAO;QACL,SAAS,EAAE,IAAI;QACf,UAAU,EAAE,OAAO,CAAC,UAAU;QAC9B,QAAQ,EAAE,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS;QACjE,YAAY,EAAE,OAAO,CAAC,YAAY,IAAI,IAAI;QAC1C,aAAa,EAAE,OAAO,CAAC,aAAa,IAAI,IAAI;QAC5C,MAAM,EAAE,KAAK,CAAC,SAAS,CAAC,YAAY,EAAE,OAAO,CAAC;KAC/C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,MAAM,UAAU,eAAe,CAC7B,iBAA6C,EAAE;IAE/C,OAAO,CAAC,OAAoC,EAAE,EAAE;QAC9C,OAAO,SAAS,CAAC;YACf,GAAG,cAAc;YACjB,GAAG,OAAO;YACV,QAAQ,EAAE;gBACR,GAAG,cAAc,CAAC,QAAQ;gBAC1B,GAAG,OAAO,EAAE,QAAQ;aACrB;SACF,CAAC,CAAC;IACL,CAAC,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG;IAClC,wCAAwC;IACxC,QAAQ,EAAE,aAAa;IACvB,qCAAqC;IACrC,cAAc,EAAE,mBAAmB;IACnC,+BAA+B;IAC/B,mBAAmB,EAAE,uBAAuB;IAC5C,oCAAoC;IACpC,uBAAuB,EAAE,2BAA2B;IACpD,gDAAgD;IAChD,WAAW,EAAE,yBAAyB;IACtC,6CAA6C;IAC7C,aAAa,EAAE,0BAA0B;IACzC,gDAAgD;IAChD,MAAM,EAAE,WAAW;IACnB,mDAAmD;IACnD,aAAa,EAAE,kBAAkB;CACzB,CAAC;AAEX;;;;GAIG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG;IAClC,kCAAkC;IAClC,aAAa,EAAE,iBAAiB;IAChC,qCAAqC;IACrC,yBAAyB,EAAE,4BAA4B;IACvD,gCAAgC;IAChC,WAAW,EAAE,eAAe;IAC5B,mCAAmC;IACnC,qBAAqB,EAAE,wBAAwB;IAC/C,oCAAoC;IACpC,eAAe,EAAE,mBAAmB;IACpC,iDAAiD;IACjD,2BAA2B,EAAE,8BAA8B;IAC3D,kCAAkC;IAClC,aAAa,EAAE,iBAAiB;IAChC,qBAAqB;IACrB,SAAS,EAAE,aAAa;IACxB,qBAAqB;IACrB,KAAK,EAAE,UAAU;IACjB,2BAA2B;IAC3B,UAAU,EAAE,cAAc;CAClB,CAAC;AAEX;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,cAAc,CAAC,QAAgB;IAC7C,OAAO,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;AACpC,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,gBAAgB,CAAC,UAAmC;IAIlE,OAAO;QACL,QAAQ,EACL,UAAU,CAAC,oBAAoB,CAAC,cAAc,CAAY,IAAI,SAAS;QAC1E,KAAK,EAAG,UAAU,CAAC,oBAAoB,CAAC,QAAQ,CAAY,IAAI,SAAS;KAC1E,CAAC;AACJ,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,YAAY,CAAC,UAAmC;IAI9D,OAAO;QACL,YAAY,EAAE,UAAU,CAAC,oBAAoB,CAAC,mBAAmB,CAEpD;QACb,gBAAgB,EAAE,UAAU,CAC1B,oBAAoB,CAAC,uBAAuB,CACvB;KACxB,CAAC;AACJ,CAAC"}
package/dist/logger.d.ts
ADDED
@@ -0,0 +1,58 @@
/**
 * Configurable Logger
 *
 * Provides a centralized logging mechanism that can be configured or disabled.
 * By default, logging is disabled in production to avoid polluting stdout.
 */
export type LogLevel = "debug" | "info" | "warn" | "error" | "none";
export interface LoggerConfig {
    /**
     * Minimum log level to output. Set to "none" to disable all logging.
     * @default "none"
     */
    level: LogLevel;
    /**
     * Custom logger implementation. If provided, all logs will be sent to this function.
     * This allows integration with existing logging frameworks (winston, pino, etc.)
     */
    custom?: (level: LogLevel, message: string, ...args: unknown[]) => void;
}
/**
 * Configure the tokenmeter logger.
 *
 * @example
 * ```typescript
 * import { configureLogger } from 'tokenmeter';
 *
 * // Enable warning and error logs
 * configureLogger({ level: 'warn' });
 *
 * // Use custom logger
 * configureLogger({
 *   level: 'debug',
 *   custom: (level, message, ...args) => {
 *     myLogger[level](message, ...args);
 *   }
 * });
 * ```
 */
export declare function configureLogger(newConfig: Partial<LoggerConfig>): void;
/**
 * Get the current logger configuration.
 */
export declare function getLoggerConfig(): LoggerConfig;
/**
 * Reset logger to default configuration (disabled).
 */
export declare function resetLogger(): void;
/**
 * Internal logger for tokenmeter.
 * By default, all logging is disabled. Use configureLogger() to enable.
 */
export declare const logger: {
    debug: (message: string, ...args: unknown[]) => void;
    info: (message: string, ...args: unknown[]) => void;
    warn: (message: string, ...args: unknown[]) => void;
    error: (message: string, ...args: unknown[]) => void;
};
//# sourceMappingURL=logger.d.ts.map
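Editor's note: the `custom` hook above is the natural seam for routing tokenmeter's internal logs into an existing logging stack. A minimal sketch wiring it to pino; the pino setup is illustrative, and any logger exposing debug/info/warn/error methods works the same way. The guard against "none" is defensive, since whether the hook is ever called with that level is an assumption here.

```typescript
// Sketch: forward tokenmeter's internal logs to a pino logger.
import pino from "pino";
import { configureLogger } from "tokenmeter";

const log = pino({ name: "tokenmeter" });

configureLogger({
  level: "warn", // forward warnings and errors only
  custom: (level, message, ...args) => {
    if (level === "none") return; // never dispatch a "disabled" level
    log[level](message, ...args);
  },
});
```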
package/dist/logger.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,MAAM,MAAM,QAAQ,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEpE,MAAM,WAAW,YAAY;IAC3B;;;OAGG;IACH,KAAK,EAAE,QAAQ,CAAC;IAEhB;;;OAGG;IACH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC;CACzE;AAeD;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,eAAe,CAAC,SAAS,EAAE,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAEtE;AAED;;GAEG;AACH,wBAAgB,eAAe,IAAI,YAAY,CAE9C;AAED;;GAEG;AACH,wBAAgB,WAAW,IAAI,IAAI,CAElC;AAkCD;;;GAGG;AACH,eAAO,MAAM,MAAM;qBACA,MAAM,WAAW,OAAO,EAAE;oBAC3B,MAAM,WAAW,OAAO,EAAE;oBAC1B,MAAM,WAAW,OAAO,EAAE;qBACzB,MAAM,WAAW,OAAO,EAAE;CAC5C,CAAC"}