openlit 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierrc.json +7 -0
- package/LICENSE +201 -0
- package/README.md +175 -0
- package/eslint.config.mjs +10 -0
- package/package.json +60 -0
- package/src/config.ts +45 -0
- package/src/constant.ts +5 -0
- package/src/helpers.ts +122 -0
- package/src/index.ts +72 -0
- package/src/instrumentation/anthropic/index.ts +59 -0
- package/src/instrumentation/anthropic/wrapper.ts +276 -0
- package/src/instrumentation/index.ts +66 -0
- package/src/instrumentation/openai/index.ts +114 -0
- package/src/instrumentation/openai/wrapper.ts +658 -0
- package/src/semantic-convention.ts +113 -0
- package/src/tracing.ts +59 -0
- package/src/types.ts +48 -0
- package/tsconfig.json +15 -0
package/src/index.ts
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { Resource } from '@opentelemetry/resources';
|
|
2
|
+
import {
|
|
3
|
+
SEMRESATTRS_DEPLOYMENT_ENVIRONMENT,
|
|
4
|
+
SEMRESATTRS_SERVICE_NAME,
|
|
5
|
+
SEMRESATTRS_TELEMETRY_SDK_NAME,
|
|
6
|
+
} from '@opentelemetry/semantic-conventions';
|
|
7
|
+
import { NodeSDK } from '@opentelemetry/sdk-node';
|
|
8
|
+
import { OpenlitOptions } from './types';
|
|
9
|
+
import Tracing from './tracing';
|
|
10
|
+
import { DEFAULT_APPLICATION_NAME, DEFAULT_ENVIRONMENT, SDK_NAME } from './constant';
|
|
11
|
+
import { SpanExporter } from '@opentelemetry/sdk-trace-base';
|
|
12
|
+
|
|
13
|
+
export default class Openlit {
|
|
14
|
+
static resource: Resource;
|
|
15
|
+
static options: OpenlitOptions;
|
|
16
|
+
static _sdk: NodeSDK;
|
|
17
|
+
static init(options?: OpenlitOptions) {
|
|
18
|
+
try {
|
|
19
|
+
const { environment = DEFAULT_ENVIRONMENT, applicationName = DEFAULT_APPLICATION_NAME } =
|
|
20
|
+
options || {};
|
|
21
|
+
|
|
22
|
+
const otlpEndpoint =
|
|
23
|
+
options?.otlpEndpoint || process.env.OTEL_EXPORTER_OTLP_ENDPOINT || undefined;
|
|
24
|
+
let otlpHeaders = options?.otlpHeaders;
|
|
25
|
+
if (!otlpHeaders) {
|
|
26
|
+
if (process.env.OTEL_EXPORTER_OTLP_HEADERS) {
|
|
27
|
+
otlpHeaders = process.env.OTEL_EXPORTER_OTLP_HEADERS.split(',').reduce(
|
|
28
|
+
(acc: Record<string, any>, items: string) => {
|
|
29
|
+
const keyVal: string[] = items.split('=');
|
|
30
|
+
acc[keyVal[0]] = keyVal[1];
|
|
31
|
+
return acc;
|
|
32
|
+
},
|
|
33
|
+
{}
|
|
34
|
+
);
|
|
35
|
+
} else {
|
|
36
|
+
otlpHeaders = {};
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
this.options = options || {};
|
|
41
|
+
this.options.otlpEndpoint = otlpEndpoint;
|
|
42
|
+
this.options.otlpHeaders = otlpHeaders;
|
|
43
|
+
this.options.disableBatch =
|
|
44
|
+
options?.disableBatch === undefined ? true : !!options.disableBatch;
|
|
45
|
+
|
|
46
|
+
this.resource = new Resource({
|
|
47
|
+
[SEMRESATTRS_SERVICE_NAME]: applicationName,
|
|
48
|
+
[SEMRESATTRS_DEPLOYMENT_ENVIRONMENT]: environment,
|
|
49
|
+
[SEMRESATTRS_TELEMETRY_SDK_NAME]: SDK_NAME,
|
|
50
|
+
});
|
|
51
|
+
|
|
52
|
+
Tracing.setup({
|
|
53
|
+
...this.options,
|
|
54
|
+
environment,
|
|
55
|
+
applicationName,
|
|
56
|
+
otlpEndpoint,
|
|
57
|
+
otlpHeaders,
|
|
58
|
+
resource: this.resource,
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
this._sdk = new NodeSDK({
|
|
62
|
+
resource: this.resource,
|
|
63
|
+
traceExporter: Tracing.traceExporter as SpanExporter,
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
// This was causing the traceProvider initilization with multiple instances.
|
|
67
|
+
// this._sdk.start();
|
|
68
|
+
} catch (e) {
|
|
69
|
+
console.log('Connection time out', e);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import {
|
|
2
|
+
InstrumentationBase,
|
|
3
|
+
InstrumentationModuleDefinition,
|
|
4
|
+
InstrumentationNodeModuleDefinition,
|
|
5
|
+
isWrapped,
|
|
6
|
+
} from '@opentelemetry/instrumentation';
|
|
7
|
+
import { InstrumentationConfig } from '@opentelemetry/instrumentation';
|
|
8
|
+
import { INSTRUMENTATION_PREFIX } from '../../constant';
|
|
9
|
+
import Anthropic from '@anthropic-ai/sdk';
|
|
10
|
+
import AnthropicWrapper from './wrapper';
|
|
11
|
+
|
|
12
|
+
/** Configuration for the Anthropic instrumentation; currently adds nothing beyond the base OpenTelemetry InstrumentationConfig. */
export interface AnthropicInstrumentationConfig extends InstrumentationConfig {}
|
|
13
|
+
|
|
14
|
+
export default class OpenlitAnthropicInstrumentation extends InstrumentationBase {
|
|
15
|
+
constructor(config: AnthropicInstrumentationConfig = {}) {
|
|
16
|
+
super(`${INSTRUMENTATION_PREFIX}/instrumentation-anthropic`, '1.0.0', config);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
protected init(): void | InstrumentationModuleDefinition | InstrumentationModuleDefinition[] {
|
|
20
|
+
const module = new InstrumentationNodeModuleDefinition(
|
|
21
|
+
'@anthropic-ai/sdk',
|
|
22
|
+
['>= 0.21.0'],
|
|
23
|
+
(moduleExports) => {
|
|
24
|
+
this._patch(moduleExports);
|
|
25
|
+
return moduleExports;
|
|
26
|
+
},
|
|
27
|
+
(moduleExports) => {
|
|
28
|
+
if (moduleExports !== undefined) {
|
|
29
|
+
this._unpatch(moduleExports);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
);
|
|
33
|
+
return [module];
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
public manualPatch(anthropic: any): void {
|
|
37
|
+
this._patch(anthropic);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
protected _patch(moduleExports: typeof Anthropic) {
|
|
41
|
+
try {
|
|
42
|
+
if (isWrapped(moduleExports.Anthropic.Messages.prototype.create)) {
|
|
43
|
+
this._unwrap(moduleExports.Anthropic.Messages.prototype, 'create');
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
this._wrap(
|
|
47
|
+
moduleExports.Anthropic.Messages.prototype,
|
|
48
|
+
'create',
|
|
49
|
+
AnthropicWrapper._patchMessageCreate(this.tracer)
|
|
50
|
+
);
|
|
51
|
+
} catch (e) {
|
|
52
|
+
console.error('Error in _patch method:', e);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
protected _unpatch(moduleExports: typeof Anthropic) {
|
|
57
|
+
this._unwrap(moduleExports.Anthropic.Messages.prototype, 'create');
|
|
58
|
+
}
|
|
59
|
+
}
|
|
@@ -0,0 +1,276 @@
|
|
|
1
|
+
import { Span, SpanKind, SpanStatusCode, Tracer, context, trace } from '@opentelemetry/api';
|
|
2
|
+
import OpenlitConfig from '../../config';
|
|
3
|
+
import OpenLitHelper from '../../helpers';
|
|
4
|
+
import SemanticConvention from '../../semantic-convention';
|
|
5
|
+
import { SDK_NAME, TELEMETRY_SDK_NAME } from '../../constant';
|
|
6
|
+
|
|
7
|
+
export default class AnthropicWrapper {
|
|
8
|
+
static setBaseSpanAttributes(
|
|
9
|
+
span: any,
|
|
10
|
+
{ genAIEndpoint, model, user, cost, environment, applicationName }: any
|
|
11
|
+
) {
|
|
12
|
+
span.setAttributes({
|
|
13
|
+
[TELEMETRY_SDK_NAME]: SDK_NAME,
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
span.setAttribute(TELEMETRY_SDK_NAME, SDK_NAME);
|
|
17
|
+
span.setAttribute(SemanticConvention.GEN_AI_SYSTEM, SemanticConvention.GEN_AI_SYSTEM_ANTHROPIC);
|
|
18
|
+
span.setAttribute(SemanticConvention.GEN_AI_ENDPOINT, genAIEndpoint);
|
|
19
|
+
span.setAttribute(SemanticConvention.GEN_AI_ENVIRONMENT, environment);
|
|
20
|
+
span.setAttribute(SemanticConvention.GEN_AI_APPLICATION_NAME, applicationName);
|
|
21
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_MODEL, model);
|
|
22
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_USER, user);
|
|
23
|
+
if (cost !== undefined) span.setAttribute(SemanticConvention.GEN_AI_USAGE_COST, cost);
|
|
24
|
+
|
|
25
|
+
span.setStatus({ code: SpanStatusCode.OK });
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
static _patchMessageCreate(tracer: Tracer): any {
|
|
29
|
+
const genAIEndpoint = 'anthropic.resources.messages';
|
|
30
|
+
return (originalMethod: (...args: any[]) => any) => {
|
|
31
|
+
return async function (this: any, ...args: any[]) {
|
|
32
|
+
const span = tracer.startSpan(genAIEndpoint, { kind: SpanKind.CLIENT });
|
|
33
|
+
const { stream = false } = args[0];
|
|
34
|
+
return context
|
|
35
|
+
.with(trace.setSpan(context.active(), span), async () => {
|
|
36
|
+
return originalMethod.apply(this, args);
|
|
37
|
+
})
|
|
38
|
+
.then((response) => {
|
|
39
|
+
if (!!stream) {
|
|
40
|
+
return OpenLitHelper.createStreamProxy(
|
|
41
|
+
response,
|
|
42
|
+
AnthropicWrapper._messageCreateGenerator({
|
|
43
|
+
args,
|
|
44
|
+
genAIEndpoint,
|
|
45
|
+
response,
|
|
46
|
+
span,
|
|
47
|
+
})
|
|
48
|
+
);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
return AnthropicWrapper._messageCreate({ args, genAIEndpoint, response, span });
|
|
52
|
+
})
|
|
53
|
+
.catch((e: any) => {
|
|
54
|
+
OpenLitHelper.handleException(span, e);
|
|
55
|
+
span.end();
|
|
56
|
+
throw e;
|
|
57
|
+
});
|
|
58
|
+
};
|
|
59
|
+
};
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
static async _messageCreate({
|
|
63
|
+
args,
|
|
64
|
+
genAIEndpoint,
|
|
65
|
+
response,
|
|
66
|
+
span,
|
|
67
|
+
}: {
|
|
68
|
+
args: any[];
|
|
69
|
+
genAIEndpoint: string;
|
|
70
|
+
response: any;
|
|
71
|
+
span: Span;
|
|
72
|
+
}) {
|
|
73
|
+
try {
|
|
74
|
+
await AnthropicWrapper._messageCreateCommonSetter({
|
|
75
|
+
args,
|
|
76
|
+
genAIEndpoint,
|
|
77
|
+
result: response,
|
|
78
|
+
span,
|
|
79
|
+
});
|
|
80
|
+
return response;
|
|
81
|
+
} catch (e: any) {
|
|
82
|
+
OpenLitHelper.handleException(span, e);
|
|
83
|
+
} finally {
|
|
84
|
+
span.end();
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
static async *_messageCreateGenerator({
|
|
89
|
+
args,
|
|
90
|
+
genAIEndpoint,
|
|
91
|
+
response,
|
|
92
|
+
span,
|
|
93
|
+
}: {
|
|
94
|
+
args: any[];
|
|
95
|
+
genAIEndpoint: string;
|
|
96
|
+
response: any;
|
|
97
|
+
span: Span;
|
|
98
|
+
}) {
|
|
99
|
+
try {
|
|
100
|
+
const result = {
|
|
101
|
+
id: '0',
|
|
102
|
+
model: '',
|
|
103
|
+
stop_reason: '',
|
|
104
|
+
content: [
|
|
105
|
+
{
|
|
106
|
+
text: '',
|
|
107
|
+
role: '',
|
|
108
|
+
},
|
|
109
|
+
],
|
|
110
|
+
usage: {
|
|
111
|
+
input_tokens: 0,
|
|
112
|
+
output_tokens: 0,
|
|
113
|
+
total_tokens: 0,
|
|
114
|
+
},
|
|
115
|
+
};
|
|
116
|
+
for await (const chunk of response) {
|
|
117
|
+
switch (chunk.type) {
|
|
118
|
+
case 'content_block_delta':
|
|
119
|
+
result.content[0].text += chunk.delta?.text ?? '';
|
|
120
|
+
break;
|
|
121
|
+
case 'message_stop':
|
|
122
|
+
break;
|
|
123
|
+
|
|
124
|
+
case 'content_block_stop':
|
|
125
|
+
break;
|
|
126
|
+
|
|
127
|
+
case 'message_start':
|
|
128
|
+
if (chunk.message) {
|
|
129
|
+
result.id = chunk.message.id;
|
|
130
|
+
result.model = chunk.message.model;
|
|
131
|
+
result.content[0].role = chunk.message.role;
|
|
132
|
+
result.usage.input_tokens += Number(chunk.message.usage?.input_tokens) ?? 0;
|
|
133
|
+
result.usage.output_tokens += Number(chunk.message.usage?.output_tokens) ?? 0;
|
|
134
|
+
result.stop_reason = chunk.message?.stop_reason ?? '';
|
|
135
|
+
}
|
|
136
|
+
break;
|
|
137
|
+
|
|
138
|
+
case 'content_block_start':
|
|
139
|
+
result.content[0].text = chunk.content_block?.text ?? '';
|
|
140
|
+
break;
|
|
141
|
+
case 'message_delta':
|
|
142
|
+
result.stop_reason = chunk.delta?.stop_reason ?? '';
|
|
143
|
+
result.usage.output_tokens += Number(chunk.usage?.output_tokens) ?? 0;
|
|
144
|
+
break;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
yield chunk;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
result.usage.total_tokens = result.usage.output_tokens + result.usage.input_tokens;
|
|
151
|
+
|
|
152
|
+
await AnthropicWrapper._messageCreateCommonSetter({
|
|
153
|
+
args,
|
|
154
|
+
genAIEndpoint,
|
|
155
|
+
result,
|
|
156
|
+
span,
|
|
157
|
+
});
|
|
158
|
+
return response;
|
|
159
|
+
} catch (e: any) {
|
|
160
|
+
OpenLitHelper.handleException(span, e);
|
|
161
|
+
} finally {
|
|
162
|
+
span.end();
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
static async _messageCreateCommonSetter({
|
|
167
|
+
args,
|
|
168
|
+
genAIEndpoint,
|
|
169
|
+
result,
|
|
170
|
+
span,
|
|
171
|
+
}: {
|
|
172
|
+
args: any[];
|
|
173
|
+
genAIEndpoint: string;
|
|
174
|
+
result: any;
|
|
175
|
+
span: Span;
|
|
176
|
+
}) {
|
|
177
|
+
const applicationName = OpenlitConfig.applicationName;
|
|
178
|
+
const environment = OpenlitConfig.environment;
|
|
179
|
+
const traceContent = OpenlitConfig.traceContent;
|
|
180
|
+
const {
|
|
181
|
+
messages,
|
|
182
|
+
max_tokens = null,
|
|
183
|
+
seed = null,
|
|
184
|
+
temperature = 1,
|
|
185
|
+
top_p,
|
|
186
|
+
top_k,
|
|
187
|
+
user,
|
|
188
|
+
stream = false,
|
|
189
|
+
stop_reason,
|
|
190
|
+
} = args[0];
|
|
191
|
+
|
|
192
|
+
// Format 'messages' into a single string
|
|
193
|
+
const messagePrompt = messages || '';
|
|
194
|
+
const formattedMessages = [];
|
|
195
|
+
|
|
196
|
+
for (const message of messagePrompt) {
|
|
197
|
+
const role = message.role;
|
|
198
|
+
const content = message.content;
|
|
199
|
+
|
|
200
|
+
if (Array.isArray(content)) {
|
|
201
|
+
const contentStr = content
|
|
202
|
+
.map((item) => {
|
|
203
|
+
if ('type' in item) {
|
|
204
|
+
return `${item.type}: ${item.text ? item.text : item.image_url}`;
|
|
205
|
+
} else {
|
|
206
|
+
return `text: ${item.text}`;
|
|
207
|
+
}
|
|
208
|
+
})
|
|
209
|
+
.join(', ');
|
|
210
|
+
formattedMessages.push(`${role}: ${contentStr}`);
|
|
211
|
+
} else {
|
|
212
|
+
formattedMessages.push(`${role}: ${content}`);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
const prompt = formattedMessages.join('\n');
|
|
217
|
+
span.setAttribute(SemanticConvention.GEN_AI_TYPE, SemanticConvention.GEN_AI_TYPE_CHAT);
|
|
218
|
+
span.setAttribute(SemanticConvention.GEN_AI_RESPONSE_ID, result.id);
|
|
219
|
+
|
|
220
|
+
const model = result.model || 'claude-3-sonnet-20240229';
|
|
221
|
+
|
|
222
|
+
const pricingInfo = await OpenlitConfig.updatePricingJson(OpenlitConfig.pricing_json);
|
|
223
|
+
|
|
224
|
+
// Calculate cost of the operation
|
|
225
|
+
const cost = OpenLitHelper.getChatModelCost(
|
|
226
|
+
model,
|
|
227
|
+
pricingInfo,
|
|
228
|
+
result.usage.input_tokens,
|
|
229
|
+
result.usage.output_tokens
|
|
230
|
+
);
|
|
231
|
+
|
|
232
|
+
AnthropicWrapper.setBaseSpanAttributes(span, {
|
|
233
|
+
genAIEndpoint,
|
|
234
|
+
model,
|
|
235
|
+
user,
|
|
236
|
+
cost,
|
|
237
|
+
applicationName,
|
|
238
|
+
environment,
|
|
239
|
+
});
|
|
240
|
+
|
|
241
|
+
// Request Params attributes : Start
|
|
242
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_TOP_P, top_p);
|
|
243
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_TOP_K, top_k);
|
|
244
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_MAX_TOKENS, max_tokens);
|
|
245
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_TEMPERATURE, temperature);
|
|
246
|
+
|
|
247
|
+
span.setAttribute(SemanticConvention.GEN_AI_RESPONSE_FINISH_REASON, stop_reason);
|
|
248
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_IS_STREAM, stream);
|
|
249
|
+
span.setAttribute(SemanticConvention.GEN_AI_REQUEST_SEED, seed);
|
|
250
|
+
if (traceContent) {
|
|
251
|
+
span.setAttribute(SemanticConvention.GEN_AI_CONTENT_PROMPT, prompt);
|
|
252
|
+
}
|
|
253
|
+
// Request Params attributes : End
|
|
254
|
+
|
|
255
|
+
span.setAttribute(SemanticConvention.GEN_AI_USAGE_PROMPT_TOKENS, result.usage.input_tokens);
|
|
256
|
+
span.setAttribute(
|
|
257
|
+
SemanticConvention.GEN_AI_USAGE_COMPLETION_TOKENS,
|
|
258
|
+
result.usage.output_tokens
|
|
259
|
+
);
|
|
260
|
+
span.setAttribute(
|
|
261
|
+
SemanticConvention.GEN_AI_USAGE_TOTAL_TOKENS,
|
|
262
|
+
result.usage.input_tokens + result.usage.output_tokens
|
|
263
|
+
);
|
|
264
|
+
|
|
265
|
+
if (result.stop_reason) {
|
|
266
|
+
span.setAttribute(SemanticConvention.GEN_AI_RESPONSE_FINISH_REASON, result.stop_reason);
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
if (traceContent) {
|
|
270
|
+
span.setAttribute(
|
|
271
|
+
SemanticConvention.GEN_AI_CONTENT_COMPLETION,
|
|
272
|
+
result.content?.[0]?.text || ''
|
|
273
|
+
);
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { registerInstrumentations } from '@opentelemetry/instrumentation';
|
|
2
|
+
import { InstrumentationType, OpenlitInstrumentations } from '../types';
|
|
3
|
+
|
|
4
|
+
import { TracerProvider } from '@opentelemetry/api';
|
|
5
|
+
import OpenAIInstrumentation from './openai';
|
|
6
|
+
import AnthropicInstrumentation from './anthropic';
|
|
7
|
+
|
|
8
|
+
export default class Instrumentations {
|
|
9
|
+
static availableInstrumentations: OpenlitInstrumentations = {
|
|
10
|
+
openai: new OpenAIInstrumentation(),
|
|
11
|
+
anthropic: new AnthropicInstrumentation(),
|
|
12
|
+
};
|
|
13
|
+
|
|
14
|
+
static setup(
|
|
15
|
+
tracerProvider: TracerProvider,
|
|
16
|
+
disabledInstrumentations: string[] = [],
|
|
17
|
+
instrumentations?: OpenlitInstrumentations
|
|
18
|
+
) {
|
|
19
|
+
if (instrumentations === undefined) {
|
|
20
|
+
const filteredInstrumentations = this.getFilteredInstrumentations(disabledInstrumentations);
|
|
21
|
+
registerInstrumentations({
|
|
22
|
+
instrumentations: filteredInstrumentations.map(([_, instrumentation]) => instrumentation),
|
|
23
|
+
tracerProvider,
|
|
24
|
+
});
|
|
25
|
+
} else {
|
|
26
|
+
const filteredInstrumentations = this.getFilteredInstrumentations(
|
|
27
|
+
disabledInstrumentations,
|
|
28
|
+
instrumentations
|
|
29
|
+
);
|
|
30
|
+
filteredInstrumentations.forEach(([k, instrumentation]) => {
|
|
31
|
+
if (this.availableInstrumentations[k].setTracerProvider) {
|
|
32
|
+
this.availableInstrumentations[k].setTracerProvider(tracerProvider);
|
|
33
|
+
}
|
|
34
|
+
if (this.availableInstrumentations[k].manualPatch) {
|
|
35
|
+
this.availableInstrumentations[k].manualPatch(instrumentation);
|
|
36
|
+
}
|
|
37
|
+
});
|
|
38
|
+
registerInstrumentations({
|
|
39
|
+
tracerProvider,
|
|
40
|
+
});
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
static getFilteredInstrumentations(
|
|
45
|
+
disabledInstrumentations: string[],
|
|
46
|
+
instrumentations?: OpenlitInstrumentations
|
|
47
|
+
): [InstrumentationType, any][] {
|
|
48
|
+
const availableInstrumentations = instrumentations || this.availableInstrumentations;
|
|
49
|
+
return Object.keys(availableInstrumentations)
|
|
50
|
+
.filter((k) => {
|
|
51
|
+
if (disabledInstrumentations.includes(k)) {
|
|
52
|
+
if (typeof availableInstrumentations[k as InstrumentationType].disable === 'function') {
|
|
53
|
+
availableInstrumentations[k as InstrumentationType].disable();
|
|
54
|
+
}
|
|
55
|
+
return false;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (typeof availableInstrumentations[k as InstrumentationType].enable === 'function') {
|
|
59
|
+
availableInstrumentations[k as InstrumentationType].enable();
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return true;
|
|
63
|
+
})
|
|
64
|
+
.map((k) => [k as InstrumentationType, availableInstrumentations[k as InstrumentationType]]);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import {
|
|
2
|
+
InstrumentationBase,
|
|
3
|
+
InstrumentationModuleDefinition,
|
|
4
|
+
InstrumentationNodeModuleDefinition,
|
|
5
|
+
isWrapped,
|
|
6
|
+
} from '@opentelemetry/instrumentation';
|
|
7
|
+
import { InstrumentationConfig } from '@opentelemetry/instrumentation';
|
|
8
|
+
import { INSTRUMENTATION_PREFIX } from '../../constant';
|
|
9
|
+
import OpenAI from 'openai';
|
|
10
|
+
import OpenAIWrapper from './wrapper';
|
|
11
|
+
|
|
12
|
+
/** Configuration for the OpenAI instrumentation; currently adds nothing beyond the base OpenTelemetry InstrumentationConfig. */
export interface OpenAIInstrumentationConfig extends InstrumentationConfig {}
|
|
13
|
+
|
|
14
|
+
export default class OpenlitOpenAIInstrumentation extends InstrumentationBase {
|
|
15
|
+
constructor(config: OpenAIInstrumentationConfig = {}) {
|
|
16
|
+
super(`${INSTRUMENTATION_PREFIX}/instrumentation-openai`, '1.0.0', config);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
protected init(): void | InstrumentationModuleDefinition | InstrumentationModuleDefinition[] {
|
|
20
|
+
const module = new InstrumentationNodeModuleDefinition(
|
|
21
|
+
'openai',
|
|
22
|
+
['>=3.1.0 <5'],
|
|
23
|
+
(moduleExports) => {
|
|
24
|
+
this._patch(moduleExports);
|
|
25
|
+
return moduleExports;
|
|
26
|
+
},
|
|
27
|
+
(moduleExports) => {
|
|
28
|
+
if (moduleExports !== undefined) {
|
|
29
|
+
this._unpatch(moduleExports);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
);
|
|
33
|
+
return [module];
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
public manualPatch(openai: any): void {
|
|
37
|
+
this._patch(openai);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
protected _patch(moduleExports: typeof OpenAI) {
|
|
41
|
+
try {
|
|
42
|
+
if (isWrapped(moduleExports.OpenAI.Chat.Completions.prototype.create)) {
|
|
43
|
+
this._unwrap(moduleExports.OpenAI.Chat.Completions.prototype, 'create');
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
if (isWrapped(moduleExports.OpenAI.Embeddings.prototype.create)) {
|
|
47
|
+
this._unwrap(moduleExports.OpenAI.Embeddings.prototype, 'create');
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
if (isWrapped(moduleExports.OpenAI.FineTuning.Jobs.prototype.create)) {
|
|
51
|
+
this._unwrap(moduleExports.OpenAI.FineTuning.Jobs.prototype, 'create');
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
if (isWrapped(moduleExports.OpenAI.Images.prototype.generate)) {
|
|
55
|
+
this._unwrap(moduleExports.OpenAI.Images.prototype, 'generate');
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (isWrapped(moduleExports.OpenAI.Images.prototype.createVariation)) {
|
|
59
|
+
this._unwrap(moduleExports.OpenAI.Images.prototype, 'createVariation');
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
if (isWrapped(moduleExports.OpenAI.Audio.Speech.prototype)) {
|
|
63
|
+
this._unwrap(moduleExports.OpenAI.Audio.Speech.prototype, 'create');
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
this._wrap(
|
|
67
|
+
moduleExports.OpenAI.Chat.Completions.prototype,
|
|
68
|
+
'create',
|
|
69
|
+
OpenAIWrapper._patchChatCompletionCreate(this.tracer)
|
|
70
|
+
);
|
|
71
|
+
|
|
72
|
+
this._wrap(
|
|
73
|
+
moduleExports.OpenAI.Embeddings.prototype,
|
|
74
|
+
'create',
|
|
75
|
+
OpenAIWrapper._patchEmbedding(this.tracer)
|
|
76
|
+
);
|
|
77
|
+
|
|
78
|
+
this._wrap(
|
|
79
|
+
moduleExports.OpenAI.FineTuning.Jobs.prototype,
|
|
80
|
+
'create',
|
|
81
|
+
OpenAIWrapper._patchFineTune(this.tracer)
|
|
82
|
+
);
|
|
83
|
+
|
|
84
|
+
this._wrap(
|
|
85
|
+
moduleExports.OpenAI.Images.prototype,
|
|
86
|
+
'generate',
|
|
87
|
+
OpenAIWrapper._patchImageGenerate(this.tracer)
|
|
88
|
+
);
|
|
89
|
+
|
|
90
|
+
this._wrap(
|
|
91
|
+
moduleExports.OpenAI.Images.prototype,
|
|
92
|
+
'createVariation',
|
|
93
|
+
OpenAIWrapper._patchImageVariation(this.tracer)
|
|
94
|
+
);
|
|
95
|
+
|
|
96
|
+
this._wrap(
|
|
97
|
+
moduleExports.OpenAI.Audio.Speech.prototype,
|
|
98
|
+
'create',
|
|
99
|
+
OpenAIWrapper._patchAudioCreate(this.tracer)
|
|
100
|
+
);
|
|
101
|
+
} catch (e) {
|
|
102
|
+
console.error('Error in _patch method:', e);
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
protected _unpatch(moduleExports: typeof OpenAI) {
|
|
107
|
+
this._unwrap(moduleExports.OpenAI.Chat.Completions.prototype, 'create');
|
|
108
|
+
this._unwrap(moduleExports.OpenAI.Embeddings.prototype, 'create');
|
|
109
|
+
this._unwrap(moduleExports.OpenAI.FineTuning.prototype, 'jobs');
|
|
110
|
+
this._unwrap(moduleExports.OpenAI.Images.prototype, 'generate');
|
|
111
|
+
this._unwrap(moduleExports.OpenAI.Images.prototype, 'createVariation');
|
|
112
|
+
this._unwrap(moduleExports.OpenAI.Audio.prototype, 'speech');
|
|
113
|
+
}
|
|
114
|
+
}
|