@traceloop/instrumentation-anthropic 0.9.2 → 0.11.0

This diff shows the content of publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -5,7 +5,7 @@ var api = require('@opentelemetry/api');
  var instrumentation = require('@opentelemetry/instrumentation');
  var aiSemanticConventions = require('@traceloop/ai-semantic-conventions');

- var version = "0.9.2";
+ var version = "0.11.0";

  class AnthropicInstrumentation extends instrumentation.InstrumentationBase {
  constructor(config = {}) {
package/dist/index.mjs ADDED
@@ -0,0 +1,303 @@
+ import { __asyncGenerator, __asyncValues, __await } from 'tslib';
+ import { trace, context, SpanKind, SpanStatusCode } from '@opentelemetry/api';
+ import { InstrumentationBase, InstrumentationNodeModuleDefinition, safeExecuteInTheMiddle } from '@opentelemetry/instrumentation';
+ import { SpanAttributes, CONTEXT_KEY_ALLOW_TRACE_CONTENT } from '@traceloop/ai-semantic-conventions';
+
+ var version = "0.11.0";
+
+ class AnthropicInstrumentation extends InstrumentationBase {
+ constructor(config = {}) {
+ super("@traceloop/instrumentation-anthropic", version, config);
+ }
+ setConfig(config = {}) {
+ super.setConfig(config);
+ }
+ manuallyInstrument(module) {
+ this._diag.debug(`Patching @anthropic-ai/sdk manually`);
+ this._wrap(module.Anthropic.Completions.prototype, "create", this.patchAnthropic("completion"));
+ this._wrap(module.Anthropic.Messages.prototype, "create", this.patchAnthropic("chat"));
+ }
+ init() {
+ const module = new InstrumentationNodeModuleDefinition("@anthropic-ai/sdk", [">=0.9.1"], this.patch.bind(this), this.unpatch.bind(this));
+ return module;
+ }
+ patch(moduleExports, moduleVersion) {
+ this._diag.debug(`Patching @anthropic-ai/sdk@${moduleVersion}`);
+ this._wrap(moduleExports.Anthropic.Completions.prototype, "create", this.patchAnthropic("completion"));
+ this._wrap(moduleExports.Anthropic.Messages.prototype, "create", this.patchAnthropic("chat"));
+ return moduleExports;
+ }
+ unpatch(moduleExports, moduleVersion) {
+ this._diag.debug(`Unpatching @azure/openai@${moduleVersion}`);
+ this._unwrap(moduleExports.Anthropic.Completions.prototype, "create");
+ this._unwrap(moduleExports.Anthropic.Messages.prototype, "create");
+ }
+ patchAnthropic(type) {
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ const plugin = this;
+ // eslint-disable-next-line @typescript-eslint/ban-types
+ return (original) => {
+ return function method(...args) {
+ const span = type === "chat"
+ ? plugin.startSpan({
+ type,
+ params: args[0],
+ })
+ : plugin.startSpan({
+ type,
+ params: args[0],
+ });
+ const execContext = trace.setSpan(context.active(), span);
+ const execPromise = safeExecuteInTheMiddle(() => {
+ return context.with(execContext, () => {
+ var _a;
+ if ((_a = args === null || args === void 0 ? void 0 : args[0]) === null || _a === void 0 ? void 0 : _a.extraAttributes) {
+ delete args[0].extraAttributes;
+ }
+ return original.apply(this, args);
+ });
+ }, (e) => {
+ if (e) {
+ plugin._diag.error("Error in Anthropic instrumentation", e);
+ }
+ });
+ if (args[0].stream &&
+ type === "completion" // For some reason, this causes an exception with chat, so disabled for now
+ ) {
+ return context.bind(execContext, plugin._streamingWrapPromise({
+ span,
+ type,
+ promise: execPromise,
+ }));
+ }
+ const wrappedPromise = plugin._wrapPromise(type, span, execPromise);
+ return context.bind(execContext, wrappedPromise);
+ };
+ };
+ }
+ startSpan({ type, params, }) {
+ var _a, _b;
+ const attributes = {
+ [SpanAttributes.LLM_SYSTEM]: "Anthropic",
+ [SpanAttributes.LLM_REQUEST_TYPE]: type,
+ };
+ try {
+ attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
+ attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = params.temperature;
+ attributes[SpanAttributes.LLM_REQUEST_TOP_P] = params.top_p;
+ attributes[SpanAttributes.LLM_TOP_K] = params.top_k;
+ if (type === "completion") {
+ attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
+ params.max_tokens_to_sample;
+ }
+ else {
+ attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.max_tokens;
+ }
+ if (params.extraAttributes !== undefined &&
+ typeof params.extraAttributes === "object") {
+ Object.keys(params.extraAttributes).forEach((key) => {
+ attributes[key] = params.extraAttributes[key];
+ });
+ }
+ if (this._shouldSendPrompts()) {
+ if (type === "chat") {
+ params.messages.forEach((message, index) => {
+ attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+ message.role;
+ if (typeof message.content === "string") {
+ attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+ message.content || "";
+ }
+ else {
+ attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+ JSON.stringify(message.content);
+ }
+ });
+ }
+ else {
+ attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+ attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] = params.prompt;
+ }
+ }
+ }
+ catch (e) {
+ this._diag.debug(e);
+ (_b = (_a = this._config).exceptionLogger) === null || _b === void 0 ? void 0 : _b.call(_a, e);
+ }
+ return this.tracer.startSpan(`anthropic.${type}`, {
+ kind: SpanKind.CLIENT,
+ attributes,
+ });
+ }
+ _streamingWrapPromise({ span, type, promise, }) {
+ var _a, _b, _c, _d;
+ return __asyncGenerator(this, arguments, function* _streamingWrapPromise_1() {
+ var _e, e_1, _f, _g, _h, e_2, _j, _k;
+ if (type === "chat") {
+ const result = {
+ id: "0",
+ type: "message",
+ model: "",
+ role: "assistant",
+ stop_reason: null,
+ stop_sequence: null,
+ usage: { input_tokens: 0, output_tokens: 0 },
+ content: [],
+ };
+ try {
+ for (var _l = true, _m = __asyncValues(yield __await(promise)), _o; _o = yield __await(_m.next()), _e = _o.done, !_e; _l = true) {
+ _g = _o.value;
+ _l = false;
+ const chunk = _g;
+ yield yield __await(chunk);
+ try {
+ switch (chunk.type) {
+ case "content_block_start":
+ if (result.content.length <= chunk.index) {
+ result.content.push(chunk.content_block);
+ }
+ break;
+ case "content_block_delta":
+ if (chunk.index < result.content.length) {
+ result.content[chunk.index] = {
+ type: "text",
+ text: result.content[chunk.index].text + chunk.delta.text,
+ };
+ }
+ }
+ }
+ catch (e) {
+ this._diag.debug(e);
+ (_b = (_a = this._config).exceptionLogger) === null || _b === void 0 ? void 0 : _b.call(_a, e);
+ }
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (!_l && !_e && (_f = _m.return)) yield __await(_f.call(_m));
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ this._endSpan({ span, type, result });
+ }
+ else {
+ const result = {
+ id: "0",
+ type: "completion",
+ model: "",
+ completion: "",
+ stop_reason: null,
+ };
+ try {
+ for (var _p = true, _q = __asyncValues(yield __await(promise)), _r; _r = yield __await(_q.next()), _h = _r.done, !_h; _p = true) {
+ _k = _r.value;
+ _p = false;
+ const chunk = _k;
+ yield yield __await(chunk);
+ try {
+ result.id = chunk.id;
+ result.model = chunk.model;
+ if (chunk.stop_reason) {
+ result.stop_reason = chunk.stop_reason;
+ }
+ if (chunk.model) {
+ result.model = chunk.model;
+ }
+ if (chunk.completion) {
+ result.completion += chunk.completion;
+ }
+ }
+ catch (e) {
+ this._diag.debug(e);
+ (_d = (_c = this._config).exceptionLogger) === null || _d === void 0 ? void 0 : _d.call(_c, e);
+ }
+ }
+ }
+ catch (e_2_1) { e_2 = { error: e_2_1 }; }
+ finally {
+ try {
+ if (!_p && !_h && (_j = _q.return)) yield __await(_j.call(_q));
+ }
+ finally { if (e_2) throw e_2.error; }
+ }
+ this._endSpan({ span, type, result });
+ }
+ });
+ }
+ _wrapPromise(type, span, promise) {
+ return promise
+ .then((result) => {
+ return new Promise((resolve) => {
+ if (type === "chat") {
+ this._endSpan({
+ type,
+ span,
+ result: result,
+ });
+ }
+ else {
+ this._endSpan({
+ type,
+ span,
+ result: result,
+ });
+ }
+ resolve(result);
+ });
+ })
+ .catch((error) => {
+ return new Promise((_, reject) => {
+ span.setStatus({
+ code: SpanStatusCode.ERROR,
+ message: error.message,
+ });
+ span.recordException(error);
+ span.end();
+ reject(error);
+ });
+ });
+ }
+ _endSpan({ span, type, result, }) {
+ var _a, _b, _c, _d, _e, _f;
+ try {
+ span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
+ if (type === "chat" && result.usage) {
+ span.setAttribute(SpanAttributes.LLM_USAGE_TOTAL_TOKENS, ((_a = result.usage) === null || _a === void 0 ? void 0 : _a.input_tokens) + ((_b = result.usage) === null || _b === void 0 ? void 0 : _b.output_tokens));
+ span.setAttribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, (_c = result.usage) === null || _c === void 0 ? void 0 : _c.output_tokens);
+ span.setAttribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, (_d = result.usage) === null || _d === void 0 ? void 0 : _d.input_tokens);
+ }
+ result.stop_reason &&
+ span.setAttribute(`${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`, result.stop_reason);
+ if (this._shouldSendPrompts()) {
+ if (type === "chat") {
+ span.setAttribute(`${SpanAttributes.LLM_COMPLETIONS}.0.role`, "assistant");
+ span.setAttribute(`${SpanAttributes.LLM_COMPLETIONS}.0.content`, JSON.stringify(result.content));
+ }
+ else {
+ span.setAttribute(`${SpanAttributes.LLM_COMPLETIONS}.0.role`, "assistant");
+ span.setAttribute(`${SpanAttributes.LLM_COMPLETIONS}.0.content`, result.completion);
+ }
+ }
+ }
+ catch (e) {
+ this._diag.debug(e);
+ (_f = (_e = this._config).exceptionLogger) === null || _f === void 0 ? void 0 : _f.call(_e, e);
+ }
+ span.end();
+ }
+ _shouldSendPrompts() {
+ const contextShouldSendPrompts = context
+ .active()
+ .getValue(CONTEXT_KEY_ALLOW_TRACE_CONTENT);
+ if (contextShouldSendPrompts !== undefined) {
+ return contextShouldSendPrompts;
+ }
+ return this._config.traceContent !== undefined
+ ? this._config.traceContent
+ : true;
+ }
+ }
+
+ export { AnthropicInstrumentation };
+ //# sourceMappingURL=index.mjs.map
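
Prompt and completion capture in the file above is gated twice: per request through the CONTEXT_KEY_ALLOW_TRACE_CONTENT context value, and globally through the traceContent config key read by _shouldSendPrompts. Below is a minimal sketch of suppressing content for a single call; it is not part of the package, it assumes tracing is already configured, and `client` is assumed to be an existing instrumented Anthropic client.

// Sketch only: `client` is an assumed, already-instrumented Anthropic client.
import { context } from '@opentelemetry/api';
import { CONTEXT_KEY_ALLOW_TRACE_CONTENT } from '@traceloop/ai-semantic-conventions';

// _shouldSendPrompts() consults the active context before the traceContent config,
// so setting the key to false drops prompt/completion attributes for this call only.
const ctx = context.active().setValue(CONTEXT_KEY_ALLOW_TRACE_CONTENT, false);
const message = await context.with(ctx, () =>
  client.messages.create({
    model: 'claude-3-haiku-20240307', // illustrative model id
    max_tokens: 64,
    messages: [{ role: 'user', content: 'Hello' }],
  }));
// The resulting anthropic.chat span keeps the model and usage attributes
// but carries no prompt or completion content.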
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@traceloop/instrumentation-anthropic",
- "version": "0.9.2",
+ "version": "0.11.0",
  "description": "Anthropic Instrumentaion",
  "main": "dist/index.js",
  "module": "dist/index.mjs",
@@ -26,6 +26,7 @@
  },
  "files": [
  "dist/**/*.js",
+ "dist/**/*.mjs",
  "dist/**/*.js.map",
  "dist/**/*.d.ts",
  "doc",
@@ -40,7 +41,7 @@
  "@opentelemetry/core": "^1.25.0",
  "@opentelemetry/instrumentation": "^0.52.0",
  "@opentelemetry/semantic-conventions": "^1.25.0",
- "@traceloop/ai-semantic-conventions": "^0.9.0",
+ "@traceloop/ai-semantic-conventions": "^0.11.0",
  "tslib": "^2.3.0"
  },
  "devDependencies": {
@@ -52,5 +53,5 @@
  "ts-mocha": "^10.0.0"
  },
  "homepage": "https://github.com/traceloop/openllmetry-js/tree/main/packages/instrumentation-anthropic",
- "gitHead": "5019c20e284836be10641e7d2c5692ef8621475d"
+ "gitHead": "77fe2d5853f90f5a9c1f1fedece8e3e8fc702adf"
  }
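
For context, here is a minimal sketch of wiring the instrumentation into an OpenTelemetry Node setup from an ESM project, which is what the new dist/index.mjs entry enables. It is not part of the diff: registerInstrumentations comes from @opentelemetry/instrumentation, the tracer provider is assumed to be configured elsewhere, and the manuallyInstrument call mirrors the method shown in dist/index.mjs above.

// Sketch only: assumes an OpenTelemetry tracer provider is already registered.
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { AnthropicInstrumentation } from '@traceloop/instrumentation-anthropic';
import * as anthropicModule from '@anthropic-ai/sdk';

// traceContent: false keeps prompt/completion text off spans globally
// (see _shouldSendPrompts in dist/index.mjs).
const anthropicInstrumentation = new AnthropicInstrumentation({ traceContent: false });
registerInstrumentations({ instrumentations: [anthropicInstrumentation] });

// ESM imports are typically not intercepted by the require()-based patch hook,
// so the module namespace can be patched explicitly; this assumes the namespace
// exposes the Anthropic class the way manuallyInstrument expects.
anthropicInstrumentation.manuallyInstrument(anthropicModule);

// Subsequent messages.create(...) / completions.create(...) calls are wrapped
// and emit anthropic.chat / anthropic.completion client spans.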