@mtharrison/loupe 1.2.0 → 1.4.0

This diff shows the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -3,21 +3,21 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.isTraceEnabled = isTraceEnabled;
4
4
  exports.getLocalLLMTracer = getLocalLLMTracer;
5
5
  exports.startTraceServer = startTraceServer;
6
- exports.recordInvokeStart = recordInvokeStart;
7
- exports.recordInvokeFinish = recordInvokeFinish;
8
- exports.recordStreamStart = recordStreamStart;
9
- exports.recordStreamChunk = recordStreamChunk;
10
- exports.recordStreamFinish = recordStreamFinish;
11
- exports.recordError = recordError;
6
+ exports.startSpan = startSpan;
7
+ exports.endSpan = endSpan;
8
+ exports.addSpanEvent = addSpanEvent;
9
+ exports.recordException = recordException;
12
10
  exports.__resetLocalLLMTracerForTests = __resetLocalLLMTracerForTests;
13
11
  exports.wrapChatModel = wrapChatModel;
14
12
  exports.wrapOpenAIClient = wrapOpenAIClient;
13
+ const node_async_hooks_1 = require("node:async_hooks");
15
14
  const server_1 = require("./server");
16
15
  const store_1 = require("./store");
17
16
  const ui_build_1 = require("./ui-build");
18
17
  const utils_1 = require("./utils");
19
18
  let singleton = null;
20
19
  const DEFAULT_TRACE_PORT = 4319;
20
+ const activeSpanStorage = new node_async_hooks_1.AsyncLocalStorage();
21
21
  function isTraceEnabled() {
22
22
  return (0, utils_1.envFlag)('LLM_TRACE_ENABLED');
23
23
  }
@@ -33,23 +33,17 @@ function getLocalLLMTracer(config = {}) {
33
33
  function startTraceServer(config = {}) {
34
34
  return getLocalLLMTracer(config).startServer();
35
35
  }
36
- function recordInvokeStart(context, request, config = {}) {
37
- return getLocalLLMTracer(config).recordInvokeStart(context, request);
36
+ function startSpan(context, options = {}, config = {}) {
37
+ return getLocalLLMTracer(config).startSpan(context, options);
38
38
  }
39
- function recordInvokeFinish(traceId, response, config = {}) {
40
- getLocalLLMTracer(config).recordInvokeFinish(traceId, response);
39
+ function endSpan(spanId, response, config = {}) {
40
+ getLocalLLMTracer(config).endSpan(spanId, response);
41
41
  }
42
- function recordStreamStart(context, request, config = {}) {
43
- return getLocalLLMTracer(config).recordStreamStart(context, request);
42
+ function addSpanEvent(spanId, event, config = {}) {
43
+ getLocalLLMTracer(config).addSpanEvent(spanId, event);
44
44
  }
45
- function recordStreamChunk(traceId, chunk, config = {}) {
46
- getLocalLLMTracer(config).recordStreamChunk(traceId, chunk);
47
- }
48
- function recordStreamFinish(traceId, chunk, config = {}) {
49
- getLocalLLMTracer(config).recordStreamFinish(traceId, chunk);
50
- }
51
- function recordError(traceId, error, config = {}) {
52
- getLocalLLMTracer(config).recordError(traceId, error);
45
+ function recordException(spanId, error, config = {}) {
46
+ getLocalLLMTracer(config).recordException(spanId, error);
53
47
  }
54
48
  function __resetLocalLLMTracerForTests() {
55
49
  if (singleton?.uiWatcher) {
@@ -70,14 +64,19 @@ function wrapChatModel(model, getContext, config) {
70
64
  if (!tracer.isEnabled()) {
71
65
  return model.invoke(input, options);
72
66
  }
73
- const traceId = tracer.recordInvokeStart(getContext ? getContext() : {}, { input: input, options: options });
67
+ const traceId = tracer.startSpan(getContext ? getContext() : {}, {
68
+ attributes: { 'gen_ai.operation.name': 'chat' },
69
+ mode: 'invoke',
70
+ name: 'llm.invoke',
71
+ request: { input: input, options: options },
72
+ });
74
73
  try {
75
- const response = await model.invoke(input, options);
76
- tracer.recordInvokeFinish(traceId, response);
74
+ const response = await tracer.runWithActiveSpan(traceId, () => model.invoke(input, options));
75
+ tracer.endSpan(traceId, response);
77
76
  return response;
78
77
  }
79
78
  catch (error) {
80
- tracer.recordError(traceId, error);
79
+ tracer.recordException(traceId, error);
81
80
  throw error;
82
81
  }
83
82
  },
@@ -87,21 +86,26 @@ function wrapChatModel(model, getContext, config) {
87
86
  yield* model.stream(input, options);
88
87
  return;
89
88
  }
90
- const traceId = tracer.recordStreamStart(getContext ? getContext() : {}, { input: input, options: options });
89
+ const traceId = tracer.startSpan(getContext ? getContext() : {}, {
90
+ attributes: { 'gen_ai.operation.name': 'chat' },
91
+ mode: 'stream',
92
+ name: 'llm.stream',
93
+ request: { input: input, options: options },
94
+ });
91
95
  try {
92
- const stream = model.stream(input, options);
96
+ const stream = tracer.runWithActiveSpan(traceId, () => model.stream(input, options));
93
97
  for await (const chunk of stream) {
94
98
  if (chunk?.type === 'finish') {
95
- tracer.recordStreamFinish(traceId, chunk);
99
+ tracer.endSpan(traceId, chunk);
96
100
  }
97
101
  else {
98
- tracer.recordStreamChunk(traceId, chunk);
102
+ tracer.addSpanEvent(traceId, (0, utils_1.toSpanEventInputFromChunk)(chunk));
99
103
  }
100
104
  yield chunk;
101
105
  }
102
106
  }
103
107
  catch (error) {
104
- tracer.recordError(traceId, error);
108
+ tracer.recordException(traceId, error);
105
109
  throw error;
106
110
  }
107
111
  },
@@ -121,24 +125,34 @@ function wrapOpenAIClient(client, getContext, config) {
121
125
  }
122
126
  const context = withOpenAITraceContext(getContext ? getContext() : {}, params);
123
127
  if (params?.stream) {
124
- const traceId = tracer.recordStreamStart(context, { input: params, options: options });
128
+ const traceId = tracer.startSpan(context, {
129
+ attributes: { 'gen_ai.operation.name': 'chat' },
130
+ mode: 'stream',
131
+ name: 'openai.chat.completions',
132
+ request: { input: params, options: options },
133
+ });
125
134
  try {
126
- const stream = await target.create.call(target, params, options);
135
+ const stream = await tracer.runWithActiveSpan(traceId, () => target.create.call(target, params, options));
127
136
  return wrapOpenAIChatCompletionsStream(stream, tracer, traceId);
128
137
  }
129
138
  catch (error) {
130
- tracer.recordError(traceId, error);
139
+ tracer.recordException(traceId, error);
131
140
  throw error;
132
141
  }
133
142
  }
134
- const traceId = tracer.recordInvokeStart(context, { input: params, options: options });
143
+ const traceId = tracer.startSpan(context, {
144
+ attributes: { 'gen_ai.operation.name': 'chat' },
145
+ mode: 'invoke',
146
+ name: 'openai.chat.completions',
147
+ request: { input: params, options: options },
148
+ });
135
149
  try {
136
- const response = await target.create.call(target, params, options);
137
- tracer.recordInvokeFinish(traceId, normalizeOpenAIChatCompletionResponse(response));
150
+ const response = await tracer.runWithActiveSpan(traceId, () => target.create.call(target, params, options));
151
+ tracer.endSpan(traceId, normalizeOpenAIChatCompletionResponse(response));
138
152
  return response;
139
153
  }
140
154
  catch (error) {
141
- tracer.recordError(traceId, error);
155
+ tracer.recordException(traceId, error);
142
156
  throw error;
143
157
  }
144
158
  };
@@ -213,6 +227,27 @@ class LocalLLMTracerImpl {
213
227
  isEnabled() {
214
228
  return isTraceEnabled();
215
229
  }
230
+ startSpan(context, options = {}) {
231
+ void this.startServer();
232
+ const parentSpanId = options.parentSpanId || activeSpanStorage.getStore() || null;
233
+ return this.store.startSpan(context, {
234
+ ...options,
235
+ parentSpanId,
236
+ request: normaliseRequest(options.request || {}),
237
+ });
238
+ }
239
+ runWithActiveSpan(spanId, callback) {
240
+ return activeSpanStorage.run(spanId, callback);
241
+ }
242
+ addSpanEvent(spanId, event) {
243
+ this.store.addSpanEvent(spanId, (0, utils_1.safeClone)(event));
244
+ }
245
+ endSpan(spanId, response) {
246
+ this.store.endSpan(spanId, (0, utils_1.safeClone)(response));
247
+ }
248
+ recordException(spanId, error) {
249
+ this.store.recordException(spanId, error);
250
+ }
216
251
  startServer() {
217
252
  if (!this.isEnabled() || this.serverFailed) {
218
253
  return Promise.resolve(this.serverInfo);
@@ -234,7 +269,7 @@ class LocalLLMTracerImpl {
234
269
  this.uiWatcher = await (0, ui_build_1.maybeStartUIWatcher)(() => {
235
270
  this.server?.broadcast({
236
271
  timestamp: new Date().toISOString(),
237
- traceId: null,
272
+ spanId: null,
238
273
  type: 'ui:reload',
239
274
  });
240
275
  }, this.config.uiHotReload);
@@ -256,26 +291,6 @@ class LocalLLMTracerImpl {
256
291
  })();
257
292
  return this.serverStartPromise;
258
293
  }
259
- recordInvokeStart(context, request) {
260
- void this.startServer();
261
- return this.store.recordInvokeStart(context, normaliseRequest(request));
262
- }
263
- recordInvokeFinish(traceId, response) {
264
- this.store.recordInvokeFinish(traceId, (0, utils_1.safeClone)(response));
265
- }
266
- recordStreamStart(context, request) {
267
- void this.startServer();
268
- return this.store.recordStreamStart(context, normaliseRequest(request));
269
- }
270
- recordStreamChunk(traceId, chunk) {
271
- this.store.recordStreamChunk(traceId, (0, utils_1.safeClone)(chunk));
272
- }
273
- recordStreamFinish(traceId, chunk) {
274
- this.store.recordStreamFinish(traceId, (0, utils_1.safeClone)(chunk));
275
- }
276
- recordError(traceId, error) {
277
- this.store.recordError(traceId, error);
278
- }
279
294
  }
280
295
  function normaliseRequest(request) {
281
296
  return {
@@ -322,7 +337,7 @@ function wrapOpenAIChatCompletionsStream(stream, tracer, traceId) {
322
337
  state.began = true;
323
338
  const nextRole = role || state.role || 'assistant';
324
339
  state.role = nextRole;
325
- tracer.recordStreamChunk(traceId, { type: 'begin', role: nextRole });
340
+ tracer.addSpanEvent(traceId, (0, utils_1.toSpanEventInputFromChunk)({ type: 'begin', role: nextRole }));
326
341
  };
327
342
  const emitFinish = () => {
328
343
  if (state.finished) {
@@ -330,7 +345,7 @@ function wrapOpenAIChatCompletionsStream(stream, tracer, traceId) {
330
345
  }
331
346
  emitBegin(state.role);
332
347
  state.finished = true;
333
- tracer.recordStreamFinish(traceId, {
348
+ tracer.endSpan(traceId, {
334
349
  type: 'finish',
335
350
  finish_reasons: state.finishReasons,
336
351
  message: {
@@ -382,23 +397,23 @@ function wrapOpenAIChatCompletionsStream(stream, tracer, traceId) {
382
397
  emitBegin(state.role);
383
398
  }
384
399
  if (contentParts.length > 0) {
385
- tracer.recordStreamChunk(traceId, {
400
+ tracer.addSpanEvent(traceId, (0, utils_1.toSpanEventInputFromChunk)({
386
401
  type: 'chunk',
387
402
  content: contentParts.join(''),
388
403
  finish_reasons: [...finishReasons],
389
404
  raw,
390
405
  tool_calls: chunkToolCalls,
391
406
  usage,
392
- });
407
+ }));
393
408
  return;
394
409
  }
395
- tracer.recordStreamChunk(traceId, {
410
+ tracer.addSpanEvent(traceId, (0, utils_1.toSpanEventInputFromChunk)({
396
411
  type: 'event',
397
412
  finish_reasons: [...finishReasons],
398
413
  raw,
399
414
  tool_calls: chunkToolCalls,
400
415
  usage,
401
- });
416
+ }));
402
417
  };
403
418
  const createWrappedIterator = (iterator) => ({
404
419
  async next(...args) {
@@ -412,7 +427,7 @@ function wrapOpenAIChatCompletionsStream(stream, tracer, traceId) {
412
427
  return result;
413
428
  }
414
429
  catch (error) {
415
- tracer.recordError(traceId, error);
430
+ tracer.recordException(traceId, error);
416
431
  throw error;
417
432
  }
418
433
  },
@@ -428,12 +443,12 @@ function wrapOpenAIChatCompletionsStream(stream, tracer, traceId) {
428
443
  return result;
429
444
  }
430
445
  catch (error) {
431
- tracer.recordError(traceId, error);
446
+ tracer.recordException(traceId, error);
432
447
  throw error;
433
448
  }
434
449
  },
435
450
  async throw(error) {
436
- tracer.recordError(traceId, error);
451
+ tracer.recordException(traceId, error);
437
452
  if (typeof iterator.throw === 'function') {
438
453
  return iterator.throw(error);
439
454
  }
@@ -41,4 +41,4 @@ export declare function findSessionNodePath(nodes: SessionNavHierarchyNode[], id
41
41
  export declare function findSessionNodeById(nodes: SessionNavHierarchyNode[], id: string): SessionNavHierarchyNode | null;
42
42
  export declare function getNewestTraceIdForNode(node: SessionNavHierarchyNode | null | undefined): string | null;
43
43
  export declare function resolveSessionTreeSelection(sessionNodes: SessionNavHierarchyNode[], selectedNodeId: string | null, selectedTraceId: string | null): SessionTreeSelection;
44
- export declare function getDefaultExpandedSessionTreeNodeIds(sessionNodes: SessionNavHierarchyNode[], activeSessionId: string | null, selectedNodeId: string | null): Set<string>;
44
+ export declare function getDefaultExpandedSessionTreeNodeIds(sessionNodes: SessionNavHierarchyNode[], activeSessionId: string | null, selectedNodeId: string | null, selectedTraceId?: string | null): Set<string>;
@@ -66,7 +66,7 @@ function resolveSessionTreeSelection(sessionNodes, selectedNodeId, selectedTrace
66
66
  selectedTraceId: nextSelectedTraceId,
67
67
  };
68
68
  }
69
- function getDefaultExpandedSessionTreeNodeIds(sessionNodes, activeSessionId, selectedNodeId) {
69
+ function getDefaultExpandedSessionTreeNodeIds(sessionNodes, activeSessionId, selectedNodeId, selectedTraceId = null) {
70
70
  const expanded = new Set();
71
71
  const activeSession = (activeSessionId
72
72
  ? sessionNodes.find((node) => node.id === activeSessionId) ?? null
@@ -89,6 +89,13 @@ function getDefaultExpandedSessionTreeNodeIds(sessionNodes, activeSessionId, sel
89
89
  }
90
90
  }
91
91
  }
92
+ if (selectedTraceId) {
93
+ for (const node of findSessionNodePath([activeSession], `trace:${selectedTraceId}`)) {
94
+ if (node.children.length) {
95
+ expanded.add(node.id);
96
+ }
97
+ }
98
+ }
92
99
  return expanded;
93
100
  }
94
101
  function deriveSessionNavItem(node, traceById) {
package/dist/store.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { EventEmitter } from 'node:events';
2
- import { type HierarchyResponse, type NormalizedTraceContext, type TraceFilters, type TraceListResponse, type TraceRecord, type TraceRequest } from './types';
2
+ import { type HierarchyResponse, type NormalizedTraceContext, type SpanEventInput, type SpanStartOptions, type TraceFilters, type TraceListResponse, type TraceRecord } from './types';
3
3
  export declare class TraceStore extends EventEmitter {
4
4
  maxTraces: number;
5
5
  order: string[];
@@ -7,17 +7,17 @@ export declare class TraceStore extends EventEmitter {
7
7
  constructor(options?: {
8
8
  maxTraces?: number;
9
9
  });
10
- recordInvokeStart(context: NormalizedTraceContext | undefined, request: TraceRequest): string;
11
- recordInvokeFinish(traceId: string, response: any): void;
12
- recordStreamStart(context: NormalizedTraceContext | undefined, request: TraceRequest): string;
13
- recordStreamChunk(traceId: string, chunk: any): void;
14
- recordStreamFinish(traceId: string, chunk: any): void;
15
- recordError(traceId: string, error: unknown): void;
10
+ startSpan(context: NormalizedTraceContext | undefined, options?: SpanStartOptions): string;
11
+ addSpanEvent(spanId: string, event: SpanEventInput): void;
12
+ endSpan(spanId: string, response: any): void;
13
+ recordException(spanId: string, error: unknown): void;
14
+ private applyStreamPayload;
16
15
  list(filters?: TraceFilters): TraceListResponse;
17
16
  get(traceId: string): TraceRecord | null;
18
17
  clear(): void;
19
18
  hierarchy(filters?: TraceFilters): HierarchyResponse;
20
19
  private recordStart;
20
+ private findTraceBySpanReference;
21
21
  private evictIfNeeded;
22
22
  private cloneTrace;
23
23
  private filteredTraces;