risicare 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frameworks/instructor.cjs +178 -0
- package/dist/frameworks/instructor.cjs.map +1 -0
- package/dist/frameworks/instructor.d.cts +28 -0
- package/dist/frameworks/instructor.d.ts +28 -0
- package/dist/frameworks/instructor.js +151 -0
- package/dist/frameworks/instructor.js.map +1 -0
- package/dist/frameworks/langchain.cjs +262 -0
- package/dist/frameworks/langchain.cjs.map +1 -0
- package/dist/frameworks/langchain.d.cts +45 -0
- package/dist/frameworks/langchain.d.ts +45 -0
- package/dist/frameworks/langchain.js +235 -0
- package/dist/frameworks/langchain.js.map +1 -0
- package/dist/frameworks/langgraph.cjs +296 -0
- package/dist/frameworks/langgraph.cjs.map +1 -0
- package/dist/frameworks/langgraph.d.cts +28 -0
- package/dist/frameworks/langgraph.d.ts +28 -0
- package/dist/frameworks/langgraph.js +269 -0
- package/dist/frameworks/langgraph.js.map +1 -0
- package/dist/frameworks/llamaindex.cjs +239 -0
- package/dist/frameworks/llamaindex.cjs.map +1 -0
- package/dist/frameworks/llamaindex.d.cts +55 -0
- package/dist/frameworks/llamaindex.d.ts +55 -0
- package/dist/frameworks/llamaindex.js +212 -0
- package/dist/frameworks/llamaindex.js.map +1 -0
- package/dist/index.cjs +468 -7
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +124 -6
- package/dist/index.d.ts +124 -6
- package/dist/index.js +459 -7
- package/dist/index.js.map +1 -1
- package/dist/providers/anthropic/index.cjs +22 -0
- package/dist/providers/anthropic/index.cjs.map +1 -1
- package/dist/providers/anthropic/index.js +22 -0
- package/dist/providers/anthropic/index.js.map +1 -1
- package/dist/providers/bedrock/index.cjs +334 -0
- package/dist/providers/bedrock/index.cjs.map +1 -0
- package/dist/providers/bedrock/index.d.cts +37 -0
- package/dist/providers/bedrock/index.d.ts +37 -0
- package/dist/providers/bedrock/index.js +307 -0
- package/dist/providers/bedrock/index.js.map +1 -0
- package/dist/providers/cerebras/index.cjs +282 -0
- package/dist/providers/cerebras/index.cjs.map +1 -0
- package/dist/providers/cerebras/index.d.cts +24 -0
- package/dist/providers/cerebras/index.d.ts +24 -0
- package/dist/providers/cerebras/index.js +255 -0
- package/dist/providers/cerebras/index.js.map +1 -0
- package/dist/providers/cohere/index.cjs +347 -0
- package/dist/providers/cohere/index.cjs.map +1 -0
- package/dist/providers/cohere/index.d.cts +24 -0
- package/dist/providers/cohere/index.d.ts +24 -0
- package/dist/providers/cohere/index.js +320 -0
- package/dist/providers/cohere/index.js.map +1 -0
- package/dist/providers/google/index.cjs +337 -0
- package/dist/providers/google/index.cjs.map +1 -0
- package/dist/providers/google/index.d.cts +25 -0
- package/dist/providers/google/index.d.ts +25 -0
- package/dist/providers/google/index.js +310 -0
- package/dist/providers/google/index.js.map +1 -0
- package/dist/providers/groq/index.cjs +282 -0
- package/dist/providers/groq/index.cjs.map +1 -0
- package/dist/providers/groq/index.d.cts +23 -0
- package/dist/providers/groq/index.d.ts +23 -0
- package/dist/providers/groq/index.js +255 -0
- package/dist/providers/groq/index.js.map +1 -0
- package/dist/providers/huggingface/index.cjs +289 -0
- package/dist/providers/huggingface/index.cjs.map +1 -0
- package/dist/providers/huggingface/index.d.cts +24 -0
- package/dist/providers/huggingface/index.d.ts +24 -0
- package/dist/providers/huggingface/index.js +262 -0
- package/dist/providers/huggingface/index.js.map +1 -0
- package/dist/providers/mistral/index.cjs +336 -0
- package/dist/providers/mistral/index.cjs.map +1 -0
- package/dist/providers/mistral/index.d.cts +24 -0
- package/dist/providers/mistral/index.d.ts +24 -0
- package/dist/providers/mistral/index.js +309 -0
- package/dist/providers/mistral/index.js.map +1 -0
- package/dist/providers/ollama/index.cjs +376 -0
- package/dist/providers/ollama/index.cjs.map +1 -0
- package/dist/providers/ollama/index.d.cts +28 -0
- package/dist/providers/ollama/index.d.ts +28 -0
- package/dist/providers/ollama/index.js +349 -0
- package/dist/providers/ollama/index.js.map +1 -0
- package/dist/providers/openai/index.cjs +31 -1
- package/dist/providers/openai/index.cjs.map +1 -1
- package/dist/providers/openai/index.js +31 -1
- package/dist/providers/openai/index.js.map +1 -1
- package/dist/providers/together/index.cjs +275 -0
- package/dist/providers/together/index.cjs.map +1 -0
- package/dist/providers/together/index.d.cts +24 -0
- package/dist/providers/together/index.d.ts +24 -0
- package/dist/providers/together/index.js +248 -0
- package/dist/providers/together/index.js.map +1 -0
- package/dist/providers/vercel-ai/index.cjs.map +1 -1
- package/dist/providers/vercel-ai/index.js.map +1 -1
- package/package.json +124 -2
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/frameworks/langchain.ts
// Entry-point export object: the named export is registered as a lazy
// getter, so `RisicareCallbackHandler` (defined further down in this
// bundle) is resolved at access time rather than at module load.
var langchain_exports = {};
__export(langchain_exports, {
  RisicareCallbackHandler: () => RisicareCallbackHandler
});
module.exports = __toCommonJS(langchain_exports);
|
|
26
|
+
|
|
27
|
+
// src/ids.ts
|
|
28
|
+
var import_node_crypto = require("crypto");
|
|
29
|
+
|
|
30
|
+
// src/noop.ts
// Inert span used on the disabled path: all identifying fields are
// all-zero sentinels, every mutator is a no-op that returns the span so
// fluent call chains keep working, and the object is frozen so it can be
// shared safely.
var NOOP_SPAN = Object.freeze({
  traceId: "00000000000000000000000000000000",
  spanId: "0000000000000000",
  parentSpanId: undefined,
  name: "noop",
  kind: "internal" /* INTERNAL */,
  startTime: "",
  startHrtime: 0,
  endTime: undefined,
  status: "unset" /* UNSET */,
  statusMessage: undefined,
  attributes: Object.freeze({}),
  events: Object.freeze([]),
  links: Object.freeze([]),
  sessionId: undefined,
  agentId: undefined,
  agentName: undefined,
  semanticPhase: undefined,
  llmProvider: undefined,
  llmModel: undefined,
  llmPromptTokens: undefined,
  llmCompletionTokens: undefined,
  llmTotalTokens: undefined,
  llmCostUsd: undefined,
  toolName: undefined,
  toolSuccess: undefined,
  isEnded: true,
  durationMs: 0,
  // Chainable no-ops — each silently ignores its arguments.
  setAttribute() { return this; },
  setAttributes() { return this; },
  setStatus() { return this; },
  addEvent() { return this; },
  addLink() { return this; },
  recordException() { return this; },
  setLlmFields() { return this; },
  setToolFields() { return this; },
  end() {},
  // Minimal payload mirroring the sentinel fields; collections are fresh
  // empty mutables so callers can't accidentally share the frozen ones.
  toPayload() {
    const { traceId, spanId, name, kind, startTime, status } = this;
    return {
      traceId,
      spanId,
      name,
      kind,
      startTime,
      status,
      attributes: {},
      events: [],
      links: []
    };
  }
});
|
|
99
|
+
|
|
100
|
+
// src/globals.ts
|
|
101
|
+
var import_node_async_hooks = require("async_hooks");
|
|
102
|
+
// Shared singleton slots live on globalThis under a namespaced prefix so
// that every independently bundled entry point reads the same tracer.
var G = globalThis;
var PREFIX = "__risicare_";
// Read the process-wide tracer slot (undefined when init() never ran).
function getTracer() {
  return G[`${PREFIX}tracer`];
}

// src/client.ts
// Public accessor — simply forwards to the global slot above.
function getTracer2() {
  return getTracer();
}
|
|
112
|
+
|
|
113
|
+
// src/frameworks/langchain.ts
// LangChain.js callback handler: each run (chain / LLM / tool / retriever)
// opens a span in its *Start hook and closes it in the matching *End or
// *Error hook. Runs are correlated via LangChain's runId / parentRunId
// UUIDs; the parent span is looked up in `_spans` when still tracked.
var RisicareCallbackHandler = class {
  name = "RisicareCallbackHandler";
  _spans = /* @__PURE__ */ new Map();
  // Open a span for `runId`, parented to the span of `parentRunId` when
  // that run is still being tracked. No-op when tracing is disabled.
  _begin(runId, parentRunId, name, kind, attributes) {
    const tracer = getTracer2();
    if (!tracer?.enabled) return;
    const parentEntry = parentRunId ? this._spans.get(parentRunId) : void 0;
    const span = tracer.createSpan({
      name,
      kind,
      parentSpanId: parentEntry?.span.spanId,
      traceId: parentEntry?.span.traceId,
      attributes
    });
    this._spans.set(runId, { span, startTime: Date.now() });
  }
  // Close the span for `runId` successfully. `beforeEnd` may add
  // run-specific attributes before latency is stamped and the span ends.
  // Unknown runIds (e.g. start was skipped while disabled) are ignored.
  _finish(runId, beforeEnd) {
    const entry = this._spans.get(runId);
    if (!entry) return;
    beforeEnd?.(entry.span);
    entry.span.setAttribute("gen_ai.latency_ms", Date.now() - entry.startTime);
    entry.span.end();
    this._spans.delete(runId);
  }
  // Close the span for `runId` with the exception recorded.
  _fail(error, runId) {
    const entry = this._spans.get(runId);
    if (!entry) return;
    entry.span.recordException(error);
    entry.span.end();
    this._spans.delete(runId);
  }
  // ── Chain lifecycle ────────────────────────────────────────────────────────
  handleChainStart(chain, _inputs, runId, parentRunId) {
    this._begin(runId, parentRunId, chain.name || chain._type || "chain", "internal" /* INTERNAL */, {
      "framework": "langchain",
      "langchain.run_id": runId,
      ...parentRunId ? { "langchain.parent_run_id": parentRunId } : {},
      ...chain._type ? { "langchain.chain_type": chain._type } : {}
    });
  }
  handleChainEnd(_outputs, runId) {
    this._finish(runId);
  }
  handleChainError(error, runId) {
    this._fail(error, runId);
  }
  // ── LLM lifecycle ─────────────────────────────────────────────────────────
  handleLLMStart(llm, _prompts, runId, parentRunId) {
    this._begin(runId, parentRunId, llm.name || llm._type || "llm", "llm_call" /* LLM_CALL */, {
      "framework": "langchain",
      "langchain.run_id": runId,
      "gen_ai.system": "langchain",
      "gen_ai.prompt.count": _prompts.length
    });
  }
  handleLLMEnd(output, runId) {
    this._finish(runId, (span) => {
      // Token usage may arrive in camelCase or snake_case depending on the
      // underlying provider wrapper.
      const usage = output.llmOutput?.tokenUsage;
      if (usage) {
        span.setLlmFields({
          promptTokens: usage.promptTokens ?? usage.prompt_tokens,
          completionTokens: usage.completionTokens ?? usage.completion_tokens,
          totalTokens: usage.totalTokens ?? usage.total_tokens
        });
      }
      const model = output.llmOutput?.model ?? output.llmOutput?.modelName;
      if (model) {
        span.setLlmFields({ model });
      }
    });
  }
  handleLLMError(error, runId) {
    this._fail(error, runId);
  }
  // ── Tool lifecycle ────────────────────────────────────────────────────────
  handleToolStart(tool, input, runId, parentRunId) {
    this._begin(runId, parentRunId, tool.name || "tool", "tool_call" /* TOOL_CALL */, {
      "framework": "langchain",
      "langchain.run_id": runId,
      "tool.name": tool.name || "unknown",
      "tool.input_length": input?.length ?? 0
    });
  }
  handleToolEnd(output, runId) {
    this._finish(runId, (span) => {
      span.setAttribute("tool.output_length", output?.length ?? 0);
    });
  }
  handleToolError(error, runId) {
    this._fail(error, runId);
  }
  // ── Retriever lifecycle ───────────────────────────────────────────────────
  handleRetrieverStart(retriever, _query, runId, parentRunId) {
    this._begin(runId, parentRunId, retriever.name || "retriever", "retrieval" /* RETRIEVAL */, {
      "framework": "langchain",
      "langchain.run_id": runId
    });
  }
  handleRetrieverEnd(documents, runId) {
    this._finish(runId, (span) => {
      span.setAttribute("retriever.document_count", documents?.length ?? 0);
    });
  }
  // Fix: previously missing — a failing retrieval leaked its `_spans` entry
  // and left the span open forever. Mirrors the other *Error hooks.
  handleRetrieverError(error, runId) {
    this._fail(error, runId);
  }
};
|
|
258
|
+
// Annotate the CommonJS export names for ESM import in node:
// NOTE(review): the `0 &&` makes this dead code at runtime; per the
// comment above it exists only so Node's static analysis of CJS files can
// discover the named export for `import { ... } from` syntax.
0 && (module.exports = {
  RisicareCallbackHandler
});
//# sourceMappingURL=langchain.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/frameworks/langchain.ts","../../src/ids.ts","../../src/noop.ts","../../src/globals.ts","../../src/client.ts"],"sourcesContent":["/**\n * LangChain.js integration via callback handler.\n *\n * LangChain.js uses BaseCallbackHandler from @langchain/core/callbacks/base.\n * This handler receives lifecycle events (handleChainStart, handleLLMEnd, etc.)\n * with a runId (UUID string) for parent-child correlation.\n *\n * Usage:\n * import { RisicareCallbackHandler } from 'risicare/frameworks/langchain';\n * const handler = new RisicareCallbackHandler();\n * const result = await chain.invoke(input, { callbacks: [handler] });\n *\n * Does NOT suppress provider instrumentation -- LangChain callbacks are\n * supplementary to the underlying LLM provider spans.\n */\n\nimport { getTracer } from '../client.js';\nimport { SpanKind } from '../types.js';\nimport type { Span } from '../span.js';\n\ninterface SpanEntry {\n span: Span;\n startTime: number;\n}\n\nexport class RisicareCallbackHandler {\n name = 'RisicareCallbackHandler';\n\n private _spans = new Map<string, SpanEntry>();\n\n // ── Chain lifecycle ────────────────────────────────────────────────────────\n\n handleChainStart(\n chain: { name?: string; _type?: string },\n _inputs: Record<string, unknown>,\n runId: string,\n parentRunId?: string,\n ): void {\n const tracer = getTracer();\n if (!tracer?.enabled) return;\n\n const parentEntry = parentRunId ? this._spans.get(parentRunId) : undefined;\n const span = tracer.createSpan({\n name: chain.name || chain._type || 'chain',\n kind: SpanKind.INTERNAL,\n parentSpanId: parentEntry?.span.spanId,\n traceId: parentEntry?.span.traceId,\n attributes: {\n 'framework': 'langchain',\n 'langchain.run_id': runId,\n ...(parentRunId ? { 'langchain.parent_run_id': parentRunId } : {}),\n ...(chain._type ? 
{ 'langchain.chain_type': chain._type } : {}),\n },\n });\n\n this._spans.set(runId, { span, startTime: Date.now() });\n }\n\n handleChainEnd(_outputs: Record<string, unknown>, runId: string): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n entry.span.setAttribute('gen_ai.latency_ms', Date.now() - entry.startTime);\n entry.span.end();\n this._spans.delete(runId);\n }\n\n handleChainError(error: Error, runId: string): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n entry.span.recordException(error);\n entry.span.end();\n this._spans.delete(runId);\n }\n\n // ── LLM lifecycle ─────────────────────────────────────────────────────────\n\n handleLLMStart(\n llm: { name?: string; _type?: string },\n _prompts: string[],\n runId: string,\n parentRunId?: string,\n ): void {\n const tracer = getTracer();\n if (!tracer?.enabled) return;\n\n const parentEntry = parentRunId ? this._spans.get(parentRunId) : undefined;\n const span = tracer.createSpan({\n name: llm.name || llm._type || 'llm',\n kind: SpanKind.LLM_CALL,\n parentSpanId: parentEntry?.span.spanId,\n traceId: parentEntry?.span.traceId,\n attributes: {\n 'framework': 'langchain',\n 'langchain.run_id': runId,\n 'gen_ai.system': 'langchain',\n 'gen_ai.prompt.count': _prompts.length,\n },\n });\n\n this._spans.set(runId, { span, startTime: Date.now() });\n }\n\n handleLLMEnd(\n output: {\n generations?: unknown[][];\n llmOutput?: Record<string, unknown>;\n },\n runId: string,\n ): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n\n // Extract token usage from llmOutput\n const usage = output.llmOutput?.tokenUsage as\n | Record<string, number>\n | undefined;\n if (usage) {\n entry.span.setLlmFields({\n promptTokens: usage.promptTokens ?? usage.prompt_tokens,\n completionTokens: usage.completionTokens ?? usage.completion_tokens,\n totalTokens: usage.totalTokens ?? 
usage.total_tokens,\n });\n }\n\n const model = (output.llmOutput?.model ??\n output.llmOutput?.modelName) as string | undefined;\n if (model) {\n entry.span.setLlmFields({ model });\n }\n\n entry.span.setAttribute('gen_ai.latency_ms', Date.now() - entry.startTime);\n entry.span.end();\n this._spans.delete(runId);\n }\n\n handleLLMError(error: Error, runId: string): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n entry.span.recordException(error);\n entry.span.end();\n this._spans.delete(runId);\n }\n\n // ── Tool lifecycle ────────────────────────────────────────────────────────\n\n handleToolStart(\n tool: { name?: string },\n input: string,\n runId: string,\n parentRunId?: string,\n ): void {\n const tracer = getTracer();\n if (!tracer?.enabled) return;\n\n const parentEntry = parentRunId ? this._spans.get(parentRunId) : undefined;\n const span = tracer.createSpan({\n name: tool.name || 'tool',\n kind: SpanKind.TOOL_CALL,\n parentSpanId: parentEntry?.span.spanId,\n traceId: parentEntry?.span.traceId,\n attributes: {\n 'framework': 'langchain',\n 'langchain.run_id': runId,\n 'tool.name': tool.name || 'unknown',\n 'tool.input_length': input?.length ?? 0,\n },\n });\n\n this._spans.set(runId, { span, startTime: Date.now() });\n }\n\n handleToolEnd(output: string, runId: string): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n entry.span.setAttribute('tool.output_length', output?.length ?? 
0);\n entry.span.setAttribute('gen_ai.latency_ms', Date.now() - entry.startTime);\n entry.span.end();\n this._spans.delete(runId);\n }\n\n handleToolError(error: Error, runId: string): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n entry.span.recordException(error);\n entry.span.end();\n this._spans.delete(runId);\n }\n\n // ── Retriever lifecycle ───────────────────────────────────────────────────\n\n handleRetrieverStart(\n retriever: { name?: string },\n _query: string,\n runId: string,\n parentRunId?: string,\n ): void {\n const tracer = getTracer();\n if (!tracer?.enabled) return;\n\n const parentEntry = parentRunId ? this._spans.get(parentRunId) : undefined;\n const span = tracer.createSpan({\n name: retriever.name || 'retriever',\n kind: SpanKind.RETRIEVAL,\n parentSpanId: parentEntry?.span.spanId,\n traceId: parentEntry?.span.traceId,\n attributes: {\n 'framework': 'langchain',\n 'langchain.run_id': runId,\n },\n });\n\n this._spans.set(runId, { span, startTime: Date.now() });\n }\n\n handleRetrieverEnd(documents: unknown[], runId: string): void {\n const entry = this._spans.get(runId);\n if (!entry) return;\n entry.span.setAttribute(\n 'retriever.document_count',\n documents?.length ?? 
0,\n );\n entry.span.setAttribute('gen_ai.latency_ms', Date.now() - entry.startTime);\n entry.span.end();\n this._spans.delete(runId);\n }\n}\n","/**\n * ID generation for traces and spans.\n *\n * Trace IDs: 32 lowercase hex characters (16 random bytes)\n * Span IDs: 16 lowercase hex characters (8 random bytes)\n *\n * Uses crypto.randomBytes for cryptographically secure randomness.\n */\n\nimport { randomBytes } from 'node:crypto';\n\nconst HEX_REGEX_32 = /^[0-9a-f]{32}$/;\nconst HEX_REGEX_16 = /^[0-9a-f]{16}$/;\n\nexport function generateTraceId(): string {\n return randomBytes(16).toString('hex');\n}\n\nexport function generateSpanId(): string {\n return randomBytes(8).toString('hex');\n}\n\nexport function generateAgentId(prefix?: string): string {\n const suffix = randomBytes(8).toString('hex');\n return prefix ? `${prefix}-${suffix}` : suffix;\n}\n\nexport function validateTraceId(id: string): boolean {\n return HEX_REGEX_32.test(id);\n}\n\nexport function validateSpanId(id: string): boolean {\n return HEX_REGEX_16.test(id);\n}\n","/**\n * No-op implementations for the disabled path.\n *\n * When tracing is disabled, all operations return these no-op objects\n * to maintain zero overhead. 
No allocations, no side effects.\n */\n\nimport { SpanKind, SpanStatus, type SpanPayload } from './types.js';\n\n/**\n * A frozen no-op span that silently ignores all operations.\n * Used when SDK is disabled to avoid overhead.\n */\nexport const NOOP_SPAN = Object.freeze({\n traceId: '00000000000000000000000000000000',\n spanId: '0000000000000000',\n parentSpanId: undefined,\n name: 'noop',\n kind: SpanKind.INTERNAL,\n startTime: '',\n startHrtime: 0,\n endTime: undefined,\n status: SpanStatus.UNSET,\n statusMessage: undefined,\n attributes: Object.freeze({}) as Record<string, unknown>,\n events: Object.freeze([]) as readonly [],\n links: Object.freeze([]) as readonly [],\n sessionId: undefined,\n agentId: undefined,\n agentName: undefined,\n semanticPhase: undefined,\n llmProvider: undefined,\n llmModel: undefined,\n llmPromptTokens: undefined,\n llmCompletionTokens: undefined,\n llmTotalTokens: undefined,\n llmCostUsd: undefined,\n toolName: undefined,\n toolSuccess: undefined,\n isEnded: true,\n durationMs: 0,\n\n setAttribute() { return this; },\n setAttributes() { return this; },\n setStatus() { return this; },\n addEvent() { return this; },\n addLink() { return this; },\n recordException() { return this; },\n setLlmFields() { return this; },\n setToolFields() { return this; },\n end() {},\n toPayload(): SpanPayload {\n return {\n traceId: this.traceId,\n spanId: this.spanId,\n name: this.name,\n kind: this.kind,\n startTime: this.startTime,\n status: this.status,\n attributes: {},\n events: [],\n links: [],\n };\n },\n});\n\nexport type NoopSpan = typeof NOOP_SPAN;\n","/**\n * Shared state via globalThis — ensures all entry point bundles share\n * the same singleton instances.\n *\n * Problem: tsup with `splitting: false` gives each entry point (index,\n * openai, anthropic, vercel-ai) its own copy of module-level variables.\n * This means `init()` from 'risicare' sets a tracer that 'risicare/openai'\n * can't see — breaking all provider instrumentation 
silently.\n *\n * Solution: Store all mutable singletons on globalThis with a namespaced\n * prefix. Every bundle reads/writes the same global slots.\n *\n * This pattern is used by React, OpenTelemetry, and other SDKs that must\n * share state across independently bundled entry points.\n *\n * @internal\n */\n\nimport { AsyncLocalStorage } from 'node:async_hooks';\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst G = globalThis as any;\nconst PREFIX = '__risicare_';\n\n// ─── Client & Tracer ────────────────────────────────────────────────────────\n\nexport function getClient(): unknown {\n return G[PREFIX + 'client'];\n}\n\nexport function setClient(client: unknown): void {\n G[PREFIX + 'client'] = client;\n}\n\nexport function getTracer(): unknown {\n return G[PREFIX + 'tracer'];\n}\n\nexport function setTracer(tracer: unknown): void {\n G[PREFIX + 'tracer'] = tracer;\n}\n\n// ─── Context Storage ────────────────────────────────────────────────────────\n\nexport function getContextStorage(): AsyncLocalStorage<unknown> {\n if (!G[PREFIX + 'ctx']) {\n G[PREFIX + 'ctx'] = new AsyncLocalStorage();\n }\n return G[PREFIX + 'ctx'];\n}\n\n// ─── Span Registry ──────────────────────────────────────────────────────────\n\nexport function getRegistry(): Map<string, unknown> {\n if (!G[PREFIX + 'registry']) {\n G[PREFIX + 'registry'] = new Map();\n }\n return G[PREFIX + 'registry'];\n}\n\nexport function getOpCount(): number {\n return G[PREFIX + 'opcount'] ?? 0;\n}\n\nexport function setOpCount(n: number): void {\n G[PREFIX + 'opcount'] = n;\n}\n\n// ─── Debug Flag ─────────────────────────────────────────────────────────────\n\nexport function getDebug(): boolean {\n return G[PREFIX + 'debug'] ?? 
false;\n}\n\nexport function setDebugFlag(enabled: boolean): void {\n G[PREFIX + 'debug'] = enabled;\n}\n","/**\n * RisicareClient — singleton client managing SDK lifecycle.\n *\n * Handles initialization, shutdown, and the connection between\n * the Tracer and the export pipeline (batch processor + HTTP exporter).\n *\n * Usage:\n * import { init, shutdown } from 'risicare';\n * init({ apiKey: 'rsk-...' }); // API key determines project\n * // ... instrument code ...\n * await shutdown(); // flush remaining spans\n */\n\nimport { type RisicareConfig, resolveConfig } from './config.js';\nimport { Tracer } from './tracer.js';\nimport { BatchSpanProcessor } from './exporters/batch.js';\nimport { HttpExporter } from './exporters/http.js';\nimport { ConsoleExporter } from './exporters/console.js';\nimport { SpanKind, SpanStatus } from './types.js';\nimport type { SpanExporter } from './exporters/base.js';\nimport { setDebug, debug } from './utils/log.js';\nimport {\n getClient as getGlobalClient,\n setClient as setGlobalClient,\n getTracer as getGlobalTracer,\n setTracer as setGlobalTracer,\n} from './globals.js';\n\n// ─── Client Class ───────────────────────────────────────────────────────────\n\nclass RisicareClient {\n readonly config: ReturnType<typeof resolveConfig>;\n readonly processor: BatchSpanProcessor;\n readonly tracer: Tracer;\n private _shutdownPromise: Promise<void> | undefined;\n private _shutdownHandlers: { signal: string; handler: () => void }[] = [];\n\n constructor(config?: Partial<RisicareConfig>) {\n this.config = resolveConfig(config);\n\n // API key format validation\n if (this.config.apiKey && !this.config.apiKey.startsWith('rsk-')) {\n debug('Warning: API key should start with \"rsk-\". 
Got: ' + this.config.apiKey.slice(0, 4) + '...');\n }\n\n // Build exporter chain\n let exporter: SpanExporter;\n if (this.config.debug && !this.config.apiKey) {\n exporter = new ConsoleExporter();\n } else if (this.config.apiKey) {\n exporter = new HttpExporter({\n endpoint: this.config.endpoint,\n apiKey: this.config.apiKey,\n projectId: this.config.projectId || undefined,\n environment: this.config.environment || undefined,\n compress: this.config.compress,\n });\n } else {\n // No API key and not debug — use console as fallback\n exporter = new ConsoleExporter();\n }\n\n this.processor = new BatchSpanProcessor({\n exporters: [exporter],\n batchSize: this.config.batchSize,\n batchTimeoutMs: this.config.batchTimeoutMs,\n maxQueueSize: this.config.maxQueueSize,\n debug: this.config.debug,\n });\n\n this.tracer = new Tracer({\n onSpanEnd: (span) => this.processor.onSpanEnd(span),\n sampleRate: this.config.sampleRate,\n enabled: this.config.enabled,\n traceContent: this.config.traceContent,\n });\n\n // Start the batch processor (enables span queuing and periodic flushing)\n this.processor.start();\n\n // Register shutdown hooks\n this._registerShutdownHooks();\n\n // Enable internal debug logging if configured\n setDebug(this.config.debug);\n debug(`Initialized: enabled=${this.config.enabled}, endpoint=${this.config.endpoint}`);\n }\n\n get enabled(): boolean {\n return this.tracer.enabled;\n }\n\n set enabled(value: boolean) {\n this.tracer.enabled = value;\n }\n\n // Audit #6: Promise-based shutdown dedup (fixes TOCTOU race condition)\n async shutdown(): Promise<void> {\n if (this._shutdownPromise) return this._shutdownPromise;\n this._shutdownPromise = this._doShutdown();\n return this._shutdownPromise;\n }\n\n private async _doShutdown(): Promise<void> {\n debug('Shutting down...');\n\n // Audit #3: Remove process listeners to prevent leak\n for (const { signal, handler } of this._shutdownHandlers) {\n process.removeListener(signal, handler);\n }\n 
this._shutdownHandlers = [];\n\n await this.processor.shutdown();\n }\n\n async flush(): Promise<void> {\n await this.processor.flush();\n }\n\n private _registerShutdownHooks(): void {\n const onShutdown = () => {\n // Audit #3: Add 5s timeout to prevent hanging on signal\n const timeout = setTimeout(() => process.exit(1), 5000);\n timeout.unref();\n this.shutdown().catch(() => {}).finally(() => clearTimeout(timeout));\n };\n\n const signals = ['beforeExit', 'SIGTERM', 'SIGINT'];\n for (const signal of signals) {\n process.once(signal, onShutdown);\n this._shutdownHandlers.push({ signal, handler: onShutdown });\n }\n }\n}\n\n// ─── Public API ─────────────────────────────────────────────────────────────\n\n/**\n * Initialize the Risicare SDK. Call once at application startup.\n *\n * @example\n * import { init } from 'risicare';\n * init({ apiKey: 'rsk-...', serviceName: 'my-agent', environment: 'production' });\n */\nexport function init(config?: Partial<RisicareConfig>): void {\n if (getGlobalClient()) {\n debug('Already initialized. Call shutdown() first to re-initialize.');\n return;\n }\n\n const client = new RisicareClient(config);\n setGlobalClient(client);\n setGlobalTracer(client.tracer);\n}\n\n/**\n * Gracefully shut down the SDK. Flushes pending spans before resolving.\n */\nexport async function shutdown(): Promise<void> {\n const client = getGlobalClient() as RisicareClient | undefined;\n if (!client) return;\n await client.shutdown();\n setGlobalClient(undefined);\n setGlobalTracer(undefined);\n}\n\n/**\n * Flush all pending spans without shutting down.\n */\nexport async function flush(): Promise<void> {\n const client = getGlobalClient() as RisicareClient | undefined;\n if (!client) return;\n await client.flush();\n}\n\n/**\n * Enable tracing at runtime.\n */\nexport function enable(): void {\n const client = getGlobalClient() as RisicareClient | undefined;\n if (client) client.enabled = true;\n}\n\n/**\n * Disable tracing at runtime. 
Spans will not be created or exported.\n */\nexport function disable(): void {\n const client = getGlobalClient() as RisicareClient | undefined;\n if (client) client.enabled = false;\n}\n\n/**\n * Check whether tracing is currently enabled.\n */\nexport function isEnabled(): boolean {\n const client = getGlobalClient() as RisicareClient | undefined;\n return client?.enabled ?? false;\n}\n\n/**\n * Get the global tracer instance. Returns undefined if not initialized.\n */\nexport function getTracer(): Tracer | undefined {\n return getGlobalTracer() as Tracer | undefined;\n}\n\n/**\n * Get the global tracer, or throw if not initialized.\n * @internal Used by decorators and providers that require an active tracer.\n */\nexport function requireTracer(): Tracer {\n const tracer = getGlobalTracer() as Tracer | undefined;\n if (!tracer) {\n throw new Error(\n 'Risicare SDK not initialized. Call init() before using tracing features.',\n );\n }\n return tracer;\n}\n\n/**\n * Check whether content tracing (prompt/completion capture) is enabled.\n */\nexport function getTraceContent(): boolean {\n const tracer = getGlobalTracer() as Tracer | undefined;\n return tracer?.traceContent ?? true;\n}\n\n/**\n * Get SDK metrics: exported spans, dropped spans, failed exports, queue stats.\n * Returns zero-valued metrics if SDK is not initialized.\n */\nexport function getMetrics() {\n const client = getGlobalClient() as RisicareClient | undefined;\n return client?.processor.getMetrics() ?? 
{\n exportedSpans: 0,\n droppedSpans: 0,\n failedExports: 0,\n queueSize: 0,\n queueCapacity: 0,\n queueUtilization: 0,\n };\n}\n\n// ─── reportError ──────────────────────────────────────────────────────────\n\n/**\n * Report a caught exception to the self-healing pipeline.\n *\n * Creates an error span that triggers diagnosis and fix generation.\n * This function never throws and is non-blocking.\n *\n * @param error - The caught exception (Error object or string)\n * @param options - Optional attributes and context overrides\n */\nexport function reportError(\n error: unknown,\n options?: { name?: string; attributes?: Record<string, unknown> },\n): void {\n try {\n const tracer = getTracer();\n if (!tracer) return;\n\n const err = error instanceof Error ? error : new Error(String(error));\n const spanName = options?.name ?? `error:${err.constructor.name}`;\n\n tracer.startSpan({ name: spanName, kind: SpanKind.INTERNAL }, (span) => {\n span.setStatus(SpanStatus.ERROR, err.message);\n span.setAttribute('error', true);\n span.setAttribute('error.type', err.constructor.name);\n span.setAttribute('error.message', err.message.slice(0, 2000));\n if (err.stack) span.setAttribute('error.stack', err.stack.slice(0, 4000));\n span.setAttribute('risicare.reported_error', true);\n if (options?.attributes) {\n for (const [k, v] of Object.entries(options.attributes)) {\n span.setAttribute(k, v);\n }\n }\n });\n } catch {\n // Never crash the host application\n debug('reportError failed');\n }\n}\n\n// ─── score ─────────────────────────────────────────────────────────────────\n\n/**\n * Record a custom evaluation score on a trace.\n *\n * Sends the score to the server in a fire-and-forget fashion.\n * This function never throws and is non-blocking.\n *\n * @param traceId - The trace to score\n * @param name - Score name (e.g., \"accuracy\", \"user_satisfaction\")\n * @param value - Score value between 0.0 and 1.0 inclusive\n * @param options - Optional span_id and comment\n 
*/\nexport function score(\n traceId: string,\n name: string,\n value: number,\n options?: { spanId?: string; comment?: string },\n): void {\n try {\n if (typeof value !== 'number' || value < 0.0 || value > 1.0) {\n debug(`score: value must be in [0.0, 1.0], got ${value}. Score not sent.`);\n return;\n }\n if (!traceId || !name) {\n debug('score: traceId and name are required');\n return;\n }\n\n const client = getGlobalClient() as RisicareClient | undefined;\n if (!client?.enabled || !client.config.apiKey) return;\n\n const endpoint = client.config.endpoint.replace(/\\/$/, '');\n const url = `${endpoint}/api/v1/scores`;\n const body = JSON.stringify({\n trace_id: traceId,\n name,\n score: value,\n source: 'sdk',\n ...(options?.spanId && { span_id: options.spanId }),\n ...(options?.comment && { comment: options.comment }),\n });\n\n // Fire-and-forget — never blocks caller\n fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${client.config.apiKey}`,\n },\n body,\n }).catch((err) => debug(`score: send failed: ${err}`));\n } catch {\n // Never crash the host application\n debug('score failed');\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACSA,yBAA4B;;;ACIrB,IAAM,YAAY,OAAO,OAAO;AAAA,EACrC,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,cAAc;AAAA,EACd,MAAM;AAAA,EACN;AAAA,EACA,WAAW;AAAA,EACX,aAAa;AAAA,EACb,SAAS;AAAA,EACT;AAAA,EACA,eAAe;AAAA,EACf,YAAY,OAAO,OAAO,CAAC,CAAC;AAAA,EAC5B,QAAQ,OAAO,OAAO,CAAC,CAAC;AAAA,EACxB,OAAO,OAAO,OAAO,CAAC,CAAC;AAAA,EACvB,WAAW;AAAA,EACX,SAAS;AAAA,EACT,WAAW;AAAA,EACX,eAAe;AAAA,EACf,aAAa;AAAA,EACb,UAAU;AAAA,EACV,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,gBAAgB;AAAA,EAChB,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,YAAY;AAAA,EAEZ,eAAe;AAAE,WAAO;AAAA,EAAM;AAAA,EAC9B,gBAAgB;AAAE,WAAO;AAAA,EAAM;AAAA,EAC/B,YAAY;AAAE,WAAO;AAAA,EAAM;AAAA,EAC3B,WAAW;AAAE,WAAO;AAAA,EAAM;AAAA,EAC1B,UAAU;AAAE,WAAO;AAAA,EAAM;AAAA,EACzB,kBAAkB;AAAE,WAAO;AAAA,EAAM;AAAA,EACjC,eAAe;AAAE,WAAO;AAAA,EAAM;AAAA,EAC9B,gBAAgB;AAAE,WAAO;AAAA,EAAM;AAAA,EAC/B,MAAM;AAAA,EAAC;AAAA,EACP,YAAyB;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,QAAQ,KAAK;AAAA,MACb,MAAM,KAAK;AAAA,MACX,MAAM,KAAK;AAAA,MACX,WAAW,KAAK;AAAA,MAChB,QAAQ,KAAK;AAAA,MACb,YAAY,CAAC;AAAA,MACb,QAAQ,CAAC;AAAA,MACT,OAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF,CAAC;;;AC9CD,8BAAkC;AAGlC,IAAM,IAAI;AACV,IAAM,SAAS;AAYR,SAAS,YAAqB;AACnC,SAAO,EAAE,SAAS,QAAQ;AAC5B;;;ACsKO,SAASA,aAAgC;AAC9C,SAAO,UAAgB;AACzB;;;AJnLO,IAAM,0BAAN,MAA8B;AAAA,EACnC,OAAO;AAAA,EAEC,SAAS,oBAAI,IAAuB;AAAA;AAAA,EAI5C,iBACE,OACA,SACA,OACA,aACM;AACN,UAAM,SAASC,WAAU;AACzB,QAAI,CAAC,QAAQ,QAAS;AAEtB,UAAM,cAAc,cAAc,KAAK,OAAO,IAAI,WAAW,IAAI;AACjE,UAAM,OAAO,OAAO,WAAW;AAAA,MAC7B,MAAM,MAAM,QAAQ,MAAM,SAAS;AAAA,MACnC;AAAA,MACA,cAAc,aAAa,KAAK;AAAA,MAChC,SAAS,aAAa,KAAK;AAAA,MAC3B,YAAY;AAAA,QACV,aAAa;AAAA,QACb,oBAAoB;AAAA,QACpB,GAAI,cAAc,EAAE,2BAA2B,YAAY,IAAI,CAAC;AAAA,QAChE,GAAI,MAAM,QAAQ,EAAE,wBAAwB,MAAM,MAAM,IAAI,CAAC;AAAA,MAC/D;AAAA,IACF,CAAC;AAED,SAAK,OAAO,IAAI,OAAO,EAAE,MAAM,WAAW,KAAK,IAAI,EAAE,CAAC;AAAA,EACxD;AAAA,EAEA,eAAe,UAAmC,OAAqB;AACrE,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AACZ,UAAM,KAAK,aAAa,qBAAqB,KAAK,IAAI,IAAI,MAAM,SAAS;AACzE,UAAM,KAAK,
IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,iBAAiB,OAAc,OAAqB;AAClD,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AACZ,UAAM,KAAK,gBAAgB,KAAK;AAChC,UAAM,KAAK,IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA;AAAA,EAIA,eACE,KACA,UACA,OACA,aACM;AACN,UAAM,SAASA,WAAU;AACzB,QAAI,CAAC,QAAQ,QAAS;AAEtB,UAAM,cAAc,cAAc,KAAK,OAAO,IAAI,WAAW,IAAI;AACjE,UAAM,OAAO,OAAO,WAAW;AAAA,MAC7B,MAAM,IAAI,QAAQ,IAAI,SAAS;AAAA,MAC/B;AAAA,MACA,cAAc,aAAa,KAAK;AAAA,MAChC,SAAS,aAAa,KAAK;AAAA,MAC3B,YAAY;AAAA,QACV,aAAa;AAAA,QACb,oBAAoB;AAAA,QACpB,iBAAiB;AAAA,QACjB,uBAAuB,SAAS;AAAA,MAClC;AAAA,IACF,CAAC;AAED,SAAK,OAAO,IAAI,OAAO,EAAE,MAAM,WAAW,KAAK,IAAI,EAAE,CAAC;AAAA,EACxD;AAAA,EAEA,aACE,QAIA,OACM;AACN,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AAGZ,UAAM,QAAQ,OAAO,WAAW;AAGhC,QAAI,OAAO;AACT,YAAM,KAAK,aAAa;AAAA,QACtB,cAAc,MAAM,gBAAgB,MAAM;AAAA,QAC1C,kBAAkB,MAAM,oBAAoB,MAAM;AAAA,QAClD,aAAa,MAAM,eAAe,MAAM;AAAA,MAC1C,CAAC;AAAA,IACH;AAEA,UAAM,QAAS,OAAO,WAAW,SAC/B,OAAO,WAAW;AACpB,QAAI,OAAO;AACT,YAAM,KAAK,aAAa,EAAE,MAAM,CAAC;AAAA,IACnC;AAEA,UAAM,KAAK,aAAa,qBAAqB,KAAK,IAAI,IAAI,MAAM,SAAS;AACzE,UAAM,KAAK,IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,eAAe,OAAc,OAAqB;AAChD,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AACZ,UAAM,KAAK,gBAAgB,KAAK;AAChC,UAAM,KAAK,IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA;AAAA,EAIA,gBACE,MACA,OACA,OACA,aACM;AACN,UAAM,SAASA,WAAU;AACzB,QAAI,CAAC,QAAQ,QAAS;AAEtB,UAAM,cAAc,cAAc,KAAK,OAAO,IAAI,WAAW,IAAI;AACjE,UAAM,OAAO,OAAO,WAAW;AAAA,MAC7B,MAAM,KAAK,QAAQ;AAAA,MACnB;AAAA,MACA,cAAc,aAAa,KAAK;AAAA,MAChC,SAAS,aAAa,KAAK;AAAA,MAC3B,YAAY;AAAA,QACV,aAAa;AAAA,QACb,oBAAoB;AAAA,QACpB,aAAa,KAAK,QAAQ;AAAA,QAC1B,qBAAqB,OAAO,UAAU;AAAA,MACxC;AAAA,IACF,CAAC;AAED,SAAK,OAAO,IAAI,OAAO,EAAE,MAAM,WAAW,KAAK,IAAI,EAAE,CAAC;AAAA,EACxD;AAAA,EAEA,cAAc,QAAgB,OAAqB;AACjD,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AACZ,UAAM,KAAK,aAAa,sBAAsB,QAAQ,UAAU,CAAC;AACjE,UAAM,KAAK,aAAa,qBAAqB,KAAK,IAAI,IAAI,MAAM,SAAS;AACzE,UAAM,KAAK,IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,gBAAgB,O
AAc,OAAqB;AACjD,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AACZ,UAAM,KAAK,gBAAgB,KAAK;AAChC,UAAM,KAAK,IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AAAA;AAAA,EAIA,qBACE,WACA,QACA,OACA,aACM;AACN,UAAM,SAASA,WAAU;AACzB,QAAI,CAAC,QAAQ,QAAS;AAEtB,UAAM,cAAc,cAAc,KAAK,OAAO,IAAI,WAAW,IAAI;AACjE,UAAM,OAAO,OAAO,WAAW;AAAA,MAC7B,MAAM,UAAU,QAAQ;AAAA,MACxB;AAAA,MACA,cAAc,aAAa,KAAK;AAAA,MAChC,SAAS,aAAa,KAAK;AAAA,MAC3B,YAAY;AAAA,QACV,aAAa;AAAA,QACb,oBAAoB;AAAA,MACtB;AAAA,IACF,CAAC;AAED,SAAK,OAAO,IAAI,OAAO,EAAE,MAAM,WAAW,KAAK,IAAI,EAAE,CAAC;AAAA,EACxD;AAAA,EAEA,mBAAmB,WAAsB,OAAqB;AAC5D,UAAM,QAAQ,KAAK,OAAO,IAAI,KAAK;AACnC,QAAI,CAAC,MAAO;AACZ,UAAM,KAAK;AAAA,MACT;AAAA,MACA,WAAW,UAAU;AAAA,IACvB;AACA,UAAM,KAAK,aAAa,qBAAqB,KAAK,IAAI,IAAI,MAAM,SAAS;AACzE,UAAM,KAAK,IAAI;AACf,SAAK,OAAO,OAAO,KAAK;AAAA,EAC1B;AACF;","names":["getTracer","getTracer"]}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LangChain.js integration via callback handler.
|
|
3
|
+
*
|
|
4
|
+
* LangChain.js uses BaseCallbackHandler from @langchain/core/callbacks/base.
|
|
5
|
+
* This handler receives lifecycle events (handleChainStart, handleLLMEnd, etc.)
|
|
6
|
+
* with a runId (UUID string) for parent-child correlation.
|
|
7
|
+
*
|
|
8
|
+
* Usage:
|
|
9
|
+
* import { RisicareCallbackHandler } from 'risicare/frameworks/langchain';
|
|
10
|
+
* const handler = new RisicareCallbackHandler();
|
|
11
|
+
* const result = await chain.invoke(input, { callbacks: [handler] });
|
|
12
|
+
*
|
|
13
|
+
* Does NOT suppress provider instrumentation -- LangChain callbacks are
|
|
14
|
+
* supplementary to the underlying LLM provider spans.
|
|
15
|
+
*/
|
|
16
|
+
declare class RisicareCallbackHandler {
|
|
17
|
+
name: string;
|
|
18
|
+
private _spans;
|
|
19
|
+
handleChainStart(chain: {
|
|
20
|
+
name?: string;
|
|
21
|
+
_type?: string;
|
|
22
|
+
}, _inputs: Record<string, unknown>, runId: string, parentRunId?: string): void;
|
|
23
|
+
handleChainEnd(_outputs: Record<string, unknown>, runId: string): void;
|
|
24
|
+
handleChainError(error: Error, runId: string): void;
|
|
25
|
+
handleLLMStart(llm: {
|
|
26
|
+
name?: string;
|
|
27
|
+
_type?: string;
|
|
28
|
+
}, _prompts: string[], runId: string, parentRunId?: string): void;
|
|
29
|
+
handleLLMEnd(output: {
|
|
30
|
+
generations?: unknown[][];
|
|
31
|
+
llmOutput?: Record<string, unknown>;
|
|
32
|
+
}, runId: string): void;
|
|
33
|
+
handleLLMError(error: Error, runId: string): void;
|
|
34
|
+
handleToolStart(tool: {
|
|
35
|
+
name?: string;
|
|
36
|
+
}, input: string, runId: string, parentRunId?: string): void;
|
|
37
|
+
handleToolEnd(output: string, runId: string): void;
|
|
38
|
+
handleToolError(error: Error, runId: string): void;
|
|
39
|
+
handleRetrieverStart(retriever: {
|
|
40
|
+
name?: string;
|
|
41
|
+
}, _query: string, runId: string, parentRunId?: string): void;
|
|
42
|
+
handleRetrieverEnd(documents: unknown[], runId: string): void;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export { RisicareCallbackHandler };
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LangChain.js integration via callback handler.
|
|
3
|
+
*
|
|
4
|
+
* LangChain.js uses BaseCallbackHandler from @langchain/core/callbacks/base.
|
|
5
|
+
* This handler receives lifecycle events (handleChainStart, handleLLMEnd, etc.)
|
|
6
|
+
* with a runId (UUID string) for parent-child correlation.
|
|
7
|
+
*
|
|
8
|
+
* Usage:
|
|
9
|
+
* import { RisicareCallbackHandler } from 'risicare/frameworks/langchain';
|
|
10
|
+
* const handler = new RisicareCallbackHandler();
|
|
11
|
+
* const result = await chain.invoke(input, { callbacks: [handler] });
|
|
12
|
+
*
|
|
13
|
+
* Does NOT suppress provider instrumentation -- LangChain callbacks are
|
|
14
|
+
* supplementary to the underlying LLM provider spans.
|
|
15
|
+
*/
|
|
16
|
+
declare class RisicareCallbackHandler {
|
|
17
|
+
name: string;
|
|
18
|
+
private _spans;
|
|
19
|
+
handleChainStart(chain: {
|
|
20
|
+
name?: string;
|
|
21
|
+
_type?: string;
|
|
22
|
+
}, _inputs: Record<string, unknown>, runId: string, parentRunId?: string): void;
|
|
23
|
+
handleChainEnd(_outputs: Record<string, unknown>, runId: string): void;
|
|
24
|
+
handleChainError(error: Error, runId: string): void;
|
|
25
|
+
handleLLMStart(llm: {
|
|
26
|
+
name?: string;
|
|
27
|
+
_type?: string;
|
|
28
|
+
}, _prompts: string[], runId: string, parentRunId?: string): void;
|
|
29
|
+
handleLLMEnd(output: {
|
|
30
|
+
generations?: unknown[][];
|
|
31
|
+
llmOutput?: Record<string, unknown>;
|
|
32
|
+
}, runId: string): void;
|
|
33
|
+
handleLLMError(error: Error, runId: string): void;
|
|
34
|
+
handleToolStart(tool: {
|
|
35
|
+
name?: string;
|
|
36
|
+
}, input: string, runId: string, parentRunId?: string): void;
|
|
37
|
+
handleToolEnd(output: string, runId: string): void;
|
|
38
|
+
handleToolError(error: Error, runId: string): void;
|
|
39
|
+
handleRetrieverStart(retriever: {
|
|
40
|
+
name?: string;
|
|
41
|
+
}, _query: string, runId: string, parentRunId?: string): void;
|
|
42
|
+
handleRetrieverEnd(documents: unknown[], runId: string): void;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export { RisicareCallbackHandler };
|
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
// src/ids.ts
|
|
2
|
+
import { randomBytes } from "crypto";
|
|
3
|
+
|
|
4
|
+
// src/noop.ts
|
|
5
|
+
var NOOP_SPAN = Object.freeze({
|
|
6
|
+
traceId: "00000000000000000000000000000000",
|
|
7
|
+
spanId: "0000000000000000",
|
|
8
|
+
parentSpanId: void 0,
|
|
9
|
+
name: "noop",
|
|
10
|
+
kind: "internal" /* INTERNAL */,
|
|
11
|
+
startTime: "",
|
|
12
|
+
startHrtime: 0,
|
|
13
|
+
endTime: void 0,
|
|
14
|
+
status: "unset" /* UNSET */,
|
|
15
|
+
statusMessage: void 0,
|
|
16
|
+
attributes: Object.freeze({}),
|
|
17
|
+
events: Object.freeze([]),
|
|
18
|
+
links: Object.freeze([]),
|
|
19
|
+
sessionId: void 0,
|
|
20
|
+
agentId: void 0,
|
|
21
|
+
agentName: void 0,
|
|
22
|
+
semanticPhase: void 0,
|
|
23
|
+
llmProvider: void 0,
|
|
24
|
+
llmModel: void 0,
|
|
25
|
+
llmPromptTokens: void 0,
|
|
26
|
+
llmCompletionTokens: void 0,
|
|
27
|
+
llmTotalTokens: void 0,
|
|
28
|
+
llmCostUsd: void 0,
|
|
29
|
+
toolName: void 0,
|
|
30
|
+
toolSuccess: void 0,
|
|
31
|
+
isEnded: true,
|
|
32
|
+
durationMs: 0,
|
|
33
|
+
setAttribute() {
|
|
34
|
+
return this;
|
|
35
|
+
},
|
|
36
|
+
setAttributes() {
|
|
37
|
+
return this;
|
|
38
|
+
},
|
|
39
|
+
setStatus() {
|
|
40
|
+
return this;
|
|
41
|
+
},
|
|
42
|
+
addEvent() {
|
|
43
|
+
return this;
|
|
44
|
+
},
|
|
45
|
+
addLink() {
|
|
46
|
+
return this;
|
|
47
|
+
},
|
|
48
|
+
recordException() {
|
|
49
|
+
return this;
|
|
50
|
+
},
|
|
51
|
+
setLlmFields() {
|
|
52
|
+
return this;
|
|
53
|
+
},
|
|
54
|
+
setToolFields() {
|
|
55
|
+
return this;
|
|
56
|
+
},
|
|
57
|
+
end() {
|
|
58
|
+
},
|
|
59
|
+
toPayload() {
|
|
60
|
+
return {
|
|
61
|
+
traceId: this.traceId,
|
|
62
|
+
spanId: this.spanId,
|
|
63
|
+
name: this.name,
|
|
64
|
+
kind: this.kind,
|
|
65
|
+
startTime: this.startTime,
|
|
66
|
+
status: this.status,
|
|
67
|
+
attributes: {},
|
|
68
|
+
events: [],
|
|
69
|
+
links: []
|
|
70
|
+
};
|
|
71
|
+
}
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
// src/globals.ts
|
|
75
|
+
import { AsyncLocalStorage } from "async_hooks";
|
|
76
|
+
var G = globalThis;
|
|
77
|
+
var PREFIX = "__risicare_";
|
|
78
|
+
function getTracer() {
|
|
79
|
+
return G[PREFIX + "tracer"];
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
// src/client.ts
|
|
83
|
+
function getTracer2() {
|
|
84
|
+
return getTracer();
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
// src/frameworks/langchain.ts
|
|
88
|
+
var RisicareCallbackHandler = class {
|
|
89
|
+
name = "RisicareCallbackHandler";
|
|
90
|
+
_spans = /* @__PURE__ */ new Map();
|
|
91
|
+
// ── Chain lifecycle ────────────────────────────────────────────────────────
|
|
92
|
+
handleChainStart(chain, _inputs, runId, parentRunId) {
|
|
93
|
+
const tracer = getTracer2();
|
|
94
|
+
if (!tracer?.enabled) return;
|
|
95
|
+
const parentEntry = parentRunId ? this._spans.get(parentRunId) : void 0;
|
|
96
|
+
const span = tracer.createSpan({
|
|
97
|
+
name: chain.name || chain._type || "chain",
|
|
98
|
+
kind: "internal" /* INTERNAL */,
|
|
99
|
+
parentSpanId: parentEntry?.span.spanId,
|
|
100
|
+
traceId: parentEntry?.span.traceId,
|
|
101
|
+
attributes: {
|
|
102
|
+
"framework": "langchain",
|
|
103
|
+
"langchain.run_id": runId,
|
|
104
|
+
...parentRunId ? { "langchain.parent_run_id": parentRunId } : {},
|
|
105
|
+
...chain._type ? { "langchain.chain_type": chain._type } : {}
|
|
106
|
+
}
|
|
107
|
+
});
|
|
108
|
+
this._spans.set(runId, { span, startTime: Date.now() });
|
|
109
|
+
}
|
|
110
|
+
handleChainEnd(_outputs, runId) {
|
|
111
|
+
const entry = this._spans.get(runId);
|
|
112
|
+
if (!entry) return;
|
|
113
|
+
entry.span.setAttribute("gen_ai.latency_ms", Date.now() - entry.startTime);
|
|
114
|
+
entry.span.end();
|
|
115
|
+
this._spans.delete(runId);
|
|
116
|
+
}
|
|
117
|
+
handleChainError(error, runId) {
|
|
118
|
+
const entry = this._spans.get(runId);
|
|
119
|
+
if (!entry) return;
|
|
120
|
+
entry.span.recordException(error);
|
|
121
|
+
entry.span.end();
|
|
122
|
+
this._spans.delete(runId);
|
|
123
|
+
}
|
|
124
|
+
// ── LLM lifecycle ─────────────────────────────────────────────────────────
|
|
125
|
+
handleLLMStart(llm, _prompts, runId, parentRunId) {
|
|
126
|
+
const tracer = getTracer2();
|
|
127
|
+
if (!tracer?.enabled) return;
|
|
128
|
+
const parentEntry = parentRunId ? this._spans.get(parentRunId) : void 0;
|
|
129
|
+
const span = tracer.createSpan({
|
|
130
|
+
name: llm.name || llm._type || "llm",
|
|
131
|
+
kind: "llm_call" /* LLM_CALL */,
|
|
132
|
+
parentSpanId: parentEntry?.span.spanId,
|
|
133
|
+
traceId: parentEntry?.span.traceId,
|
|
134
|
+
attributes: {
|
|
135
|
+
"framework": "langchain",
|
|
136
|
+
"langchain.run_id": runId,
|
|
137
|
+
"gen_ai.system": "langchain",
|
|
138
|
+
"gen_ai.prompt.count": _prompts.length
|
|
139
|
+
}
|
|
140
|
+
});
|
|
141
|
+
this._spans.set(runId, { span, startTime: Date.now() });
|
|
142
|
+
}
|
|
143
|
+
handleLLMEnd(output, runId) {
|
|
144
|
+
const entry = this._spans.get(runId);
|
|
145
|
+
if (!entry) return;
|
|
146
|
+
const usage = output.llmOutput?.tokenUsage;
|
|
147
|
+
if (usage) {
|
|
148
|
+
entry.span.setLlmFields({
|
|
149
|
+
promptTokens: usage.promptTokens ?? usage.prompt_tokens,
|
|
150
|
+
completionTokens: usage.completionTokens ?? usage.completion_tokens,
|
|
151
|
+
totalTokens: usage.totalTokens ?? usage.total_tokens
|
|
152
|
+
});
|
|
153
|
+
}
|
|
154
|
+
const model = output.llmOutput?.model ?? output.llmOutput?.modelName;
|
|
155
|
+
if (model) {
|
|
156
|
+
entry.span.setLlmFields({ model });
|
|
157
|
+
}
|
|
158
|
+
entry.span.setAttribute("gen_ai.latency_ms", Date.now() - entry.startTime);
|
|
159
|
+
entry.span.end();
|
|
160
|
+
this._spans.delete(runId);
|
|
161
|
+
}
|
|
162
|
+
handleLLMError(error, runId) {
|
|
163
|
+
const entry = this._spans.get(runId);
|
|
164
|
+
if (!entry) return;
|
|
165
|
+
entry.span.recordException(error);
|
|
166
|
+
entry.span.end();
|
|
167
|
+
this._spans.delete(runId);
|
|
168
|
+
}
|
|
169
|
+
// ── Tool lifecycle ────────────────────────────────────────────────────────
|
|
170
|
+
handleToolStart(tool, input, runId, parentRunId) {
|
|
171
|
+
const tracer = getTracer2();
|
|
172
|
+
if (!tracer?.enabled) return;
|
|
173
|
+
const parentEntry = parentRunId ? this._spans.get(parentRunId) : void 0;
|
|
174
|
+
const span = tracer.createSpan({
|
|
175
|
+
name: tool.name || "tool",
|
|
176
|
+
kind: "tool_call" /* TOOL_CALL */,
|
|
177
|
+
parentSpanId: parentEntry?.span.spanId,
|
|
178
|
+
traceId: parentEntry?.span.traceId,
|
|
179
|
+
attributes: {
|
|
180
|
+
"framework": "langchain",
|
|
181
|
+
"langchain.run_id": runId,
|
|
182
|
+
"tool.name": tool.name || "unknown",
|
|
183
|
+
"tool.input_length": input?.length ?? 0
|
|
184
|
+
}
|
|
185
|
+
});
|
|
186
|
+
this._spans.set(runId, { span, startTime: Date.now() });
|
|
187
|
+
}
|
|
188
|
+
handleToolEnd(output, runId) {
|
|
189
|
+
const entry = this._spans.get(runId);
|
|
190
|
+
if (!entry) return;
|
|
191
|
+
entry.span.setAttribute("tool.output_length", output?.length ?? 0);
|
|
192
|
+
entry.span.setAttribute("gen_ai.latency_ms", Date.now() - entry.startTime);
|
|
193
|
+
entry.span.end();
|
|
194
|
+
this._spans.delete(runId);
|
|
195
|
+
}
|
|
196
|
+
handleToolError(error, runId) {
|
|
197
|
+
const entry = this._spans.get(runId);
|
|
198
|
+
if (!entry) return;
|
|
199
|
+
entry.span.recordException(error);
|
|
200
|
+
entry.span.end();
|
|
201
|
+
this._spans.delete(runId);
|
|
202
|
+
}
|
|
203
|
+
// ── Retriever lifecycle ───────────────────────────────────────────────────
|
|
204
|
+
handleRetrieverStart(retriever, _query, runId, parentRunId) {
|
|
205
|
+
const tracer = getTracer2();
|
|
206
|
+
if (!tracer?.enabled) return;
|
|
207
|
+
const parentEntry = parentRunId ? this._spans.get(parentRunId) : void 0;
|
|
208
|
+
const span = tracer.createSpan({
|
|
209
|
+
name: retriever.name || "retriever",
|
|
210
|
+
kind: "retrieval" /* RETRIEVAL */,
|
|
211
|
+
parentSpanId: parentEntry?.span.spanId,
|
|
212
|
+
traceId: parentEntry?.span.traceId,
|
|
213
|
+
attributes: {
|
|
214
|
+
"framework": "langchain",
|
|
215
|
+
"langchain.run_id": runId
|
|
216
|
+
}
|
|
217
|
+
});
|
|
218
|
+
this._spans.set(runId, { span, startTime: Date.now() });
|
|
219
|
+
}
|
|
220
|
+
handleRetrieverEnd(documents, runId) {
|
|
221
|
+
const entry = this._spans.get(runId);
|
|
222
|
+
if (!entry) return;
|
|
223
|
+
entry.span.setAttribute(
|
|
224
|
+
"retriever.document_count",
|
|
225
|
+
documents?.length ?? 0
|
|
226
|
+
);
|
|
227
|
+
entry.span.setAttribute("gen_ai.latency_ms", Date.now() - entry.startTime);
|
|
228
|
+
entry.span.end();
|
|
229
|
+
this._spans.delete(runId);
|
|
230
|
+
}
|
|
231
|
+
};
|
|
232
|
+
export {
|
|
233
|
+
RisicareCallbackHandler
|
|
234
|
+
};
|
|
235
|
+
//# sourceMappingURL=langchain.js.map
|