@garrix82/reactgenie-dsl 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +10 -0
- package/.env.example +17 -0
- package/.github/workflows/publish.yml +20 -0
- package/README.md +5 -1
- package/package.json +1 -5
- package/dist/__test__/dsl-descriptor.test.d.ts +0 -1
- package/dist/__test__/dsl-descriptor.test.js +0 -27
- package/dist/__test__/dsl-descriptor.test.js.map +0 -1
- package/dist/__test__/example_descriptor.d.ts +0 -125
- package/dist/__test__/example_descriptor.js +0 -607
- package/dist/__test__/example_descriptor.js.map +0 -1
- package/dist/__test__/food_descriptor.state.json +0 -1
- package/dist/__test__/food_descriptor.test.d.ts +0 -74
- package/dist/__test__/food_descriptor.test.js +0 -205
- package/dist/__test__/food_descriptor.test.js.map +0 -1
- package/dist/__test__/nl-interpreter-provider-selection.test.d.ts +0 -1
- package/dist/__test__/nl-interpreter-provider-selection.test.js +0 -73
- package/dist/__test__/nl-interpreter-provider-selection.test.js.map +0 -1
- package/dist/__test__/nl-interpreter.test.d.ts +0 -1
- package/dist/__test__/nl-interpreter.test.js +0 -86
- package/dist/__test__/nl-interpreter.test.js.map +0 -1
- package/dist/decorators/__test__/decorators.test.d.ts +0 -1
- package/dist/decorators/__test__/decorators.test.js +0 -182
- package/dist/decorators/__test__/decorators.test.js.map +0 -1
- package/dist/decorators/__test__/inheritance-descriptor.test.d.ts +0 -1
- package/dist/decorators/__test__/inheritance-descriptor.test.js +0 -107
- package/dist/decorators/__test__/inheritance-descriptor.test.js.map +0 -1
- package/dist/dsl/__test__/dsl-interpreter.test.d.ts +0 -1
- package/dist/dsl/__test__/dsl-interpreter.test.js +0 -334
- package/dist/dsl/__test__/dsl-interpreter.test.js.map +0 -1
- package/dist/dsl/__test__/parser.gen.test.d.ts +0 -1
- package/dist/dsl/__test__/parser.gen.test.js +0 -283
- package/dist/dsl/__test__/parser.gen.test.js.map +0 -1
- package/dist/nl/__test__/context-aware-prompt.test.d.ts +0 -1
- package/dist/nl/__test__/context-aware-prompt.test.js +0 -247
- package/dist/nl/__test__/context-aware-prompt.test.js.map +0 -1
- package/dist/nl/__test__/context-selector.test.d.ts +0 -1
- package/dist/nl/__test__/context-selector.test.js +0 -20
- package/dist/nl/__test__/context-selector.test.js.map +0 -1
- package/dist/nl/__test__/nl-parser-groq-transport.test.d.ts +0 -1
- package/dist/nl/__test__/nl-parser-groq-transport.test.js +0 -87
- package/dist/nl/__test__/nl-parser-groq-transport.test.js.map +0 -1
- package/dist/nl/__test__/nl-parser-openai-parity.test.d.ts +0 -1
- package/dist/nl/__test__/nl-parser-openai-parity.test.js +0 -206
- package/dist/nl/__test__/nl-parser-openai-parity.test.js.map +0 -1
- package/dist/nl/__test__/nl-parser-openai-sampling.test.d.ts +0 -1
- package/dist/nl/__test__/nl-parser-openai-sampling.test.js +0 -44
- package/dist/nl/__test__/nl-parser-openai-sampling.test.js.map +0 -1
- package/dist/nl/__test__/nl-parser-openai-transport.test.d.ts +0 -1
- package/dist/nl/__test__/nl-parser-openai-transport.test.js +0 -55
- package/dist/nl/__test__/nl-parser-openai-transport.test.js.map +0 -1
- package/dist/nl/__test__/nl-parser-utils.test.d.ts +0 -1
- package/dist/nl/__test__/nl-parser-utils.test.js +0 -70
- package/dist/nl/__test__/nl-parser-utils.test.js.map +0 -1
- package/dist/nl/__test__/nl-parser.test.d.ts +0 -1
- package/dist/nl/__test__/nl-parser.test.js +0 -64
- package/dist/nl/__test__/nl-parser.test.js.map +0 -1
- package/dist/nl/__test__/parameter-tuning.test.d.ts +0 -1
- package/dist/nl/__test__/parameter-tuning.test.js +0 -95
- package/dist/nl/__test__/parameter-tuning.test.js.map +0 -1
- package/dist/nl/__test__/semantic-parsing-experiment.test.d.ts +0 -1
- package/dist/nl/__test__/semantic-parsing-experiment.test.js +0 -178
- package/dist/nl/__test__/semantic-parsing-experiment.test.js.map +0 -1
- package/dist/nl/llm-monitoring.test.d.ts +0 -5
- package/dist/nl/llm-monitoring.test.js +0 -101
- package/dist/nl/llm-monitoring.test.js.map +0 -1
- package/lib/__test__/dsl-descriptor.test.ts +0 -27
- package/lib/__test__/example_descriptor.ts +0 -762
- package/lib/__test__/food_descriptor.state.json +0 -1
- package/lib/__test__/food_descriptor.test.ts +0 -331
- package/lib/__test__/nl-interpreter-provider-selection.test.ts +0 -126
- package/lib/__test__/nl-interpreter.test.ts +0 -129
- package/lib/decorators/__test__/decorators.test.ts +0 -177
- package/lib/decorators/__test__/inheritance-descriptor.test.ts +0 -92
- package/lib/decorators/decorators.ts +0 -754
- package/lib/decorators/index.ts +0 -2
- package/lib/decorators/store.ts +0 -47
- package/lib/dsl/__test__/dsl-interpreter.test.ts +0 -453
- package/lib/dsl/__test__/parser.gen.test.ts +0 -296
- package/lib/dsl/dsl-interpreter.ts +0 -974
- package/lib/dsl/index.ts +0 -1
- package/lib/dsl/parser.gen.js +0 -1479
- package/lib/dsl/parser.pegjs +0 -130
- package/lib/dsl-descriptor.ts +0 -241
- package/lib/index.ts +0 -5
- package/lib/nl/__test__/context-aware-prompt.test.ts +0 -372
- package/lib/nl/__test__/context-selector.test.ts +0 -27
- package/lib/nl/__test__/nl-parser-groq-transport.test.ts +0 -139
- package/lib/nl/__test__/nl-parser-openai-parity.test.ts +0 -381
- package/lib/nl/__test__/nl-parser-openai-sampling.test.ts +0 -73
- package/lib/nl/__test__/nl-parser-openai-transport.test.ts +0 -79
- package/lib/nl/__test__/nl-parser-utils.test.ts +0 -98
- package/lib/nl/__test__/nl-parser.test.ts +0 -119
- package/lib/nl/__test__/parameter-tuning.test.ts +0 -137
- package/lib/nl/__test__/semantic-parsing-experiment.test.ts +0 -260
- package/lib/nl/context-selector.ts +0 -123
- package/lib/nl/index.ts +0 -19
- package/lib/nl/llm-monitoring.test.ts +0 -136
- package/lib/nl/llm-monitoring.ts +0 -339
- package/lib/nl/nl-parser-groq.ts +0 -510
- package/lib/nl/nl-parser-utils.ts +0 -310
- package/lib/nl/nl-parser.ts +0 -616
- package/lib/nl/prompt-gen.ts +0 -607
- package/lib/nl/prompt-res.ts +0 -207
- package/lib/nl-interpreter.ts +0 -262
package/lib/nl/llm-monitoring.test.ts
DELETED

@@ -1,136 +0,0 @@
const mockCreateRun = jest.fn();
const mockTraceable = jest.fn((fn) => fn);
const mockGetCurrentRunTree = jest.fn();

jest.mock("langsmith", () => ({
  Client: jest.fn().mockImplementation(() => ({
    createRun: mockCreateRun,
  })),
}));

jest.mock("langsmith/traceable", () => ({
  traceable: mockTraceable,
  getCurrentRunTree: mockGetCurrentRunTree,
}));

function loadMonitor() {
  return require("./llm-monitoring") as typeof import("./llm-monitoring");
}

describe("LLMMonitor tracing", () => {
  beforeEach(() => {
    jest.clearAllMocks();
    jest.resetModules();
  });

  it("wraps traced calls with langsmith traceable chain runs", async () => {
    const { getLLMMonitor } = loadMonitor();
    const monitor = getLLMMonitor({
      apiKey: "ls-test-key",
      project: "reactgenie-dsl",
    });

    await monitor.traceCall(
      "parse-command",
      { model: "gpt-4o-mini", provider: "openai" },
      async () => "ok"
    );

    expect(mockTraceable).toHaveBeenCalledWith(
      expect.any(Function),
      expect.objectContaining({
        name: "parse-command",
        run_type: "chain",
        project_name: "reactgenie-dsl",
      })
    );
  });

  it("logs llm child runs with token and cost metadata", async () => {
    const childRun = {
      postRun: jest.fn().mockResolvedValue(undefined),
      end: jest.fn().mockResolvedValue(undefined),
      patchRun: jest.fn().mockResolvedValue(undefined),
    };
    const createChild = jest.fn().mockReturnValue(childRun);
    mockGetCurrentRunTree.mockReturnValue({ createChild });

    const { getLLMMonitor } = loadMonitor();
    const monitor = getLLMMonitor({
      apiKey: "ls-test-key",
      project: "reactgenie-dsl",
    });

    await monitor.logCall({
      model: "gpt-4o-mini",
      provider: "openai",
      promptTokens: 12,
      completionTokens: 4,
      totalTokens: 16,
      inputCost: 0.00003,
      outputCost: 0.00008,
      totalCost: 0.00011,
      prompt: "hello",
      completion: "world",
      latency: 42,
      timestamp: new Date("2026-03-11T10:00:00.000Z"),
      error: "upstream failed",
    });

    expect(createChild).toHaveBeenCalledWith(
      expect.objectContaining({
        run_type: "llm",
        inputs: { prompt: "hello" },
      })
    );
    expect(childRun.end).toHaveBeenCalledWith(
      expect.objectContaining({
        completion: "world",
        usage_metadata: expect.objectContaining({
          input_tokens: 12,
          output_tokens: 4,
          total_tokens: 16,
          input_cost: 0.00003,
          output_cost: 0.00008,
          total_cost: 0.00011,
        }),
      }),
      "upstream failed",
      expect.any(Number)
    );
    expect(childRun.patchRun).toHaveBeenCalled();
  });

  it("does not emit synthetic failed llm runs from traceCall failures", async () => {
    const captureException = jest.fn();
    const { getLLMMonitor } = loadMonitor();
    const monitor = getLLMMonitor({
      apiKey: "ls-test-key",
      project: "reactgenie-dsl",
      telemetryBridge: {
        captureException,
      },
    });

    await expect(
      monitor.traceCall(
        "parse-command",
        { model: "gpt-4o-mini", provider: "openai" },
        async () => {
          throw new Error("trace failed");
        }
      )
    ).rejects.toThrow("trace failed");

    expect(captureException).toHaveBeenCalledWith(
      expect.objectContaining({ message: "trace failed" }),
      expect.objectContaining({
        source: "llm-monitor",
        name: "parse-command",
        model: "gpt-4o-mini",
        provider: "openai",
      })
    );
    expect(mockCreateRun).not.toHaveBeenCalled();
  });
});
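Aside: the suite's loadMonitor() helper re-requires the module instead of importing it at the top because llm-monitoring.ts (deleted below) caches a module-level singleton in LLMMonitor.instance; paired with jest.resetModules() in beforeEach, each test sees a fresh instance. A minimal sketch of that reset pattern, using hypothetical counter.ts/getCounter names rather than anything from this package:

// counter.ts — hypothetical module holding module-level state, like LLMMonitor.instance
let count = 0;
export function getCounter(): number {
  return ++count;
}

// counter.test.ts — resetModules + a fresh require clears that state between tests
beforeEach(() => {
  jest.resetModules(); // evicts counter.ts from Jest's module registry
});

it("starts from a clean module in each test", () => {
  // A top-of-file `import` would pin the first registry copy; calling require()
  // after resetModules() loads a new copy whose `count` is back at 0.
  const { getCounter } = require("./counter") as typeof import("./counter");
  expect(getCounter()).toBe(1);
});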
package/lib/nl/llm-monitoring.ts
DELETED
@@ -1,339 +0,0 @@
import { Client as LangSmithClient, RunTree } from "langsmith";

/**
 * Interface for LLM call metrics
 */
export interface LLMCallMetrics {
  model: string;
  provider: "openai" | "groq";
  promptTokens?: number;
  completionTokens?: number;
  totalTokens?: number;
  inputCost?: number;
  outputCost?: number;
  totalCost?: number;
  prompt: string;
  completion: string | null;
  latency: number;
  timestamp: Date;
  error?: string;
}

export type LLMMonitorOptions = {
  apiKey?: string;
  project?: string;
  endpoint?: string;
  telemetryBridge?: LLMMonitorTelemetryBridge;
};

export type LLMMonitorTelemetryBridge = {
  captureException?: (error: Error, context?: Record<string, unknown>) => void;
  captureMessage?: (
    message: string,
    level?: "error" | "warn" | "info" | "debug",
    context?: Record<string, unknown>
  ) => void;
};

const VOICE_PIPELINE_NAME = "voice.pipeline";
const VOICE_PIPELINE_VERSION = "v1";
const VOICE_PIPELINE_FLOW = "speech_to_response";

type LangSmithTraceableModule = {
  traceable?: <T extends (...args: any[]) => any>(
    wrappedFunc: T,
    config?: Record<string, unknown>
  ) => T;
  getCurrentRunTree?: (
    permitAbsentRunTree?: boolean
  ) => (RunTree & { createChild?: (config: Record<string, unknown>) => RunTree }) | undefined;
};

let langSmithTraceableModuleCache: LangSmithTraceableModule | null | undefined;

function loadLangSmithTraceableModule(): LangSmithTraceableModule | null {
  if (langSmithTraceableModuleCache !== undefined) {
    return langSmithTraceableModuleCache;
  }

  try {
    const dynamicRequire: ((id: string) => unknown) | undefined =
      typeof require === "function"
        ? require
        : (globalThis as { require?: (id: string) => unknown }).require;

    if (!dynamicRequire) {
      langSmithTraceableModuleCache = null;
      return null;
    }

    // Keep this out of static dependency analysis so React Native doesn't bundle Node-only async_hooks.
    const traceableModuleName = ["langsmith", "traceable"].join("/");
    const loaded = dynamicRequire(traceableModuleName) as LangSmithTraceableModule;
    langSmithTraceableModuleCache = loaded;
    return loaded;
  } catch {
    langSmithTraceableModuleCache = null;
    return null;
  }
}

function isReactNativeRuntime(): boolean {
  const maybeNavigator = (globalThis as { navigator?: { product?: string } }).navigator;
  return maybeNavigator?.product === "ReactNative";
}

function maskKey(s?: string) {
  if (!s) return s;
  if (s.length <= 10) return "****";
  return `${s.slice(0, 4)}...${s.slice(-4)}`;
}

export class LLMMonitor {
  private static instance: LLMMonitor | null = null;
  private langsmith: LangSmithClient | null = null;
  private enabled: boolean = false;
  private apiKey?: string;
  private project?: string;
  private endpoint?: string;
  private currentThread?: string | number | null = null;
  private telemetryBridge: LLMMonitorTelemetryBridge | null = null;
  private authErrorLogged: boolean = false;

  private constructor(options?: LLMMonitorOptions) {
    this.configure(options);
  }

  static getInstance(options?: LLMMonitorOptions): LLMMonitor {
    if (!LLMMonitor.instance) {
      LLMMonitor.instance = new LLMMonitor(options);
      return LLMMonitor.instance;
    }

    const shouldReconfigure =
      (typeof options?.apiKey === "string" &&
        options.apiKey !== LLMMonitor.instance.apiKey) ||
      (typeof options?.endpoint === "string" &&
        options.endpoint !== LLMMonitor.instance.endpoint);

    if (shouldReconfigure) {
      LLMMonitor.instance.configure(options);
    } else if (options?.project) {
      LLMMonitor.instance.project = options.project;
      if (options.telemetryBridge) {
        LLMMonitor.instance.telemetryBridge = options.telemetryBridge;
      }
    } else if (options?.telemetryBridge) {
      LLMMonitor.instance.telemetryBridge = options.telemetryBridge;
    }

    return LLMMonitor.instance;
  }

  private configure(options?: LLMMonitorOptions) {
    this.apiKey = options?.apiKey ?? this.apiKey;
    this.project = options?.project ?? this.project;
    this.endpoint = options?.endpoint ?? this.endpoint;
    this.telemetryBridge = options?.telemetryBridge || this.telemetryBridge;

    if (this.apiKey) {
      this.langsmith = new LangSmithClient({
        apiKey: this.apiKey,
        ...(this.endpoint ? { apiUrl: this.endpoint } : {}),
      });
      this.enabled = true;
      this.authErrorLogged = false;
      console.log(`LLMMonitor: LangSmith initialized (key: ${maskKey(this.apiKey)})`);
    } else {
      this.langsmith = null;
      this.enabled = false;
      this.authErrorLogged = false;
      console.log("LLMMonitor: LangSmith API key not found, monitoring disabled.");
      console.log(
        "Provide a LangSmith key via app config and pass it into the LLM monitor."
      );
    }
  }

  /**
   * Set the current thread id for grouping runs/traces.
   * Accepts a string or number. Pass `null`/`undefined` to clear.
   */
  public setThread(threadId?: string | number | null) {
    this.currentThread = threadId ?? null;
  }

  /**
   * Get the project name from environment or use default
   */
  private getProjectName(): string {
    if (this.project) return this.project;
    console.warn("LLMMonitor: No project specified, using 'default'.");
    return "default";
  }

  private buildPipelineMetadata(timestamp: Date): Record<string, unknown> {
    return {
      timestamp: timestamp.toISOString(),
      thread_id: this.currentThread ?? null,
      pipeline_name: this.currentThread ? VOICE_PIPELINE_NAME : null,
      pipeline_version: this.currentThread ? VOICE_PIPELINE_VERSION : null,
      pipeline_flow: this.currentThread ? VOICE_PIPELINE_FLOW : null,
    };
  }

  private buildUsageMetadata(metrics: LLMCallMetrics): Record<string, number> | undefined {
    const usageMetadata: Record<string, number> = {};

    if (typeof metrics.promptTokens === "number") {
      usageMetadata.input_tokens = metrics.promptTokens;
    }
    if (typeof metrics.completionTokens === "number") {
      usageMetadata.output_tokens = metrics.completionTokens;
    }
    if (typeof metrics.totalTokens === "number") {
      usageMetadata.total_tokens = metrics.totalTokens;
    }
    if (typeof metrics.inputCost === "number") {
      usageMetadata.input_cost = metrics.inputCost;
    }
    if (typeof metrics.outputCost === "number") {
      usageMetadata.output_cost = metrics.outputCost;
    }
    if (typeof metrics.totalCost === "number") {
      usageMetadata.total_cost = metrics.totalCost;
    }

    return Object.keys(usageMetadata).length > 0 ? usageMetadata : undefined;
  }

  /**
   * Log an LLM call with metrics
   */
  async logCall(metrics: LLMCallMetrics): Promise<void> {
    // Only send to LangSmith if configured
    if (!this.enabled || !this.langsmith) {
      return;
    }

    try {
      const usageMetadata = this.buildUsageMetadata(metrics);
      const outputs: Record<string, unknown> = {
        completion: metrics.completion,
      };
      if (usageMetadata) {
        outputs.usage_metadata = usageMetadata;
      }

      const runExtraMetadata: Record<string, unknown> = {
        provider: metrics.provider,
        model: metrics.model,
        ...this.buildPipelineMetadata(metrics.timestamp),
      };
      if (usageMetadata) {
        runExtraMetadata.usage_metadata = usageMetadata;
      }

      const runConfig = {
        name: `${metrics.provider}_${metrics.model}`,
        run_type: "llm",
        project_name: this.getProjectName(),
        start_time: metrics.timestamp.getTime(),
        end_time: metrics.timestamp.getTime() + metrics.latency,
        inputs: {
          prompt: metrics.prompt,
        },
        extra: {
          metadata: runExtraMetadata,
          invocation_params: {
            ls_provider: metrics.provider,
            ls_model_name: metrics.model,
            ls_model_type: "chat",
          },
        },
        outputs,
        error: metrics.error,
      } as const;

      // React Native runtime does not support the UUID random source required by RunTree internals.
      // Use direct client.createRun there.
      if (isReactNativeRuntime()) {
        await this.langsmith.createRun(runConfig);
        return;
      }

      const traceableModule = loadLangSmithTraceableModule();
      const parentRun = traceableModule?.getCurrentRunTree?.(true);
      if (parentRun && typeof parentRun.createChild === "function") {
        const runTree = parentRun.createChild(runConfig);
        await runTree.postRun();
        await runTree.end(outputs, metrics.error, runConfig.end_time);
        await runTree.patchRun();
        return;
      }

      await this.langsmith.createRun(runConfig);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      if (message.includes("status [403]")) {
        if (!this.authErrorLogged) {
          this.authErrorLogged = true;
          console.error(
            "LangSmith rejected tracing with 403. Check API key scope, endpoint region, and workspace ID."
          );
        }
        return;
      }
      console.error("Failed to log to LangSmith:", error);
    }
  }

  /**
   * Create a traced wrapper for async functions
   */
  async traceCall<T>(
    name: string,
    metadata: { model: string; provider: "openai" | "groq" },
    fn: () => Promise<T>
  ): Promise<T> {
    try {
      const traced =
        this.enabled && this.langsmith && !isReactNativeRuntime()
          ? loadLangSmithTraceableModule()?.traceable?.(fn, {
              name,
              run_type: "chain",
              project_name: this.getProjectName(),
              client: this.langsmith,
              metadata: {
                model: metadata.model,
                provider: metadata.provider,
                ...this.buildPipelineMetadata(new Date()),
              },
            }) || fn
          : fn;

      const result = await traced();
      return result;
    } catch (error) {
      const normalizedError =
        error instanceof Error ? error : new Error(String(error));

      this.telemetryBridge?.captureException?.(normalizedError, {
        source: "llm-monitor",
        name,
        model: metadata.model,
        provider: metadata.provider,
        thread_id: this.currentThread ?? null,
        pipeline_name: this.currentThread ? VOICE_PIPELINE_NAME : undefined,
        pipeline_version: this.currentThread ? VOICE_PIPELINE_VERSION : undefined,
        pipeline_flow: this.currentThread ? VOICE_PIPELINE_FLOW : undefined,
      });

      throw error;
    }
  }
}

export function getLLMMonitor(options?: LLMMonitorOptions): LLMMonitor {
  return LLMMonitor.getInstance(options);
}
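For orientation, a consumer-side sketch of the API removed above. This is an illustration derived from the deleted source, not code shipped by the package: the option values, the thread id, and callOpenAI are placeholders.

import { getLLMMonitor } from "./llm-monitoring";

// Hypothetical transport; stands in for the package's real OpenAI/Groq parsers.
declare function callOpenAI(prompt: string): Promise<string>;

async function parseUtterance(utterance: string): Promise<string> {
  const monitor = getLLMMonitor({
    apiKey: process.env.LANGSMITH_API_KEY, // placeholder config source
    project: "reactgenie-dsl",
  });
  monitor.setThread("session-123"); // attaches voice.pipeline metadata to subsequent runs

  // traceCall opens a LangSmith "chain" run; logCall nests an "llm" child run under it
  // when a current run tree exists, otherwise falls back to client.createRun.
  return monitor.traceCall(
    "parse-command",
    { model: "gpt-4o-mini", provider: "openai" },
    async () => {
      const started = Date.now();
      const completion = await callOpenAI(utterance);
      await monitor.logCall({
        model: "gpt-4o-mini",
        provider: "openai",
        prompt: utterance,
        completion,
        latency: Date.now() - started,
        timestamp: new Date(started),
      });
      return completion;
    }
  );
}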