@livekit/agents 1.0.22 → 1.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/inference/api_protos.cjs +2 -2
- package/dist/inference/api_protos.cjs.map +1 -1
- package/dist/inference/api_protos.d.cts +16 -16
- package/dist/inference/api_protos.d.ts +16 -16
- package/dist/inference/api_protos.js +2 -2
- package/dist/inference/api_protos.js.map +1 -1
- package/dist/ipc/job_proc_lazy_main.cjs +35 -1
- package/dist/ipc/job_proc_lazy_main.cjs.map +1 -1
- package/dist/ipc/job_proc_lazy_main.js +13 -1
- package/dist/ipc/job_proc_lazy_main.js.map +1 -1
- package/dist/job.cjs +52 -6
- package/dist/job.cjs.map +1 -1
- package/dist/job.d.cts +2 -0
- package/dist/job.d.ts +2 -0
- package/dist/job.d.ts.map +1 -1
- package/dist/job.js +52 -6
- package/dist/job.js.map +1 -1
- package/dist/llm/llm.cjs +38 -3
- package/dist/llm/llm.cjs.map +1 -1
- package/dist/llm/llm.d.cts +1 -0
- package/dist/llm/llm.d.ts +1 -0
- package/dist/llm/llm.d.ts.map +1 -1
- package/dist/llm/llm.js +38 -3
- package/dist/llm/llm.js.map +1 -1
- package/dist/log.cjs +34 -10
- package/dist/log.cjs.map +1 -1
- package/dist/log.d.cts +7 -0
- package/dist/log.d.ts +7 -0
- package/dist/log.d.ts.map +1 -1
- package/dist/log.js +34 -11
- package/dist/log.js.map +1 -1
- package/dist/telemetry/index.cjs +23 -2
- package/dist/telemetry/index.cjs.map +1 -1
- package/dist/telemetry/index.d.cts +4 -1
- package/dist/telemetry/index.d.ts +4 -1
- package/dist/telemetry/index.d.ts.map +1 -1
- package/dist/telemetry/index.js +27 -2
- package/dist/telemetry/index.js.map +1 -1
- package/dist/telemetry/logging.cjs +65 -0
- package/dist/telemetry/logging.cjs.map +1 -0
- package/dist/telemetry/logging.d.cts +21 -0
- package/dist/telemetry/logging.d.ts +21 -0
- package/dist/telemetry/logging.d.ts.map +1 -0
- package/dist/telemetry/logging.js +40 -0
- package/dist/telemetry/logging.js.map +1 -0
- package/dist/telemetry/otel_http_exporter.cjs +144 -0
- package/dist/telemetry/otel_http_exporter.cjs.map +1 -0
- package/dist/telemetry/otel_http_exporter.d.cts +62 -0
- package/dist/telemetry/otel_http_exporter.d.ts +62 -0
- package/dist/telemetry/otel_http_exporter.d.ts.map +1 -0
- package/dist/telemetry/otel_http_exporter.js +120 -0
- package/dist/telemetry/otel_http_exporter.js.map +1 -0
- package/dist/telemetry/pino_otel_transport.cjs +217 -0
- package/dist/telemetry/pino_otel_transport.cjs.map +1 -0
- package/dist/telemetry/pino_otel_transport.d.cts +58 -0
- package/dist/telemetry/pino_otel_transport.d.ts +58 -0
- package/dist/telemetry/pino_otel_transport.d.ts.map +1 -0
- package/dist/telemetry/pino_otel_transport.js +189 -0
- package/dist/telemetry/pino_otel_transport.js.map +1 -0
- package/dist/telemetry/traces.cjs +225 -16
- package/dist/telemetry/traces.cjs.map +1 -1
- package/dist/telemetry/traces.d.cts +17 -0
- package/dist/telemetry/traces.d.ts +17 -0
- package/dist/telemetry/traces.d.ts.map +1 -1
- package/dist/telemetry/traces.js +211 -14
- package/dist/telemetry/traces.js.map +1 -1
- package/dist/tts/tts.cjs +62 -5
- package/dist/tts/tts.cjs.map +1 -1
- package/dist/tts/tts.d.cts +2 -0
- package/dist/tts/tts.d.ts +2 -0
- package/dist/tts/tts.d.ts.map +1 -1
- package/dist/tts/tts.js +62 -5
- package/dist/tts/tts.js.map +1 -1
- package/dist/utils.cjs +6 -0
- package/dist/utils.cjs.map +1 -1
- package/dist/utils.d.cts +1 -0
- package/dist/utils.d.ts +1 -0
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +5 -0
- package/dist/utils.js.map +1 -1
- package/dist/voice/agent_activity.cjs +93 -7
- package/dist/voice/agent_activity.cjs.map +1 -1
- package/dist/voice/agent_activity.d.cts +3 -0
- package/dist/voice/agent_activity.d.ts +3 -0
- package/dist/voice/agent_activity.d.ts.map +1 -1
- package/dist/voice/agent_activity.js +93 -7
- package/dist/voice/agent_activity.js.map +1 -1
- package/dist/voice/agent_session.cjs +122 -27
- package/dist/voice/agent_session.cjs.map +1 -1
- package/dist/voice/agent_session.d.cts +15 -0
- package/dist/voice/agent_session.d.ts +15 -0
- package/dist/voice/agent_session.d.ts.map +1 -1
- package/dist/voice/agent_session.js +122 -27
- package/dist/voice/agent_session.js.map +1 -1
- package/dist/voice/audio_recognition.cjs +69 -22
- package/dist/voice/audio_recognition.cjs.map +1 -1
- package/dist/voice/audio_recognition.d.cts +5 -0
- package/dist/voice/audio_recognition.d.ts +5 -0
- package/dist/voice/audio_recognition.d.ts.map +1 -1
- package/dist/voice/audio_recognition.js +69 -22
- package/dist/voice/audio_recognition.js.map +1 -1
- package/dist/voice/generation.cjs +43 -3
- package/dist/voice/generation.cjs.map +1 -1
- package/dist/voice/generation.d.ts.map +1 -1
- package/dist/voice/generation.js +43 -3
- package/dist/voice/generation.js.map +1 -1
- package/dist/voice/report.cjs +3 -2
- package/dist/voice/report.cjs.map +1 -1
- package/dist/voice/report.d.cts +7 -1
- package/dist/voice/report.d.ts +7 -1
- package/dist/voice/report.d.ts.map +1 -1
- package/dist/voice/report.js +3 -2
- package/dist/voice/report.js.map +1 -1
- package/package.json +8 -2
- package/src/inference/api_protos.ts +2 -2
- package/src/ipc/job_proc_lazy_main.ts +12 -1
- package/src/job.ts +59 -10
- package/src/llm/llm.ts +48 -5
- package/src/log.ts +52 -15
- package/src/telemetry/index.ts +22 -4
- package/src/telemetry/logging.ts +55 -0
- package/src/telemetry/otel_http_exporter.ts +191 -0
- package/src/telemetry/pino_otel_transport.ts +265 -0
- package/src/telemetry/traces.ts +320 -20
- package/src/tts/tts.ts +71 -9
- package/src/utils.ts +5 -0
- package/src/voice/agent_activity.ts +140 -22
- package/src/voice/agent_session.ts +174 -34
- package/src/voice/audio_recognition.ts +85 -26
- package/src/voice/generation.ts +59 -7
- package/src/voice/report.ts +10 -4
package/dist/job.d.cts
CHANGED
@@ -59,6 +59,7 @@ export declare class JobContext {
     get workerId(): string;
     /** @returns The room the agent was called into */
     get room(): Room;
+    get info(): RunningJobInfo;
     /** @returns The agent's participant if connected to the room, otherwise `undefined` */
     get agent(): LocalParticipant | undefined;
     /** @returns The global inference executor */
@@ -93,6 +94,7 @@ export declare class JobContext {
      * @throws {@link FunctionExistsError} if an entrypoint already exists
      */
     addParticipantEntrypoint(callback: (job: JobContext, p: RemoteParticipant) => Promise<void>): void;
+    initRecording(): Promise<void>;
 }
 export declare class JobProcess {
     #private;
package/dist/job.d.ts
CHANGED
@@ -59,6 +59,7 @@ export declare class JobContext {
     get workerId(): string;
     /** @returns The room the agent was called into */
     get room(): Room;
+    get info(): RunningJobInfo;
     /** @returns The agent's participant if connected to the room, otherwise `undefined` */
     get agent(): LocalParticipant | undefined;
     /** @returns The global inference executor */
@@ -93,6 +94,7 @@ export declare class JobContext {
      * @throws {@link FunctionExistsError} if an entrypoint already exists
      */
     addParticipantEntrypoint(callback: (job: JobContext, p: RemoteParticipant) => Promise<void>): void;
+    initRecording(): Promise<void>;
 }
 export declare class JobProcess {
     #private;
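
The two declaration additions above are the new public surface of this release on JobContext: an info getter exposing the RunningJobInfo the job was started with, and initRecording(), which (per the dist/job.js diff below) returns early unless the job's server URL is a LiveKit Cloud endpoint. A minimal sketch of how an entrypoint might use them, based only on the signatures declared in this diff; the defineAgent wrapper and entry wiring are assumed from the package's existing public API and are not part of this diff:

import { defineAgent, type JobContext } from '@livekit/agents';

export default defineAgent({
  entry: async (ctx: JobContext) => {
    // New in 1.0.23: opt in to cloud session recording (no-op off LiveKit Cloud).
    await ctx.initRecording();

    await ctx.connect();

    // New in 1.0.23: the running job info (url, token, workerId, ...) is now readable.
    console.log(`job ${ctx.info.job.id} running on worker ${ctx.info.workerId}`);
  },
});

Note that initRecording() reads the room SID from the accepted job (this.job.room.sid) rather than from the connected room, so it can be called before connect().
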
package/dist/job.d.ts.map
CHANGED
package/dist/job.js
CHANGED
@@ -1,6 +1,8 @@
 import { ParticipantKind, RoomEvent, TrackKind } from "@livekit/rtc-node";
 import { AsyncLocalStorage } from "node:async_hooks";
 import { log } from "./log.js";
+import { flushOtelLogs, setupCloudTracer, uploadSessionReport } from "./telemetry/index.js";
+import { isCloud } from "./utils.js";
 import { createSessionReport } from "./voice/report.js";
 const jobContextStorage = new AsyncLocalStorage();
 function getJobContext() {
@@ -68,6 +70,9 @@ class JobContext {
   get room() {
     return this.#room;
   }
+  get info() {
+    return this.#info;
+  }
   /** @returns The agent's participant if connected to the room, otherwise `undefined` */
   get agent() {
     return this.#room.localParticipant;
@@ -155,7 +160,8 @@ class JobContext {
       options: targetSession.options,
       events: targetSession._recordedEvents,
       enableUserDataTraining: true,
-      chatHistory: targetSession.history.copy()
+      chatHistory: targetSession.history.copy(),
+      startedAt: targetSession._startedAt
     });
   }
   async _onSessionEnd() {
@@ -164,11 +170,39 @@ class JobContext {
       return;
     }
     const report = this.makeSessionReport(session);
-    this.#logger.debug("Session ended, report generated", {
-      jobId: report.jobId,
-      roomId: report.roomId,
-      eventsCount: report.events.length
-    });
+    const url = new URL(this.#info.url);
+    if (report.enableRecording && isCloud(url)) {
+      try {
+        await uploadSessionReport({
+          agentName: this.job.agentName,
+          cloudHostname: url.hostname,
+          report
+        });
+        this.#logger.info(
+          {
+            jobId: report.jobId,
+            roomId: report.roomId
+          },
+          "Session report uploaded to LiveKit Cloud"
+        );
+      } catch (error) {
+        this.#logger.error({ error }, "Failed to upload session report");
+      }
+    }
+    this.#logger.debug(
+      {
+        jobId: report.jobId,
+        roomId: report.roomId,
+        eventsCount: report.events.length
+      },
+      "Session ended, report generated"
+    );
+    session._recordedEvents = [];
+    try {
+      await flushOtelLogs();
+    } catch (error) {
+      this.#logger.error({ error }, "Failed to flush OTEL logs");
+    }
   }
   /**
    * Gracefully shuts down the job, and runs all shutdown promises.
@@ -204,6 +238,18 @@ class JobContext {
     }
     this.#participantEntrypoints.push(callback);
   }
+  async initRecording() {
+    const url = new URL(this.#info.url);
+    if (!isCloud(url)) {
+      return;
+    }
+    this.#logger.debug({ hostname: url.hostname }, "Configuring session recording (cloud tracer)");
+    await setupCloudTracer({
+      roomId: this.job.room.sid,
+      jobId: this.job.id,
+      cloudHostname: url.hostname
+    });
+  }
 }
 class JobProcess {
   #pid = process.pid;
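
The new _onSessionEnd body shown above keeps the session-report upload and the OTEL log flush each inside their own try/catch, so a telemetry failure is logged but never blocks job shutdown, and _recordedEvents is cleared in every case. A generic sketch of that best-effort pattern; the helper name and console logging here are illustrative, not part of the package:

// Run a best-effort cleanup step: log failures, never rethrow.
// Mirrors how _onSessionEnd treats uploadSessionReport() and flushOtelLogs().
async function bestEffort(label: string, step: () => Promise<void>): Promise<void> {
  try {
    await step();
  } catch (error) {
    console.error(`Failed to ${label}`, error);
  }
}

// e.g. await bestEffort('flush OTEL logs', () => flushOtelLogs());
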
package/dist/job.js.map
CHANGED
package/dist/llm/llm.cjs
CHANGED
@@ -25,6 +25,7 @@ module.exports = __toCommonJS(llm_exports);
 var import_node_events = require("node:events");
 var import_exceptions = require("../_exceptions.cjs");
 var import_log = require("../log.cjs");
+var import_telemetry = require("../telemetry/index.cjs");
 var import_utils = require("../utils.cjs");
 var import_chat_context = require("./chat_context.cjs");
 class LLM extends import_node_events.EventEmitter {
@@ -60,6 +61,7 @@ class LLMStream {
   #llm;
   #chatCtx;
   #toolCtx;
+  #llmRequestSpan;
   constructor(llm, {
     chatCtx,
     toolCtx,
76
78
|
});
|
|
77
79
|
(0, import_utils.startSoon)(() => this.mainTask().then(() => this.queue.close()));
|
|
78
80
|
}
|
|
79
|
-
async
|
|
81
|
+
_mainTaskImpl = async (span) => {
|
|
82
|
+
this.#llmRequestSpan = span;
|
|
83
|
+
span.setAttribute(import_telemetry.traceTypes.ATTR_GEN_AI_REQUEST_MODEL, this.#llm.model);
|
|
80
84
|
for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {
|
|
81
85
|
try {
|
|
82
|
-
return await
|
|
86
|
+
return await import_telemetry.tracer.startActiveSpan(
|
|
87
|
+
async (attemptSpan) => {
|
|
88
|
+
attemptSpan.setAttribute(import_telemetry.traceTypes.ATTR_RETRY_COUNT, i);
|
|
89
|
+
try {
|
|
90
|
+
return await this.run();
|
|
91
|
+
} catch (error) {
|
|
92
|
+
(0, import_telemetry.recordException)(attemptSpan, (0, import_utils.toError)(error));
|
|
93
|
+
throw error;
|
|
94
|
+
}
|
|
95
|
+
},
|
|
96
|
+
{ name: "llm_request_run" }
|
|
97
|
+
);
|
|
83
98
|
} catch (error) {
|
|
84
99
|
if (error instanceof import_exceptions.APIError) {
|
|
85
100
|
const retryInterval = this._connOptions._intervalForRetry(i);
|
|
@@ -108,7 +123,11 @@
         }
       }
     }
-  }
+  };
+  mainTask = async () => import_telemetry.tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {
+    name: "llm_request",
+    endOnExit: false
+  });
   emitError({ error, recoverable }) {
     this.#llm.emit("error", {
       type: "llm_error",
@@ -123,6 +142,7 @@
     let ttft = BigInt(-1);
     let requestId = "";
     let usage;
+    let completionStartTime;
     for await (const ev of this.queue) {
       if (this.abortController.signal.aborted) {
         break;
@@ -131,6 +151,7 @@
       requestId = ev.id;
       if (ttft === BigInt(-1)) {
         ttft = process.hrtime.bigint() - startTime;
+        completionStartTime = (/* @__PURE__ */ new Date()).toISOString();
       }
       if (ev.usage) {
         usage = ev.usage;
@@ -158,6 +179,20 @@
         return ((usage == null ? void 0 : usage.completionTokens) || 0) / (durationMs / 1e3);
       })()
     };
+    if (this.#llmRequestSpan) {
+      this.#llmRequestSpan.setAttribute(import_telemetry.traceTypes.ATTR_LLM_METRICS, JSON.stringify(metrics));
+      this.#llmRequestSpan.setAttributes({
+        [import_telemetry.traceTypes.ATTR_GEN_AI_USAGE_INPUT_TOKENS]: metrics.promptTokens,
+        [import_telemetry.traceTypes.ATTR_GEN_AI_USAGE_OUTPUT_TOKENS]: metrics.completionTokens
+      });
+      if (completionStartTime) {
+        this.#llmRequestSpan.setAttribute(
+          import_telemetry.traceTypes.ATTR_LANGFUSE_COMPLETION_START_TIME,
+          completionStartTime
+        );
+      }
+      this.#llmRequestSpan.end();
+    }
     this.#llm.emit("metrics_collected", metrics);
   }
   /** The function context of this stream. */
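
With the change above, every LLMStream request now runs inside an llm_request span (kept open via endOnExit: false so monitorMetrics can attach token usage before ending it), with one llm_request_run child span per retry attempt. The metrics object written to the span is the same one emitted to listeners, so existing metrics consumers keep working unchanged. A small sketch; the llm namespace import is assumed from the package's public exports and the listener target is any concrete LLM instance supplied by a plugin:

import type { llm } from '@livekit/agents';

// Attach a metrics listener to an LLM instance created elsewhere (e.g. by a plugin).
// The payload fields used here (requestId, ttftMs, promptTokens, completionTokens)
// all appear in the LLMMetrics object built in monitorMetrics above.
function logLlmMetrics(myLLM: llm.LLM): void {
  myLLM.on('metrics_collected', (m) => {
    console.log(
      `llm request ${m.requestId}: ttft=${m.ttftMs}ms, tokens in/out=${m.promptTokens}/${m.completionTokens}`,
    );
  });
}
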
package/dist/llm/llm.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../src/llm/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport { EventEmitter } from 'node:events';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { LLMMetrics } from '../metrics/base.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { AsyncIterableQueue, delay, startSoon, toError } from '../utils.js';\nimport { type ChatContext, type ChatRole, type FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport interface ChoiceDelta {\n role: ChatRole;\n content?: string;\n toolCalls?: FunctionCall[];\n}\n\nexport interface CompletionUsage {\n completionTokens: number;\n promptTokens: number;\n promptCachedTokens: number;\n totalTokens: number;\n}\n\nexport interface ChatChunk {\n id: string;\n delta?: ChoiceDelta;\n usage?: CompletionUsage;\n}\n\nexport interface LLMError {\n type: 'llm_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type LLMCallbacks = {\n ['metrics_collected']: (metrics: LLMMetrics) => void;\n ['error']: (error: LLMError) => void;\n};\n\nexport abstract class LLM extends (EventEmitter as new () => TypedEmitter<LLMCallbacks>) {\n constructor() {\n super();\n }\n\n abstract label(): string;\n\n /**\n * Get the model name/identifier for this LLM instance.\n *\n * @returns The model name if available, \"unknown\" otherwise.\n *\n * @remarks\n * Plugins should override this property to provide their model information.\n */\n get model(): string {\n return 'unknown';\n }\n\n /**\n * Returns a {@link LLMStream} that can be used to push text and receive LLM responses.\n */\n abstract chat({\n chatCtx,\n toolCtx,\n connOptions,\n parallelToolCalls,\n toolChoice,\n extraKwargs,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n }): LLMStream;\n\n /**\n * Pre-warm connection to the LLM service\n */\n prewarm(): void {\n // Default implementation - subclasses can override\n }\n\n async aclose(): Promise<void> {\n // Default implementation - subclasses can override\n }\n}\n\nexport abstract class LLMStream implements AsyncIterableIterator<ChatChunk> {\n protected output = new AsyncIterableQueue<ChatChunk>();\n protected queue = new AsyncIterableQueue<ChatChunk>();\n protected closed = false;\n protected abortController = new AbortController();\n protected _connOptions: APIConnectOptions;\n protected logger = log();\n\n #llm: LLM;\n #chatCtx: ChatContext;\n #toolCtx?: ToolContext;\n\n constructor(\n llm: LLM,\n {\n chatCtx,\n toolCtx,\n connOptions,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions: APIConnectOptions;\n },\n ) {\n this.#llm = llm;\n this.#chatCtx = chatCtx;\n this.#toolCtx = toolCtx;\n this._connOptions = connOptions;\n this.monitorMetrics();\n this.abortController.signal.addEventListener('abort', () => {\n // TODO (AJS-37) clean this up when we refactor with streams\n this.output.close();\n this.closed = true;\n });\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // is run **after** the constructor has finished. 
Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n startSoon(() => this.mainTask().then(() => this.queue.close()));\n }\n\n private async mainTask() {\n // TODO(brian): PR3 - Add span wrapping: tracer.startActiveSpan('llm_request', ..., { endOnExit: false })\n for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {\n try {\n // TODO(brian): PR3 - Add span for retry attempts: tracer.startActiveSpan('llm_request_run', ...)\n return await this.run();\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = this._connOptions._intervalForRetry(i);\n\n if (this._connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this._connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate LLM completion after ${this._connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n this.emitError({ error, recoverable: true });\n this.logger.warn(\n { llm: this.#llm.label(), attempt: i + 1, error },\n `failed to generate LLM completion, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n }\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n this.#llm.emit('error', {\n type: 'llm_error',\n timestamp: Date.now(),\n label: this.#llm.label(),\n error,\n recoverable,\n });\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let ttft: bigint = BigInt(-1);\n let requestId = '';\n let usage: CompletionUsage | undefined;\n\n for await (const ev of this.queue) {\n if (this.abortController.signal.aborted) {\n break;\n }\n this.output.put(ev);\n requestId = ev.id;\n if (ttft === BigInt(-1)) {\n ttft = process.hrtime.bigint() - startTime;\n }\n if (ev.usage) {\n usage = ev.usage;\n }\n }\n this.output.close();\n\n const duration = process.hrtime.bigint() - startTime;\n const durationMs = Math.trunc(Number(duration / BigInt(1000000)));\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n timestamp: Date.now(),\n requestId,\n ttftMs: ttft === BigInt(-1) ? -1 : Math.trunc(Number(ttft / BigInt(1000000))),\n durationMs,\n cancelled: this.abortController.signal.aborted,\n label: this.#llm.label(),\n completionTokens: usage?.completionTokens || 0,\n promptTokens: usage?.promptTokens || 0,\n promptCachedTokens: usage?.promptCachedTokens || 0,\n totalTokens: usage?.totalTokens || 0,\n tokensPerSecond: (() => {\n if (durationMs <= 0) {\n return 0;\n }\n return (usage?.completionTokens || 0) / (durationMs / 1000);\n })(),\n };\n this.#llm.emit('metrics_collected', metrics);\n }\n\n protected abstract run(): Promise<void>;\n\n /** The function context of this stream. */\n get toolCtx(): ToolContext | undefined {\n return this.#toolCtx;\n }\n\n /** The initial chat context of this stream. */\n get chatCtx(): ChatContext {\n return this.#chatCtx;\n }\n\n /** The connection options for this stream. 
*/\n get connOptions(): APIConnectOptions {\n return this._connOptions;\n }\n\n next(): Promise<IteratorResult<ChatChunk>> {\n return this.output.next();\n }\n\n close() {\n this.abortController.abort();\n }\n\n [Symbol.asyncIterator](): LLMStream {\n return this;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,yBAA6B;AAC7B,wBAA6C;AAC7C,iBAAoB;AAGpB,mBAA8D;AAC9D,0BAAmE;AAmC5D,MAAe,YAAa,gCAAsD;AAAA,EACvF,cAAc;AACZ,UAAM;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAwBA,UAAgB;AAAA,EAEhB;AAAA,EAEA,MAAM,SAAwB;AAAA,EAE9B;AACF;AAEO,MAAe,UAAsD;AAAA,EAChE,SAAS,IAAI,gCAA8B;AAAA,EAC3C,QAAQ,IAAI,gCAA8B;AAAA,EAC1C,SAAS;AAAA,EACT,kBAAkB,IAAI,gBAAgB;AAAA,EACtC;AAAA,EACA,aAAS,gBAAI;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YACE,KACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAKA;AACA,SAAK,OAAO;AACZ,SAAK,WAAW;AAChB,SAAK,WAAW;AAChB,SAAK,eAAe;AACpB,SAAK,eAAe;AACpB,SAAK,gBAAgB,OAAO,iBAAiB,SAAS,MAAM;AAE1D,WAAK,OAAO,MAAM;AAClB,WAAK,SAAS;AAAA,IAChB,CAAC;AAMD,gCAAU,MAAM,KAAK,SAAS,EAAE,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EAChE;AAAA,EAEA,MAAc,WAAW;AAEvB,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,WAAW,GAAG,KAAK;AACvD,UAAI;AAEF,eAAO,MAAM,KAAK,IAAI;AAAA,MACxB,SAAS,OAAO;AACd,YAAI,iBAAiB,4BAAU;AAC7B,gBAAM,gBAAgB,KAAK,aAAa,kBAAkB,CAAC;AAE3D,cAAI,KAAK,aAAa,aAAa,KAAK,CAAC,MAAM,WAAW;AACxD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,aAAa,UAAU;AAC3C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,qCAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,aAAa,WAAW,CAAC;AAAA,cAClF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AACL,iBAAK,UAAU,EAAE,OAAO,aAAa,KAAK,CAAC;AAC3C,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,MAAM,GAAG,SAAS,IAAI,GAAG,MAAM;AAAA,cAChD,kDAAkD,aAAa;AAAA,YACjE;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,sBAAM,oBAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAAK,UAAU,EAAE,WAAO,sBAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAChB,QAAI;AAEJ,qBAAiB,MAAM,KAAK,OAAO;AACjC,UAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,MACF;AACA,WAAK,OAAO,IAAI,EAAE;AAClB,kBAAY,GAAG;AACf,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AAAA,MACnC;AACA,UAAI,GAAG,OAAO;AACZ,gBAAQ,GAAG;AAAA,MACb;AAAA,IACF;AACA,SAAK,OAAO,MAAM;AAElB,UAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,UAAM,aAAa,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAChE,UAAM,UAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,MAC5E;AAAA,MACA,WAAW,KAAK,gBAAgB,OAAO;AAAA,MACvC,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB,mBAAkB,+BAAO,qBAAoB;AAAA,MAC7C,eAAc,+BAAO,iBAAgB;AAAA,MACrC,qBAAoB,+BAAO,uBAAsB;AAAA,MACjD,cAAa,+BAAO,gBAAe;AAAA,MACnC,kBAAkB,MAAM;AACtB,YAAI,cAAc,GAAG;AACnB,iBAAO;AAAA,QACT;AACA,iBAAQ,+BAAO,qBAAoB,MAAM,aAAa;AAAA,MACxD,GAAG;AAAA,IACL;AACA,SAAK,KAAK,KAAK,qBAAqB,OAAO;AAAA,EAC7C;AAAA;AAAA,EAKA,IAAI,UAAmC;AACrC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,UAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,cAAiC;AACnC,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAA2C;AACzC,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,QAAQ;AACN,SAAK,gBAAgB,MAAM;AAAA,EAC7B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAe;AAClC,WAAO;AAAA,EACT;AACF;","names":[]}
+
{"version":3,"sources":["../../src/llm/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport type { Span } from '@opentelemetry/api';\nimport { EventEmitter } from 'node:events';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { LLMMetrics } from '../metrics/base.js';\nimport { recordException, traceTypes, tracer } from '../telemetry/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { AsyncIterableQueue, delay, startSoon, toError } from '../utils.js';\nimport { type ChatContext, type ChatRole, type FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport interface ChoiceDelta {\n role: ChatRole;\n content?: string;\n toolCalls?: FunctionCall[];\n}\n\nexport interface CompletionUsage {\n completionTokens: number;\n promptTokens: number;\n promptCachedTokens: number;\n totalTokens: number;\n}\n\nexport interface ChatChunk {\n id: string;\n delta?: ChoiceDelta;\n usage?: CompletionUsage;\n}\n\nexport interface LLMError {\n type: 'llm_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type LLMCallbacks = {\n ['metrics_collected']: (metrics: LLMMetrics) => void;\n ['error']: (error: LLMError) => void;\n};\n\nexport abstract class LLM extends (EventEmitter as new () => TypedEmitter<LLMCallbacks>) {\n constructor() {\n super();\n }\n\n abstract label(): string;\n\n /**\n * Get the model name/identifier for this LLM instance.\n *\n * @returns The model name if available, \"unknown\" otherwise.\n *\n * @remarks\n * Plugins should override this property to provide their model information.\n */\n get model(): string {\n return 'unknown';\n }\n\n /**\n * Returns a {@link LLMStream} that can be used to push text and receive LLM responses.\n */\n abstract chat({\n chatCtx,\n toolCtx,\n connOptions,\n parallelToolCalls,\n toolChoice,\n extraKwargs,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n }): LLMStream;\n\n /**\n * Pre-warm connection to the LLM service\n */\n prewarm(): void {\n // Default implementation - subclasses can override\n }\n\n async aclose(): Promise<void> {\n // Default implementation - subclasses can override\n }\n}\n\nexport abstract class LLMStream implements AsyncIterableIterator<ChatChunk> {\n protected output = new AsyncIterableQueue<ChatChunk>();\n protected queue = new AsyncIterableQueue<ChatChunk>();\n protected closed = false;\n protected abortController = new AbortController();\n protected _connOptions: APIConnectOptions;\n protected logger = log();\n\n #llm: LLM;\n #chatCtx: ChatContext;\n #toolCtx?: ToolContext;\n #llmRequestSpan?: Span;\n\n constructor(\n llm: LLM,\n {\n chatCtx,\n toolCtx,\n connOptions,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions: APIConnectOptions;\n },\n ) {\n this.#llm = llm;\n this.#chatCtx = chatCtx;\n this.#toolCtx = toolCtx;\n this._connOptions = connOptions;\n this.monitorMetrics();\n this.abortController.signal.addEventListener('abort', () => {\n // TODO (AJS-37) clean this up when we refactor with streams\n this.output.close();\n this.closed = true;\n });\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // 
is run **after** the constructor has finished. Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n startSoon(() => this.mainTask().then(() => this.queue.close()));\n }\n\n private _mainTaskImpl = async (span: Span) => {\n this.#llmRequestSpan = span;\n span.setAttribute(traceTypes.ATTR_GEN_AI_REQUEST_MODEL, this.#llm.model);\n\n for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {\n try {\n return await tracer.startActiveSpan(\n async (attemptSpan) => {\n attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);\n try {\n return await this.run();\n } catch (error) {\n recordException(attemptSpan, toError(error));\n throw error;\n }\n },\n { name: 'llm_request_run' },\n );\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = this._connOptions._intervalForRetry(i);\n\n if (this._connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this._connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate LLM completion after ${this._connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n this.emitError({ error, recoverable: true });\n this.logger.warn(\n { llm: this.#llm.label(), attempt: i + 1, error },\n `failed to generate LLM completion, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n };\n\n private mainTask = async () =>\n tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {\n name: 'llm_request',\n endOnExit: false,\n });\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n this.#llm.emit('error', {\n type: 'llm_error',\n timestamp: Date.now(),\n label: this.#llm.label(),\n error,\n recoverable,\n });\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let ttft: bigint = BigInt(-1);\n let requestId = '';\n let usage: CompletionUsage | undefined;\n let completionStartTime: string | undefined;\n\n for await (const ev of this.queue) {\n if (this.abortController.signal.aborted) {\n break;\n }\n this.output.put(ev);\n requestId = ev.id;\n if (ttft === BigInt(-1)) {\n ttft = process.hrtime.bigint() - startTime;\n completionStartTime = new Date().toISOString();\n }\n if (ev.usage) {\n usage = ev.usage;\n }\n }\n this.output.close();\n\n const duration = process.hrtime.bigint() - startTime;\n const durationMs = Math.trunc(Number(duration / BigInt(1000000)));\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n timestamp: Date.now(),\n requestId,\n ttftMs: ttft === BigInt(-1) ? 
-1 : Math.trunc(Number(ttft / BigInt(1000000))),\n durationMs,\n cancelled: this.abortController.signal.aborted,\n label: this.#llm.label(),\n completionTokens: usage?.completionTokens || 0,\n promptTokens: usage?.promptTokens || 0,\n promptCachedTokens: usage?.promptCachedTokens || 0,\n totalTokens: usage?.totalTokens || 0,\n tokensPerSecond: (() => {\n if (durationMs <= 0) {\n return 0;\n }\n return (usage?.completionTokens || 0) / (durationMs / 1000);\n })(),\n };\n\n if (this.#llmRequestSpan) {\n this.#llmRequestSpan.setAttribute(traceTypes.ATTR_LLM_METRICS, JSON.stringify(metrics));\n\n this.#llmRequestSpan.setAttributes({\n [traceTypes.ATTR_GEN_AI_USAGE_INPUT_TOKENS]: metrics.promptTokens,\n [traceTypes.ATTR_GEN_AI_USAGE_OUTPUT_TOKENS]: metrics.completionTokens,\n });\n\n if (completionStartTime) {\n this.#llmRequestSpan.setAttribute(\n traceTypes.ATTR_LANGFUSE_COMPLETION_START_TIME,\n completionStartTime,\n );\n }\n\n // End the span now that metrics are collected\n this.#llmRequestSpan.end();\n }\n\n this.#llm.emit('metrics_collected', metrics);\n }\n\n protected abstract run(): Promise<void>;\n\n /** The function context of this stream. */\n get toolCtx(): ToolContext | undefined {\n return this.#toolCtx;\n }\n\n /** The initial chat context of this stream. */\n get chatCtx(): ChatContext {\n return this.#chatCtx;\n }\n\n /** The connection options for this stream. */\n get connOptions(): APIConnectOptions {\n return this._connOptions;\n }\n\n next(): Promise<IteratorResult<ChatChunk>> {\n return this.output.next();\n }\n\n close() {\n this.abortController.abort();\n }\n\n [Symbol.asyncIterator](): LLMStream {\n return this;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,yBAA6B;AAC7B,wBAA6C;AAC7C,iBAAoB;AAEpB,uBAAoD;AAEpD,mBAA8D;AAC9D,0BAAmE;AAmC5D,MAAe,YAAa,gCAAsD;AAAA,EACvF,cAAc;AACZ,UAAM;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAwBA,UAAgB;AAAA,EAEhB;AAAA,EAEA,MAAM,SAAwB;AAAA,EAE9B;AACF;AAEO,MAAe,UAAsD;AAAA,EAChE,SAAS,IAAI,gCAA8B;AAAA,EAC3C,QAAQ,IAAI,gCAA8B;AAAA,EAC1C,SAAS;AAAA,EACT,kBAAkB,IAAI,gBAAgB;AAAA,EACtC;AAAA,EACA,aAAS,gBAAI;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YACE,KACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAKA;AACA,SAAK,OAAO;AACZ,SAAK,WAAW;AAChB,SAAK,WAAW;AAChB,SAAK,eAAe;AACpB,SAAK,eAAe;AACpB,SAAK,gBAAgB,OAAO,iBAAiB,SAAS,MAAM;AAE1D,WAAK,OAAO,MAAM;AAClB,WAAK,SAAS;AAAA,IAChB,CAAC;AAMD,gCAAU,MAAM,KAAK,SAAS,EAAE,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EAChE;AAAA,EAEQ,gBAAgB,OAAO,SAAe;AAC5C,SAAK,kBAAkB;AACvB,SAAK,aAAa,4BAAW,2BAA2B,KAAK,KAAK,KAAK;AAEvE,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,WAAW,GAAG,KAAK;AACvD,UAAI;AACF,eAAO,MAAM,wBAAO;AAAA,UAClB,OAAO,gBAAgB;AACrB,wBAAY,aAAa,4BAAW,kBAAkB,CAAC;AACvD,gBAAI;AACF,qBAAO,MAAM,KAAK,IAAI;AAAA,YACxB,SAAS,OAAO;AACd,oDAAgB,iBAAa,sBAAQ,KAAK,CAAC;AAC3C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,UACA,EAAE,MAAM,kBAAkB;AAAA,QAC5B;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,4BAAU;AAC7B,gBAAM,gBAAgB,KAAK,aAAa,kBAAkB,CAAC;AAE3D,cAAI,KAAK,aAAa,aAAa,KAAK,CAAC,MAAM,WAAW;AACxD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,aAAa,UAAU;AAC3C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,qCAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,aAAa,WAAW,CAAC;AAAA,cAClF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AACL,iBAAK,UAAU,EAAE,OAAO,aAAa,KAAK,CAAC;AAC3C,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,MAAM,GAAG,SAAS,IAAI,GAAG,MAAM;AAAA,cAChD,kDAAkD,aAAa;AAAA,YACjE;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,sBAAM,oBAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL
,eAAK,UAAU,EAAE,WAAO,sBAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,WAAW,YACjB,wBAAO,gBAAgB,OAAO,SAAS,KAAK,cAAc,IAAI,GAAG;AAAA,IAC/D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AAAA,EAEK,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAChB,QAAI;AACJ,QAAI;AAEJ,qBAAiB,MAAM,KAAK,OAAO;AACjC,UAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,MACF;AACA,WAAK,OAAO,IAAI,EAAE;AAClB,kBAAY,GAAG;AACf,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AACjC,+BAAsB,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC/C;AACA,UAAI,GAAG,OAAO;AACZ,gBAAQ,GAAG;AAAA,MACb;AAAA,IACF;AACA,SAAK,OAAO,MAAM;AAElB,UAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,UAAM,aAAa,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAChE,UAAM,UAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,MAC5E;AAAA,MACA,WAAW,KAAK,gBAAgB,OAAO;AAAA,MACvC,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB,mBAAkB,+BAAO,qBAAoB;AAAA,MAC7C,eAAc,+BAAO,iBAAgB;AAAA,MACrC,qBAAoB,+BAAO,uBAAsB;AAAA,MACjD,cAAa,+BAAO,gBAAe;AAAA,MACnC,kBAAkB,MAAM;AACtB,YAAI,cAAc,GAAG;AACnB,iBAAO;AAAA,QACT;AACA,iBAAQ,+BAAO,qBAAoB,MAAM,aAAa;AAAA,MACxD,GAAG;AAAA,IACL;AAEA,QAAI,KAAK,iBAAiB;AACxB,WAAK,gBAAgB,aAAa,4BAAW,kBAAkB,KAAK,UAAU,OAAO,CAAC;AAEtF,WAAK,gBAAgB,cAAc;AAAA,QACjC,CAAC,4BAAW,8BAA8B,GAAG,QAAQ;AAAA,QACrD,CAAC,4BAAW,+BAA+B,GAAG,QAAQ;AAAA,MACxD,CAAC;AAED,UAAI,qBAAqB;AACvB,aAAK,gBAAgB;AAAA,UACnB,4BAAW;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAGA,WAAK,gBAAgB,IAAI;AAAA,IAC3B;AAEA,SAAK,KAAK,KAAK,qBAAqB,OAAO;AAAA,EAC7C;AAAA;AAAA,EAKA,IAAI,UAAmC;AACrC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,UAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,cAAiC;AACnC,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAA2C;AACzC,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,QAAQ;AACN,SAAK,gBAAgB,MAAM;AAAA,EAC7B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAe;AAClC,WAAO;AAAA,EACT;AACF;","names":[]}
package/dist/llm/llm.d.cts
CHANGED
@@ -74,6 +74,7 @@ export declare abstract class LLMStream implements AsyncIterableIterator<ChatChu
 toolCtx?: ToolContext;
 connOptions: APIConnectOptions;
 });
+private _mainTaskImpl;
 private mainTask;
 private emitError;
 protected monitorMetrics(): Promise<void>;
package/dist/llm/llm.d.ts
CHANGED
@@ -74,6 +74,7 @@ export declare abstract class LLMStream implements AsyncIterableIterator<ChatChu
 toolCtx?: ToolContext;
 connOptions: APIConnectOptions;
 });
+private _mainTaskImpl;
 private mainTask;
 private emitError;
 protected monitorMetrics(): Promise<void>;
package/dist/llm/llm.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/llm/llm.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,iBAAiB,IAAI,YAAY,EAAE,MAAM,wBAAwB,CAAC;
+
{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../src/llm/llm.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,iBAAiB,IAAI,YAAY,EAAE,MAAM,wBAAwB,CAAC;AAKhF,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAErD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AACrD,OAAO,EAAE,kBAAkB,EAA6B,MAAM,aAAa,CAAC;AAC5E,OAAO,EAAE,KAAK,WAAW,EAAE,KAAK,QAAQ,EAAE,KAAK,YAAY,EAAE,MAAM,mBAAmB,CAAC;AACvF,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEjE,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,QAAQ,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,YAAY,EAAE,CAAC;CAC5B;AAED,MAAM,WAAW,eAAe;IAC9B,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,kBAAkB,EAAE,MAAM,CAAC;IAC3B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,CAAC,EAAE,WAAW,CAAC;IACpB,KAAK,CAAC,EAAE,eAAe,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,KAAK,CAAC;IACb,WAAW,EAAE,OAAO,CAAC;CACtB;AAED,MAAM,MAAM,YAAY,GAAG;IACzB,CAAC,mBAAmB,CAAC,EAAE,CAAC,OAAO,EAAE,UAAU,KAAK,IAAI,CAAC;IACrD,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,QAAQ,KAAK,IAAI,CAAC;CACtC,CAAC;kCAE2D,aAAa,YAAY,CAAC;AAAvF,8BAAsB,GAAI,SAAQ,QAAsD;;IAKtF,QAAQ,CAAC,KAAK,IAAI,MAAM;IAExB;;;;;;;OAOG;IACH,IAAI,KAAK,IAAI,MAAM,CAElB;IAED;;OAEG;IACH,QAAQ,CAAC,IAAI,CAAC,EACZ,OAAO,EACP,OAAO,EACP,WAAW,EACX,iBAAiB,EACjB,UAAU,EACV,WAAW,GACZ,EAAE;QACD,OAAO,EAAE,WAAW,CAAC;QACrB,OAAO,CAAC,EAAE,WAAW,CAAC;QACtB,WAAW,CAAC,EAAE,iBAAiB,CAAC;QAChC,iBAAiB,CAAC,EAAE,OAAO,CAAC;QAC5B,UAAU,CAAC,EAAE,UAAU,CAAC;QACxB,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACvC,GAAG,SAAS;IAEb;;OAEG;IACH,OAAO,IAAI,IAAI;IAIT,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;CAG9B;AAED,8BAAsB,SAAU,YAAW,qBAAqB,CAAC,SAAS,CAAC;;IACzE,SAAS,CAAC,MAAM,gCAAuC;IACvD,SAAS,CAAC,KAAK,gCAAuC;IACtD,SAAS,CAAC,MAAM,UAAS;IACzB,SAAS,CAAC,eAAe,kBAAyB;IAClD,SAAS,CAAC,YAAY,EAAE,iBAAiB,CAAC;IAC1C,SAAS,CAAC,MAAM,wBAAS;gBAQvB,GAAG,EAAE,GAAG,EACR,EACE,OAAO,EACP,OAAO,EACP,WAAW,GACZ,EAAE;QACD,OAAO,EAAE,WAAW,CAAC;QACrB,OAAO,CAAC,EAAE,WAAW,CAAC;QACtB,WAAW,EAAE,iBAAiB,CAAC;KAChC;IAoBH,OAAO,CAAC,aAAa,CAgDnB;IAEF,OAAO,CAAC,QAAQ,CAIX;IAEL,OAAO,CAAC,SAAS;cAUD,cAAc;IAmE9B,SAAS,CAAC,QAAQ,CAAC,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IAEvC,2CAA2C;IAC3C,IAAI,OAAO,IAAI,WAAW,GAAG,SAAS,CAErC;IAED,+CAA+C;IAC/C,IAAI,OAAO,IAAI,WAAW,CAEzB;IAED,8CAA8C;IAC9C,IAAI,WAAW,IAAI,iBAAiB,CAEnC;IAED,IAAI,IAAI,OAAO,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC;IAI1C,KAAK;IAIL,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,SAAS;CAGpC"}
package/dist/llm/llm.js
CHANGED
@@ -1,6 +1,7 @@
 import { EventEmitter } from "node:events";
 import { APIConnectionError, APIError } from "../_exceptions.js";
 import { log } from "../log.js";
+import { recordException, traceTypes, tracer } from "../telemetry/index.js";
 import { AsyncIterableQueue, delay, startSoon, toError } from "../utils.js";
 import {} from "./chat_context.js";
 class LLM extends EventEmitter {
@@ -36,6 +37,7 @@ class LLMStream {
 #llm;
 #chatCtx;
 #toolCtx;
+#llmRequestSpan;
 constructor(llm, {
 chatCtx,
 toolCtx,
@@ -52,10 +54,23 @@ class LLMStream {
 });
 startSoon(() => this.mainTask().then(() => this.queue.close()));
 }
-async
+_mainTaskImpl = async (span) => {
+this.#llmRequestSpan = span;
+span.setAttribute(traceTypes.ATTR_GEN_AI_REQUEST_MODEL, this.#llm.model);
 for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {
 try {
-return await
+return await tracer.startActiveSpan(
+async (attemptSpan) => {
+attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);
+try {
+return await this.run();
+} catch (error) {
+recordException(attemptSpan, toError(error));
+throw error;
+}
+},
+{ name: "llm_request_run" }
+);
 } catch (error) {
 if (error instanceof APIError) {
 const retryInterval = this._connOptions._intervalForRetry(i);
@@ -84,7 +99,11 @@ class LLMStream {
 }
 }
 }
-}
+};
+mainTask = async () => tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {
+name: "llm_request",
+endOnExit: false
+});
 emitError({ error, recoverable }) {
 this.#llm.emit("error", {
 type: "llm_error",
@@ -99,6 +118,7 @@ class LLMStream {
 let ttft = BigInt(-1);
 let requestId = "";
 let usage;
+let completionStartTime;
 for await (const ev of this.queue) {
 if (this.abortController.signal.aborted) {
 break;
@@ -107,6 +127,7 @@ class LLMStream {
 requestId = ev.id;
 if (ttft === BigInt(-1)) {
 ttft = process.hrtime.bigint() - startTime;
+completionStartTime = (/* @__PURE__ */ new Date()).toISOString();
 }
 if (ev.usage) {
 usage = ev.usage;
@@ -134,6 +155,20 @@ class LLMStream {
 return ((usage == null ? void 0 : usage.completionTokens) || 0) / (durationMs / 1e3);
 })()
 };
+if (this.#llmRequestSpan) {
+this.#llmRequestSpan.setAttribute(traceTypes.ATTR_LLM_METRICS, JSON.stringify(metrics));
+this.#llmRequestSpan.setAttributes({
+[traceTypes.ATTR_GEN_AI_USAGE_INPUT_TOKENS]: metrics.promptTokens,
+[traceTypes.ATTR_GEN_AI_USAGE_OUTPUT_TOKENS]: metrics.completionTokens
+});
+if (completionStartTime) {
+this.#llmRequestSpan.setAttribute(
+traceTypes.ATTR_LANGFUSE_COMPLETION_START_TIME,
+completionStartTime
+);
+}
+this.#llmRequestSpan.end();
+}
 this.#llm.emit("metrics_collected", metrics);
 }
 /** The function context of this stream. */
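The final hunk above is the other half of that design: once the chunk queue drains, monitorMetrics() serializes the collected metrics onto the still-open llm_request span, records input and output token usage plus the optional completion-start timestamp, and only then ends the span. A minimal sketch of that finishing step, with plain string keys standing in for the package's traceTypes constants (the literal names below are assumptions, not the exported values):

```ts
import type { Span } from '@opentelemetry/api';

interface LLMMetricsLike {
  requestId: string;
  ttftMs: number;
  durationMs: number;
  promptTokens: number;
  completionTokens: number;
}

// Mirrors the end of monitorMetrics(): attach the metrics to the deferred
// llm_request span, then close it.
function finishLlmRequestSpan(
  span: Span,
  metrics: LLMMetricsLike,
  completionStartTime?: string, // ISO timestamp captured when the first chunk arrived
): void {
  span.setAttribute('lk.llm_metrics', JSON.stringify(metrics)); // traceTypes.ATTR_LLM_METRICS
  span.setAttributes({
    'gen_ai.usage.input_tokens': metrics.promptTokens, // traceTypes.ATTR_GEN_AI_USAGE_INPUT_TOKENS
    'gen_ai.usage.output_tokens': metrics.completionTokens, // traceTypes.ATTR_GEN_AI_USAGE_OUTPUT_TOKENS
  });
  if (completionStartTime) {
    // traceTypes.ATTR_LANGFUSE_COMPLETION_START_TIME in the package
    span.setAttribute('langfuse.completion_start_time', completionStartTime);
  }
  span.end();
}
```

Because the metrics loop also runs when the stream is aborted, the deferred span is closed in that path as well.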
package/dist/llm/llm.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/llm/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport { EventEmitter } from 'node:events';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { LLMMetrics } from '../metrics/base.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { AsyncIterableQueue, delay, startSoon, toError } from '../utils.js';\nimport { type ChatContext, type ChatRole, type FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport interface ChoiceDelta {\n role: ChatRole;\n content?: string;\n toolCalls?: FunctionCall[];\n}\n\nexport interface CompletionUsage {\n completionTokens: number;\n promptTokens: number;\n promptCachedTokens: number;\n totalTokens: number;\n}\n\nexport interface ChatChunk {\n id: string;\n delta?: ChoiceDelta;\n usage?: CompletionUsage;\n}\n\nexport interface LLMError {\n type: 'llm_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type LLMCallbacks = {\n ['metrics_collected']: (metrics: LLMMetrics) => void;\n ['error']: (error: LLMError) => void;\n};\n\nexport abstract class LLM extends (EventEmitter as new () => TypedEmitter<LLMCallbacks>) {\n constructor() {\n super();\n }\n\n abstract label(): string;\n\n /**\n * Get the model name/identifier for this LLM instance.\n *\n * @returns The model name if available, \"unknown\" otherwise.\n *\n * @remarks\n * Plugins should override this property to provide their model information.\n */\n get model(): string {\n return 'unknown';\n }\n\n /**\n * Returns a {@link LLMStream} that can be used to push text and receive LLM responses.\n */\n abstract chat({\n chatCtx,\n toolCtx,\n connOptions,\n parallelToolCalls,\n toolChoice,\n extraKwargs,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n }): LLMStream;\n\n /**\n * Pre-warm connection to the LLM service\n */\n prewarm(): void {\n // Default implementation - subclasses can override\n }\n\n async aclose(): Promise<void> {\n // Default implementation - subclasses can override\n }\n}\n\nexport abstract class LLMStream implements AsyncIterableIterator<ChatChunk> {\n protected output = new AsyncIterableQueue<ChatChunk>();\n protected queue = new AsyncIterableQueue<ChatChunk>();\n protected closed = false;\n protected abortController = new AbortController();\n protected _connOptions: APIConnectOptions;\n protected logger = log();\n\n #llm: LLM;\n #chatCtx: ChatContext;\n #toolCtx?: ToolContext;\n\n constructor(\n llm: LLM,\n {\n chatCtx,\n toolCtx,\n connOptions,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions: APIConnectOptions;\n },\n ) {\n this.#llm = llm;\n this.#chatCtx = chatCtx;\n this.#toolCtx = toolCtx;\n this._connOptions = connOptions;\n this.monitorMetrics();\n this.abortController.signal.addEventListener('abort', () => {\n // TODO (AJS-37) clean this up when we refactor with streams\n this.output.close();\n this.closed = true;\n });\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // is run **after** the constructor has finished. 
Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n startSoon(() => this.mainTask().then(() => this.queue.close()));\n }\n\n private async mainTask() {\n // TODO(brian): PR3 - Add span wrapping: tracer.startActiveSpan('llm_request', ..., { endOnExit: false })\n for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {\n try {\n // TODO(brian): PR3 - Add span for retry attempts: tracer.startActiveSpan('llm_request_run', ...)\n return await this.run();\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = this._connOptions._intervalForRetry(i);\n\n if (this._connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this._connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate LLM completion after ${this._connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n this.emitError({ error, recoverable: true });\n this.logger.warn(\n { llm: this.#llm.label(), attempt: i + 1, error },\n `failed to generate LLM completion, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n }\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n this.#llm.emit('error', {\n type: 'llm_error',\n timestamp: Date.now(),\n label: this.#llm.label(),\n error,\n recoverable,\n });\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let ttft: bigint = BigInt(-1);\n let requestId = '';\n let usage: CompletionUsage | undefined;\n\n for await (const ev of this.queue) {\n if (this.abortController.signal.aborted) {\n break;\n }\n this.output.put(ev);\n requestId = ev.id;\n if (ttft === BigInt(-1)) {\n ttft = process.hrtime.bigint() - startTime;\n }\n if (ev.usage) {\n usage = ev.usage;\n }\n }\n this.output.close();\n\n const duration = process.hrtime.bigint() - startTime;\n const durationMs = Math.trunc(Number(duration / BigInt(1000000)));\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n timestamp: Date.now(),\n requestId,\n ttftMs: ttft === BigInt(-1) ? -1 : Math.trunc(Number(ttft / BigInt(1000000))),\n durationMs,\n cancelled: this.abortController.signal.aborted,\n label: this.#llm.label(),\n completionTokens: usage?.completionTokens || 0,\n promptTokens: usage?.promptTokens || 0,\n promptCachedTokens: usage?.promptCachedTokens || 0,\n totalTokens: usage?.totalTokens || 0,\n tokensPerSecond: (() => {\n if (durationMs <= 0) {\n return 0;\n }\n return (usage?.completionTokens || 0) / (durationMs / 1000);\n })(),\n };\n this.#llm.emit('metrics_collected', metrics);\n }\n\n protected abstract run(): Promise<void>;\n\n /** The function context of this stream. */\n get toolCtx(): ToolContext | undefined {\n return this.#toolCtx;\n }\n\n /** The initial chat context of this stream. */\n get chatCtx(): ChatContext {\n return this.#chatCtx;\n }\n\n /** The connection options for this stream. 
*/\n get connOptions(): APIConnectOptions {\n return this._connOptions;\n }\n\n next(): Promise<IteratorResult<ChatChunk>> {\n return this.output.next();\n }\n\n close() {\n this.abortController.abort();\n }\n\n [Symbol.asyncIterator](): LLMStream {\n return this;\n }\n}\n"],"mappings":"AAIA,SAAS,oBAAoB;AAC7B,SAAS,oBAAoB,gBAAgB;AAC7C,SAAS,WAAW;AAGpB,SAAS,oBAAoB,OAAO,WAAW,eAAe;AAC9D,eAAmE;AAmC5D,MAAe,YAAa,aAAsD;AAAA,EACvF,cAAc;AACZ,UAAM;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAwBA,UAAgB;AAAA,EAEhB;AAAA,EAEA,MAAM,SAAwB;AAAA,EAE9B;AACF;AAEO,MAAe,UAAsD;AAAA,EAChE,SAAS,IAAI,mBAA8B;AAAA,EAC3C,QAAQ,IAAI,mBAA8B;AAAA,EAC1C,SAAS;AAAA,EACT,kBAAkB,IAAI,gBAAgB;AAAA,EACtC;AAAA,EACA,SAAS,IAAI;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YACE,KACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAKA;AACA,SAAK,OAAO;AACZ,SAAK,WAAW;AAChB,SAAK,WAAW;AAChB,SAAK,eAAe;AACpB,SAAK,eAAe;AACpB,SAAK,gBAAgB,OAAO,iBAAiB,SAAS,MAAM;AAE1D,WAAK,OAAO,MAAM;AAClB,WAAK,SAAS;AAAA,IAChB,CAAC;AAMD,cAAU,MAAM,KAAK,SAAS,EAAE,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EAChE;AAAA,EAEA,MAAc,WAAW;AAEvB,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,WAAW,GAAG,KAAK;AACvD,UAAI;AAEF,eAAO,MAAM,KAAK,IAAI;AAAA,MACxB,SAAS,OAAO;AACd,YAAI,iBAAiB,UAAU;AAC7B,gBAAM,gBAAgB,KAAK,aAAa,kBAAkB,CAAC;AAE3D,cAAI,KAAK,aAAa,aAAa,KAAK,CAAC,MAAM,WAAW;AACxD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,aAAa,UAAU;AAC3C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,mBAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,aAAa,WAAW,CAAC;AAAA,cAClF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AACL,iBAAK,UAAU,EAAE,OAAO,aAAa,KAAK,CAAC;AAC3C,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,MAAM,GAAG,SAAS,IAAI,GAAG,MAAM;AAAA,cAChD,kDAAkD,aAAa;AAAA,YACjE;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,kBAAM,MAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAAK,UAAU,EAAE,OAAO,QAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAChB,QAAI;AAEJ,qBAAiB,MAAM,KAAK,OAAO;AACjC,UAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,MACF;AACA,WAAK,OAAO,IAAI,EAAE;AAClB,kBAAY,GAAG;AACf,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AAAA,MACnC;AACA,UAAI,GAAG,OAAO;AACZ,gBAAQ,GAAG;AAAA,MACb;AAAA,IACF;AACA,SAAK,OAAO,MAAM;AAElB,UAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,UAAM,aAAa,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAChE,UAAM,UAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,MAC5E;AAAA,MACA,WAAW,KAAK,gBAAgB,OAAO;AAAA,MACvC,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB,mBAAkB,+BAAO,qBAAoB;AAAA,MAC7C,eAAc,+BAAO,iBAAgB;AAAA,MACrC,qBAAoB,+BAAO,uBAAsB;AAAA,MACjD,cAAa,+BAAO,gBAAe;AAAA,MACnC,kBAAkB,MAAM;AACtB,YAAI,cAAc,GAAG;AACnB,iBAAO;AAAA,QACT;AACA,iBAAQ,+BAAO,qBAAoB,MAAM,aAAa;AAAA,MACxD,GAAG;AAAA,IACL;AACA,SAAK,KAAK,KAAK,qBAAqB,OAAO;AAAA,EAC7C;AAAA;AAAA,EAKA,IAAI,UAAmC;AACrC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,UAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,cAAiC;AACnC,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAA2C;AACzC,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,QAAQ;AACN,SAAK,gBAAgB,MAAM;AAAA,EAC7B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAe;AAClC,WAAO;AAAA,EACT;AACF;","names":[]}
+
{"version":3,"sources":["../../src/llm/llm.ts"],"sourcesContent":["// SPDX-FileCopyrightText: 2025 LiveKit, Inc.\n//\n// SPDX-License-Identifier: Apache-2.0\nimport type { TypedEventEmitter as TypedEmitter } from '@livekit/typed-emitter';\nimport type { Span } from '@opentelemetry/api';\nimport { EventEmitter } from 'node:events';\nimport { APIConnectionError, APIError } from '../_exceptions.js';\nimport { log } from '../log.js';\nimport type { LLMMetrics } from '../metrics/base.js';\nimport { recordException, traceTypes, tracer } from '../telemetry/index.js';\nimport type { APIConnectOptions } from '../types.js';\nimport { AsyncIterableQueue, delay, startSoon, toError } from '../utils.js';\nimport { type ChatContext, type ChatRole, type FunctionCall } from './chat_context.js';\nimport type { ToolChoice, ToolContext } from './tool_context.js';\n\nexport interface ChoiceDelta {\n role: ChatRole;\n content?: string;\n toolCalls?: FunctionCall[];\n}\n\nexport interface CompletionUsage {\n completionTokens: number;\n promptTokens: number;\n promptCachedTokens: number;\n totalTokens: number;\n}\n\nexport interface ChatChunk {\n id: string;\n delta?: ChoiceDelta;\n usage?: CompletionUsage;\n}\n\nexport interface LLMError {\n type: 'llm_error';\n timestamp: number;\n label: string;\n error: Error;\n recoverable: boolean;\n}\n\nexport type LLMCallbacks = {\n ['metrics_collected']: (metrics: LLMMetrics) => void;\n ['error']: (error: LLMError) => void;\n};\n\nexport abstract class LLM extends (EventEmitter as new () => TypedEmitter<LLMCallbacks>) {\n constructor() {\n super();\n }\n\n abstract label(): string;\n\n /**\n * Get the model name/identifier for this LLM instance.\n *\n * @returns The model name if available, \"unknown\" otherwise.\n *\n * @remarks\n * Plugins should override this property to provide their model information.\n */\n get model(): string {\n return 'unknown';\n }\n\n /**\n * Returns a {@link LLMStream} that can be used to push text and receive LLM responses.\n */\n abstract chat({\n chatCtx,\n toolCtx,\n connOptions,\n parallelToolCalls,\n toolChoice,\n extraKwargs,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions?: APIConnectOptions;\n parallelToolCalls?: boolean;\n toolChoice?: ToolChoice;\n extraKwargs?: Record<string, unknown>;\n }): LLMStream;\n\n /**\n * Pre-warm connection to the LLM service\n */\n prewarm(): void {\n // Default implementation - subclasses can override\n }\n\n async aclose(): Promise<void> {\n // Default implementation - subclasses can override\n }\n}\n\nexport abstract class LLMStream implements AsyncIterableIterator<ChatChunk> {\n protected output = new AsyncIterableQueue<ChatChunk>();\n protected queue = new AsyncIterableQueue<ChatChunk>();\n protected closed = false;\n protected abortController = new AbortController();\n protected _connOptions: APIConnectOptions;\n protected logger = log();\n\n #llm: LLM;\n #chatCtx: ChatContext;\n #toolCtx?: ToolContext;\n #llmRequestSpan?: Span;\n\n constructor(\n llm: LLM,\n {\n chatCtx,\n toolCtx,\n connOptions,\n }: {\n chatCtx: ChatContext;\n toolCtx?: ToolContext;\n connOptions: APIConnectOptions;\n },\n ) {\n this.#llm = llm;\n this.#chatCtx = chatCtx;\n this.#toolCtx = toolCtx;\n this._connOptions = connOptions;\n this.monitorMetrics();\n this.abortController.signal.addEventListener('abort', () => {\n // TODO (AJS-37) clean this up when we refactor with streams\n this.output.close();\n this.closed = true;\n });\n\n // this is a hack to immitate asyncio.create_task so that mainTask\n // 
is run **after** the constructor has finished. Otherwise we get\n // runtime error when trying to access class variables in the\n // `run` method.\n startSoon(() => this.mainTask().then(() => this.queue.close()));\n }\n\n private _mainTaskImpl = async (span: Span) => {\n this.#llmRequestSpan = span;\n span.setAttribute(traceTypes.ATTR_GEN_AI_REQUEST_MODEL, this.#llm.model);\n\n for (let i = 0; i < this._connOptions.maxRetry + 1; i++) {\n try {\n return await tracer.startActiveSpan(\n async (attemptSpan) => {\n attemptSpan.setAttribute(traceTypes.ATTR_RETRY_COUNT, i);\n try {\n return await this.run();\n } catch (error) {\n recordException(attemptSpan, toError(error));\n throw error;\n }\n },\n { name: 'llm_request_run' },\n );\n } catch (error) {\n if (error instanceof APIError) {\n const retryInterval = this._connOptions._intervalForRetry(i);\n\n if (this._connOptions.maxRetry === 0 || !error.retryable) {\n this.emitError({ error, recoverable: false });\n throw error;\n } else if (i === this._connOptions.maxRetry) {\n this.emitError({ error, recoverable: false });\n throw new APIConnectionError({\n message: `failed to generate LLM completion after ${this._connOptions.maxRetry + 1} attempts`,\n options: { retryable: false },\n });\n } else {\n this.emitError({ error, recoverable: true });\n this.logger.warn(\n { llm: this.#llm.label(), attempt: i + 1, error },\n `failed to generate LLM completion, retrying in ${retryInterval}s`,\n );\n }\n\n if (retryInterval > 0) {\n await delay(retryInterval);\n }\n } else {\n this.emitError({ error: toError(error), recoverable: false });\n throw error;\n }\n }\n }\n };\n\n private mainTask = async () =>\n tracer.startActiveSpan(async (span) => this._mainTaskImpl(span), {\n name: 'llm_request',\n endOnExit: false,\n });\n\n private emitError({ error, recoverable }: { error: Error; recoverable: boolean }) {\n this.#llm.emit('error', {\n type: 'llm_error',\n timestamp: Date.now(),\n label: this.#llm.label(),\n error,\n recoverable,\n });\n }\n\n protected async monitorMetrics() {\n const startTime = process.hrtime.bigint();\n let ttft: bigint = BigInt(-1);\n let requestId = '';\n let usage: CompletionUsage | undefined;\n let completionStartTime: string | undefined;\n\n for await (const ev of this.queue) {\n if (this.abortController.signal.aborted) {\n break;\n }\n this.output.put(ev);\n requestId = ev.id;\n if (ttft === BigInt(-1)) {\n ttft = process.hrtime.bigint() - startTime;\n completionStartTime = new Date().toISOString();\n }\n if (ev.usage) {\n usage = ev.usage;\n }\n }\n this.output.close();\n\n const duration = process.hrtime.bigint() - startTime;\n const durationMs = Math.trunc(Number(duration / BigInt(1000000)));\n const metrics: LLMMetrics = {\n type: 'llm_metrics',\n timestamp: Date.now(),\n requestId,\n ttftMs: ttft === BigInt(-1) ? 
-1 : Math.trunc(Number(ttft / BigInt(1000000))),\n durationMs,\n cancelled: this.abortController.signal.aborted,\n label: this.#llm.label(),\n completionTokens: usage?.completionTokens || 0,\n promptTokens: usage?.promptTokens || 0,\n promptCachedTokens: usage?.promptCachedTokens || 0,\n totalTokens: usage?.totalTokens || 0,\n tokensPerSecond: (() => {\n if (durationMs <= 0) {\n return 0;\n }\n return (usage?.completionTokens || 0) / (durationMs / 1000);\n })(),\n };\n\n if (this.#llmRequestSpan) {\n this.#llmRequestSpan.setAttribute(traceTypes.ATTR_LLM_METRICS, JSON.stringify(metrics));\n\n this.#llmRequestSpan.setAttributes({\n [traceTypes.ATTR_GEN_AI_USAGE_INPUT_TOKENS]: metrics.promptTokens,\n [traceTypes.ATTR_GEN_AI_USAGE_OUTPUT_TOKENS]: metrics.completionTokens,\n });\n\n if (completionStartTime) {\n this.#llmRequestSpan.setAttribute(\n traceTypes.ATTR_LANGFUSE_COMPLETION_START_TIME,\n completionStartTime,\n );\n }\n\n // End the span now that metrics are collected\n this.#llmRequestSpan.end();\n }\n\n this.#llm.emit('metrics_collected', metrics);\n }\n\n protected abstract run(): Promise<void>;\n\n /** The function context of this stream. */\n get toolCtx(): ToolContext | undefined {\n return this.#toolCtx;\n }\n\n /** The initial chat context of this stream. */\n get chatCtx(): ChatContext {\n return this.#chatCtx;\n }\n\n /** The connection options for this stream. */\n get connOptions(): APIConnectOptions {\n return this._connOptions;\n }\n\n next(): Promise<IteratorResult<ChatChunk>> {\n return this.output.next();\n }\n\n close() {\n this.abortController.abort();\n }\n\n [Symbol.asyncIterator](): LLMStream {\n return this;\n }\n}\n"],"mappings":"AAKA,SAAS,oBAAoB;AAC7B,SAAS,oBAAoB,gBAAgB;AAC7C,SAAS,WAAW;AAEpB,SAAS,iBAAiB,YAAY,cAAc;AAEpD,SAAS,oBAAoB,OAAO,WAAW,eAAe;AAC9D,eAAmE;AAmC5D,MAAe,YAAa,aAAsD;AAAA,EACvF,cAAc;AACZ,UAAM;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,IAAI,QAAgB;AAClB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAwBA,UAAgB;AAAA,EAEhB;AAAA,EAEA,MAAM,SAAwB;AAAA,EAE9B;AACF;AAEO,MAAe,UAAsD;AAAA,EAChE,SAAS,IAAI,mBAA8B;AAAA,EAC3C,QAAQ,IAAI,mBAA8B;AAAA,EAC1C,SAAS;AAAA,EACT,kBAAkB,IAAI,gBAAgB;AAAA,EACtC;AAAA,EACA,SAAS,IAAI;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YACE,KACA;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAKA;AACA,SAAK,OAAO;AACZ,SAAK,WAAW;AAChB,SAAK,WAAW;AAChB,SAAK,eAAe;AACpB,SAAK,eAAe;AACpB,SAAK,gBAAgB,OAAO,iBAAiB,SAAS,MAAM;AAE1D,WAAK,OAAO,MAAM;AAClB,WAAK,SAAS;AAAA,IAChB,CAAC;AAMD,cAAU,MAAM,KAAK,SAAS,EAAE,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,EAChE;AAAA,EAEQ,gBAAgB,OAAO,SAAe;AAC5C,SAAK,kBAAkB;AACvB,SAAK,aAAa,WAAW,2BAA2B,KAAK,KAAK,KAAK;AAEvE,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,WAAW,GAAG,KAAK;AACvD,UAAI;AACF,eAAO,MAAM,OAAO;AAAA,UAClB,OAAO,gBAAgB;AACrB,wBAAY,aAAa,WAAW,kBAAkB,CAAC;AACvD,gBAAI;AACF,qBAAO,MAAM,KAAK,IAAI;AAAA,YACxB,SAAS,OAAO;AACd,8BAAgB,aAAa,QAAQ,KAAK,CAAC;AAC3C,oBAAM;AAAA,YACR;AAAA,UACF;AAAA,UACA,EAAE,MAAM,kBAAkB;AAAA,QAC5B;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,UAAU;AAC7B,gBAAM,gBAAgB,KAAK,aAAa,kBAAkB,CAAC;AAE3D,cAAI,KAAK,aAAa,aAAa,KAAK,CAAC,MAAM,WAAW;AACxD,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM;AAAA,UACR,WAAW,MAAM,KAAK,aAAa,UAAU;AAC3C,iBAAK,UAAU,EAAE,OAAO,aAAa,MAAM,CAAC;AAC5C,kBAAM,IAAI,mBAAmB;AAAA,cAC3B,SAAS,2CAA2C,KAAK,aAAa,WAAW,CAAC;AAAA,cAClF,SAAS,EAAE,WAAW,MAAM;AAAA,YAC9B,CAAC;AAAA,UACH,OAAO;AACL,iBAAK,UAAU,EAAE,OAAO,aAAa,KAAK,CAAC;AAC3C,iBAAK,OAAO;AAAA,cACV,EAAE,KAAK,KAAK,KAAK,MAAM,GAAG,SAAS,IAAI,GAAG,MAAM;AAAA,cAChD,kDAAkD,aAAa;AAAA,YACjE;AAAA,UACF;AAEA,cAAI,gBAAgB,GAAG;AACrB,kBAAM,MAAM,aAAa;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,eAA
K,UAAU,EAAE,OAAO,QAAQ,KAAK,GAAG,aAAa,MAAM,CAAC;AAC5D,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,WAAW,YACjB,OAAO,gBAAgB,OAAO,SAAS,KAAK,cAAc,IAAI,GAAG;AAAA,IAC/D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AAAA,EAEK,UAAU,EAAE,OAAO,YAAY,GAA2C;AAChF,SAAK,KAAK,KAAK,SAAS;AAAA,MACtB,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAgB,iBAAiB;AAC/B,UAAM,YAAY,QAAQ,OAAO,OAAO;AACxC,QAAI,OAAe,OAAO,EAAE;AAC5B,QAAI,YAAY;AAChB,QAAI;AACJ,QAAI;AAEJ,qBAAiB,MAAM,KAAK,OAAO;AACjC,UAAI,KAAK,gBAAgB,OAAO,SAAS;AACvC;AAAA,MACF;AACA,WAAK,OAAO,IAAI,EAAE;AAClB,kBAAY,GAAG;AACf,UAAI,SAAS,OAAO,EAAE,GAAG;AACvB,eAAO,QAAQ,OAAO,OAAO,IAAI;AACjC,+BAAsB,oBAAI,KAAK,GAAE,YAAY;AAAA,MAC/C;AACA,UAAI,GAAG,OAAO;AACZ,gBAAQ,GAAG;AAAA,MACb;AAAA,IACF;AACA,SAAK,OAAO,MAAM;AAElB,UAAM,WAAW,QAAQ,OAAO,OAAO,IAAI;AAC3C,UAAM,aAAa,KAAK,MAAM,OAAO,WAAW,OAAO,GAAO,CAAC,CAAC;AAChE,UAAM,UAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,QAAQ,SAAS,OAAO,EAAE,IAAI,KAAK,KAAK,MAAM,OAAO,OAAO,OAAO,GAAO,CAAC,CAAC;AAAA,MAC5E;AAAA,MACA,WAAW,KAAK,gBAAgB,OAAO;AAAA,MACvC,OAAO,KAAK,KAAK,MAAM;AAAA,MACvB,mBAAkB,+BAAO,qBAAoB;AAAA,MAC7C,eAAc,+BAAO,iBAAgB;AAAA,MACrC,qBAAoB,+BAAO,uBAAsB;AAAA,MACjD,cAAa,+BAAO,gBAAe;AAAA,MACnC,kBAAkB,MAAM;AACtB,YAAI,cAAc,GAAG;AACnB,iBAAO;AAAA,QACT;AACA,iBAAQ,+BAAO,qBAAoB,MAAM,aAAa;AAAA,MACxD,GAAG;AAAA,IACL;AAEA,QAAI,KAAK,iBAAiB;AACxB,WAAK,gBAAgB,aAAa,WAAW,kBAAkB,KAAK,UAAU,OAAO,CAAC;AAEtF,WAAK,gBAAgB,cAAc;AAAA,QACjC,CAAC,WAAW,8BAA8B,GAAG,QAAQ;AAAA,QACrD,CAAC,WAAW,+BAA+B,GAAG,QAAQ;AAAA,MACxD,CAAC;AAED,UAAI,qBAAqB;AACvB,aAAK,gBAAgB;AAAA,UACnB,WAAW;AAAA,UACX;AAAA,QACF;AAAA,MACF;AAGA,WAAK,gBAAgB,IAAI;AAAA,IAC3B;AAEA,SAAK,KAAK,KAAK,qBAAqB,OAAO;AAAA,EAC7C;AAAA;AAAA,EAKA,IAAI,UAAmC;AACrC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,UAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,cAAiC;AACnC,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAA2C;AACzC,WAAO,KAAK,OAAO,KAAK;AAAA,EAC1B;AAAA,EAEA,QAAQ;AACN,SAAK,gBAAgB,MAAM;AAAA,EAC7B;AAAA,EAEA,CAAC,OAAO,aAAa,IAAe;AAClC,WAAO;AAAA,EACT;AACF;","names":[]}
package/dist/log.cjs
CHANGED
@@ -18,14 +18,19 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var log_exports = {};
 __export(log_exports, {
+enableOtelLogging: () => enableOtelLogging,
 initializeLogger: () => initializeLogger,
 log: () => log,
 loggerOptions: () => loggerOptions
 });
 module.exports = __toCommonJS(log_exports);
+var import_node_stream = require("node:stream");
 var import_pino = require("pino");
+var import_pino_pretty = require("pino-pretty");
+var import_pino_otel_transport = require("./telemetry/pino_otel_transport.cjs");
 let loggerOptions;
 let logger = void 0;
+let otelEnabled = false;
 const log = () => {
 if (!logger) {
 throw new TypeError("logger not initialized. did you forget to run initializeLogger()?");
@@ -35,21 +40,40 @@ const log = () => {
 const initializeLogger = ({ pretty, level }) => {
 loggerOptions = { pretty, level };
 logger = (0, import_pino.pino)(
-
-
-target: "pino-pretty",
-options: {
-colorize: true
-}
-}
-} : {}
+{ level: level || "info" },
+pretty ? (0, import_pino_pretty.build)({ colorize: true }) : process.stdout
 );
-
-
+};
+class OtelDestination extends import_node_stream.Writable {
+_write(chunk, _encoding, callback) {
+try {
+const line = chunk.toString().trim();
+if (line) {
+const logObj = JSON.parse(line);
+(0, import_pino_otel_transport.emitToOtel)(logObj);
+}
+} catch {
+}
+callback();
+}
+}
+const enableOtelLogging = () => {
+if (otelEnabled || !logger) {
+console.warn("OTEL logging already enabled or logger not initialized");
+return;
 }
+otelEnabled = true;
+const { pretty, level } = loggerOptions;
+const logLevel = level || "info";
+const streams = [
+{ stream: pretty ? (0, import_pino_pretty.build)({ colorize: true }) : process.stdout, level: logLevel },
+{ stream: new OtelDestination(), level: "debug" }
+];
+logger = (0, import_pino.pino)({ level: logLevel }, (0, import_pino.multistream)(streams));
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+enableOtelLogging,
 initializeLogger,
 log,
 loggerOptions
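Taken together, the log.cjs changes rework initializeLogger() to pass an explicit destination (pino-pretty or process.stdout) and add enableOtelLogging(), which rebuilds the logger as a pino multistream that also tees every record, down to debug level, into an OtelDestination writable; that stream parses each NDJSON line and forwards it to emitToOtel() from the new telemetry/pino_otel_transport module. A rough TypeScript sketch of the same wiring, with a stubbed emitToOtel() standing in for that transport:

```ts
import { Writable } from 'node:stream';
import { pino, multistream } from 'pino';
import pretty from 'pino-pretty';

// Stand-in for the emitToOtel() helper shipped in dist/telemetry/pino_otel_transport;
// the real implementation hands the record to the OTEL log exporter.
function emitToOtel(logObj: Record<string, unknown>): void {
  void logObj;
}

// pino writes NDJSON: one log record per line. Parse each line and forward it.
class OtelDestination extends Writable {
  _write(chunk: Buffer | string, _encoding: BufferEncoding, callback: (error?: Error | null) => void): void {
    try {
      const line = chunk.toString().trim();
      if (line) emitToOtel(JSON.parse(line));
    } catch {
      // Malformed lines are dropped, mirroring the shipped implementation.
    }
    callback();
  }
}

export function buildOtelLogger(opts: { pretty?: boolean; level?: 'debug' | 'info' | 'warn' | 'error' }) {
  const level = opts.level ?? 'info';
  // pretty(...) corresponds to the build({ colorize: true }) call in the diff.
  const consoleStream = opts.pretty ? pretty({ colorize: true }) : process.stdout;
  return pino(
    { level },
    multistream([
      { stream: consoleStream, level },
      // The OTEL side always receives debug records regardless of the console level.
      { stream: new OtelDestination(), level: 'debug' },
    ]),
  );
}
```

The two-stream split keeps console output at the configured level while the OTEL destination sees everything from debug up, matching the { level: "debug" } entry in the hunk above.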