langsmith 0.3.34 → 0.3.35-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.cjs +99 -17
- package/dist/client.d.ts +13 -1
- package/dist/client.js +99 -17
- package/dist/experimental/otel/constants.cjs +55 -0
- package/dist/experimental/otel/constants.d.ts +44 -0
- package/dist/experimental/otel/constants.js +52 -0
- package/dist/experimental/otel/exporter.cjs +109 -0
- package/dist/experimental/otel/exporter.d.ts +5 -0
- package/dist/experimental/otel/exporter.js +72 -0
- package/dist/experimental/otel/setup.cjs +71 -0
- package/dist/experimental/otel/setup.d.ts +1 -0
- package/dist/experimental/otel/setup.js +69 -0
- package/dist/experimental/otel/translator.cjs +459 -0
- package/dist/experimental/otel/translator.d.ts +24 -0
- package/dist/experimental/otel/translator.js +422 -0
- package/dist/experimental/otel/types.cjs +4 -0
- package/dist/experimental/otel/types.d.ts +29 -0
- package/dist/experimental/otel/types.js +3 -0
- package/dist/experimental/otel/utils.cjs +82 -0
- package/dist/experimental/otel/utils.d.ts +24 -0
- package/dist/experimental/otel/utils.js +43 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/singletons/otel.cjs +155 -0
- package/dist/singletons/otel.d.ts +63 -0
- package/dist/singletons/otel.js +147 -0
- package/dist/traceable.cjs +50 -7
- package/dist/traceable.js +50 -7
- package/dist/utils/_uuid.cjs +5 -35
- package/dist/utils/_uuid.js +5 -2
- package/experimental/otel/exporter.cjs +1 -0
- package/experimental/otel/exporter.d.cts +1 -0
- package/experimental/otel/exporter.d.ts +1 -0
- package/experimental/otel/exporter.js +1 -0
- package/experimental/otel/setup.cjs +1 -0
- package/experimental/otel/setup.d.cts +1 -0
- package/experimental/otel/setup.d.ts +1 -0
- package/experimental/otel/setup.js +1 -0
- package/package.json +40 -2
package/dist/client.cjs
CHANGED
|
@@ -36,6 +36,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
36
36
|
exports.Client = exports.DEFAULT_BATCH_SIZE_LIMIT_BYTES = exports.AutoBatchQueue = void 0;
|
|
37
37
|
exports.mergeRuntimeEnvIntoRunCreate = mergeRuntimeEnvIntoRunCreate;
|
|
38
38
|
const uuid = __importStar(require("uuid"));
|
|
39
|
+
const translator_js_1 = require("./experimental/otel/translator.cjs");
|
|
40
|
+
const otel_js_1 = require("./singletons/otel.cjs");
|
|
39
41
|
const async_caller_js_1 = require("./utils/async_caller.cjs");
|
|
40
42
|
const messages_js_1 = require("./utils/messages.cjs");
|
|
41
43
|
const env_js_1 = require("./utils/env.cjs");
|
|
@@ -149,6 +151,7 @@ class AutoBatchQueue {
|
|
|
149
151
|
this.items.push({
|
|
150
152
|
action: item.action,
|
|
151
153
|
payload: item.item,
|
|
154
|
+
otelContext: item.otelContext,
|
|
152
155
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
153
156
|
itemPromiseResolve: itemPromiseResolve,
|
|
154
157
|
itemPromise,
|
|
@@ -182,7 +185,11 @@ class AutoBatchQueue {
|
|
|
182
185
|
this.sizeBytes -= item.size;
|
|
183
186
|
}
|
|
184
187
|
return [
|
|
185
|
-
popped.map((it) => ({
|
|
188
|
+
popped.map((it) => ({
|
|
189
|
+
action: it.action,
|
|
190
|
+
item: it.payload,
|
|
191
|
+
otelContext: it.otelContext,
|
|
192
|
+
})),
|
|
186
193
|
() => popped.forEach((it) => it.itemPromiseResolve()),
|
|
187
194
|
];
|
|
188
195
|
}
|
|
@@ -332,6 +339,12 @@ class Client {
|
|
|
332
339
|
writable: true,
|
|
333
340
|
value: false
|
|
334
341
|
});
|
|
342
|
+
Object.defineProperty(this, "langSmithToOTELTranslator", {
|
|
343
|
+
enumerable: true,
|
|
344
|
+
configurable: true,
|
|
345
|
+
writable: true,
|
|
346
|
+
value: void 0
|
|
347
|
+
});
|
|
335
348
|
Object.defineProperty(this, "debug", {
|
|
336
349
|
enumerable: true,
|
|
337
350
|
configurable: true,
|
|
@@ -377,6 +390,16 @@ class Client {
|
|
|
377
390
|
this.batchSizeBytesLimit = config.batchSizeBytesLimit;
|
|
378
391
|
this.fetchOptions = config.fetchOptions || {};
|
|
379
392
|
this.manualFlushMode = config.manualFlushMode ?? this.manualFlushMode;
|
|
393
|
+
if ((0, env_js_1.getEnvironmentVariable)("OTEL_ENABLED") === "true") {
|
|
394
|
+
const otel_trace = (0, otel_js_1.getOTELTrace)();
|
|
395
|
+
const existingTracerProvider = otel_trace.getTracerProvider();
|
|
396
|
+
const { tracerProvider: langSmithTracerProvider } = (0, otel_js_1.getDefaultOTLPTracerComponents)() ?? {};
|
|
397
|
+
// If user has set global tracer before, this fails and returns false
|
|
398
|
+
const globalSuccessfullyOverridden = otel_trace.setGlobalTracerProvider(langSmithTracerProvider);
|
|
399
|
+
this.langSmithToOTELTranslator = new translator_js_1.LangSmithToOTELTranslator(globalSuccessfullyOverridden
|
|
400
|
+
? langSmithTracerProvider
|
|
401
|
+
: existingTracerProvider);
|
|
402
|
+
}
|
|
380
403
|
}
|
|
381
404
|
static getDefaultClientConfig() {
|
|
382
405
|
const apiKey = (0, env_js_1.getLangSmithEnvironmentVariable)("API_KEY");
|
|
@@ -618,26 +641,62 @@ class Client {
|
|
|
618
641
|
return;
|
|
619
642
|
}
|
|
620
643
|
try {
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
.filter((item) => item.action === "create")
|
|
624
|
-
.map((item) => item.item),
|
|
625
|
-
runUpdates: batch
|
|
626
|
-
.filter((item) => item.action === "update")
|
|
627
|
-
.map((item) => item.item),
|
|
628
|
-
};
|
|
629
|
-
const serverInfo = await this._ensureServerInfo();
|
|
630
|
-
if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
|
|
631
|
-
await this.multipartIngestRuns(ingestParams);
|
|
644
|
+
if (this.langSmithToOTELTranslator !== undefined) {
|
|
645
|
+
this._sendBatchToOTELTranslator(batch);
|
|
632
646
|
}
|
|
633
647
|
else {
|
|
634
|
-
|
|
648
|
+
const ingestParams = {
|
|
649
|
+
runCreates: batch
|
|
650
|
+
.filter((item) => item.action === "create")
|
|
651
|
+
.map((item) => item.item),
|
|
652
|
+
runUpdates: batch
|
|
653
|
+
.filter((item) => item.action === "update")
|
|
654
|
+
.map((item) => item.item),
|
|
655
|
+
};
|
|
656
|
+
const serverInfo = await this._ensureServerInfo();
|
|
657
|
+
if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
|
|
658
|
+
await this.multipartIngestRuns(ingestParams);
|
|
659
|
+
}
|
|
660
|
+
else {
|
|
661
|
+
await this.batchIngestRuns(ingestParams);
|
|
662
|
+
}
|
|
635
663
|
}
|
|
636
664
|
}
|
|
665
|
+
catch (e) {
|
|
666
|
+
console.error("Error exporting batch:", e);
|
|
667
|
+
}
|
|
637
668
|
finally {
|
|
638
669
|
done();
|
|
639
670
|
}
|
|
640
671
|
}
|
|
672
|
+
_sendBatchToOTELTranslator(batch) {
|
|
673
|
+
if (this.langSmithToOTELTranslator !== undefined) {
|
|
674
|
+
const otelContextMap = new Map();
|
|
675
|
+
const operations = [];
|
|
676
|
+
for (const item of batch) {
|
|
677
|
+
if (item.item.id && item.otelContext) {
|
|
678
|
+
otelContextMap.set(item.item.id, item.otelContext);
|
|
679
|
+
if (item.action === "create") {
|
|
680
|
+
operations.push({
|
|
681
|
+
operation: "post",
|
|
682
|
+
id: item.item.id,
|
|
683
|
+
trace_id: item.item.trace_id ?? item.item.id,
|
|
684
|
+
run: item.item,
|
|
685
|
+
});
|
|
686
|
+
}
|
|
687
|
+
else {
|
|
688
|
+
operations.push({
|
|
689
|
+
operation: "patch",
|
|
690
|
+
id: item.item.id,
|
|
691
|
+
trace_id: item.item.trace_id ?? item.item.id,
|
|
692
|
+
run: item.item,
|
|
693
|
+
});
|
|
694
|
+
}
|
|
695
|
+
}
|
|
696
|
+
}
|
|
697
|
+
this.langSmithToOTELTranslator.exportBatch(operations, otelContextMap);
|
|
698
|
+
}
|
|
699
|
+
}
|
|
641
700
|
async processRunOperation(item) {
|
|
642
701
|
clearTimeout(this.autoBatchTimeout);
|
|
643
702
|
this.autoBatchTimeout = undefined;
|
|
@@ -711,6 +770,17 @@ class Client {
|
|
|
711
770
|
const sizeLimitBytes = await this._getBatchSizeLimitBytes();
|
|
712
771
|
await this.drainAutoBatchQueue(sizeLimitBytes);
|
|
713
772
|
}
|
|
773
|
+
_cloneCurrentOTELContext() {
|
|
774
|
+
const otel_trace = (0, otel_js_1.getOTELTrace)();
|
|
775
|
+
const otel_context = (0, otel_js_1.getOTELContext)();
|
|
776
|
+
if (this.langSmithToOTELTranslator !== undefined) {
|
|
777
|
+
const currentSpan = otel_trace.getActiveSpan();
|
|
778
|
+
if (currentSpan) {
|
|
779
|
+
return otel_trace.setSpan(otel_context.active(), currentSpan);
|
|
780
|
+
}
|
|
781
|
+
}
|
|
782
|
+
return undefined;
|
|
783
|
+
}
|
|
714
784
|
async createRun(run) {
|
|
715
785
|
if (!this._filterForSampling([run]).length) {
|
|
716
786
|
return;
|
|
@@ -726,9 +796,11 @@ class Client {
|
|
|
726
796
|
if (this.autoBatchTracing &&
|
|
727
797
|
runCreate.trace_id !== undefined &&
|
|
728
798
|
runCreate.dotted_order !== undefined) {
|
|
799
|
+
const otelContext = this._cloneCurrentOTELContext();
|
|
729
800
|
void this.processRunOperation({
|
|
730
801
|
action: "create",
|
|
731
802
|
item: runCreate,
|
|
803
|
+
otelContext,
|
|
732
804
|
}).catch(console.error);
|
|
733
805
|
return;
|
|
734
806
|
}
|
|
@@ -1056,17 +1128,26 @@ class Client {
|
|
|
1056
1128
|
if (this.autoBatchTracing &&
|
|
1057
1129
|
data.trace_id !== undefined &&
|
|
1058
1130
|
data.dotted_order !== undefined) {
|
|
1131
|
+
const otelContext = this._cloneCurrentOTELContext();
|
|
1059
1132
|
if (run.end_time !== undefined &&
|
|
1060
1133
|
data.parent_run_id === undefined &&
|
|
1061
1134
|
this.blockOnRootRunFinalization &&
|
|
1062
1135
|
!this.manualFlushMode) {
|
|
1063
1136
|
// Trigger batches as soon as a root trace ends and wait to ensure trace finishes
|
|
1064
1137
|
// in serverless environments.
|
|
1065
|
-
await this.processRunOperation({
|
|
1138
|
+
await this.processRunOperation({
|
|
1139
|
+
action: "update",
|
|
1140
|
+
item: data,
|
|
1141
|
+
otelContext,
|
|
1142
|
+
}).catch(console.error);
|
|
1066
1143
|
return;
|
|
1067
1144
|
}
|
|
1068
1145
|
else {
|
|
1069
|
-
void this.processRunOperation({
|
|
1146
|
+
void this.processRunOperation({
|
|
1147
|
+
action: "update",
|
|
1148
|
+
item: data,
|
|
1149
|
+
otelContext,
|
|
1150
|
+
}).catch(console.error);
|
|
1070
1151
|
}
|
|
1071
1152
|
return;
|
|
1072
1153
|
}
|
|
@@ -3416,15 +3497,16 @@ class Client {
|
|
|
3416
3497
|
*
|
|
3417
3498
|
* @returns A promise that resolves once all currently pending traces have sent.
|
|
3418
3499
|
*/
|
|
3419
|
-
awaitPendingTraceBatches() {
|
|
3500
|
+
async awaitPendingTraceBatches() {
|
|
3420
3501
|
if (this.manualFlushMode) {
|
|
3421
3502
|
console.warn("[WARNING]: When tracing in manual flush mode, you must call `await client.flush()` manually to submit trace batches.");
|
|
3422
3503
|
return Promise.resolve();
|
|
3423
3504
|
}
|
|
3424
|
-
|
|
3505
|
+
await Promise.all([
|
|
3425
3506
|
...this.autoBatchQueue.items.map(({ itemPromise }) => itemPromise),
|
|
3426
3507
|
this.batchIngestCaller.queue.onIdle(),
|
|
3427
3508
|
]);
|
|
3509
|
+
await (0, otel_js_1.getDefaultOTLPTracerComponents)()?.spanProcessor?.forceFlush();
|
|
3428
3510
|
}
|
|
3429
3511
|
}
|
|
3430
3512
|
exports.Client = Client;
|
package/dist/client.d.ts
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import type { OTELContext, OTELTracerProvider } from "./experimental/otel/types.js";
|
|
1
2
|
import { AsyncCallerParams } from "./utils/async_caller.js";
|
|
2
3
|
import { ComparativeExperiment, DataType, Dataset, DatasetDiffInfo, DatasetShareSchema, Example, ExampleCreate, ExampleUpdate, ExampleUpdateWithoutId, Feedback, FeedbackConfig, FeedbackIngestToken, KVMap, LangChainBaseMessage, LangSmithSettings, LikePromptResponse, Prompt, PromptCommit, PromptSortField, Run, RunCreate, RunUpdate, ScoreType, ExampleSearch, TimeDelta, TracerSession, TracerSessionResult, ValueType, AnnotationQueue, RunWithAnnotationQueueInfo, Attachments, UploadExamplesResponse, UpdateExamplesResponse, DatasetVersion, AnnotationQueueWithDetails } from "./schemas.js";
|
|
3
4
|
import { EvaluationResult, EvaluationResults, RunEvaluator } from "./evaluation/evaluator.js";
|
|
@@ -25,6 +26,11 @@ export interface ClientConfig {
|
|
|
25
26
|
* Enable debug mode for the client. If set, all sent HTTP requests will be logged.
|
|
26
27
|
*/
|
|
27
28
|
debug?: boolean;
|
|
29
|
+
/**
|
|
30
|
+
* Optional tracer provider for OpenTelemetry integration.
|
|
31
|
+
* If not provided, a LangSmith-specific tracer provider will be used.
|
|
32
|
+
*/
|
|
33
|
+
otelTracerProvider?: OTELTracerProvider;
|
|
28
34
|
}
|
|
29
35
|
/**
|
|
30
36
|
* Represents the parameters for listing runs (spans) from the Langsmith server.
|
|
@@ -240,6 +246,7 @@ export type CreateProjectParams = {
|
|
|
240
246
|
type AutoBatchQueueItem = {
|
|
241
247
|
action: "create" | "update";
|
|
242
248
|
item: RunCreate | RunUpdate;
|
|
249
|
+
otelContext?: OTELContext;
|
|
243
250
|
};
|
|
244
251
|
type Thread = {
|
|
245
252
|
filter: string;
|
|
@@ -261,6 +268,7 @@ export declare class AutoBatchQueue {
|
|
|
261
268
|
items: {
|
|
262
269
|
action: "create" | "update";
|
|
263
270
|
payload: RunCreate | RunUpdate;
|
|
271
|
+
otelContext?: OTELContext;
|
|
264
272
|
itemPromiseResolve: () => void;
|
|
265
273
|
itemPromise: Promise<void>;
|
|
266
274
|
size: number;
|
|
@@ -269,6 +277,7 @@ export declare class AutoBatchQueue {
|
|
|
269
277
|
peek(): {
|
|
270
278
|
action: "create" | "update";
|
|
271
279
|
payload: RunCreate | RunUpdate;
|
|
280
|
+
otelContext?: OTELContext;
|
|
272
281
|
itemPromiseResolve: () => void;
|
|
273
282
|
itemPromise: Promise<void>;
|
|
274
283
|
size: number;
|
|
@@ -301,6 +310,7 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
301
310
|
private _serverInfo;
|
|
302
311
|
private _getServerInfoPromise?;
|
|
303
312
|
private manualFlushMode;
|
|
313
|
+
private langSmithToOTELTranslator?;
|
|
304
314
|
debug: boolean;
|
|
305
315
|
constructor(config?: ClientConfig);
|
|
306
316
|
static getDefaultClientConfig(): {
|
|
@@ -325,6 +335,7 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
325
335
|
private _getMultiPartSupport;
|
|
326
336
|
private drainAutoBatchQueue;
|
|
327
337
|
private _processBatch;
|
|
338
|
+
private _sendBatchToOTELTranslator;
|
|
328
339
|
private processRunOperation;
|
|
329
340
|
protected _getServerInfo(): Promise<any>;
|
|
330
341
|
protected _ensureServerInfo(): Promise<Record<string, any>>;
|
|
@@ -333,6 +344,7 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
333
344
|
* Flushes current queued traces.
|
|
334
345
|
*/
|
|
335
346
|
flush(): Promise<void>;
|
|
347
|
+
private _cloneCurrentOTELContext;
|
|
336
348
|
createRun(run: CreateRunParams): Promise<void>;
|
|
337
349
|
/**
|
|
338
350
|
* Batch ingest/upsert multiple runs in the Langsmith system.
|
|
@@ -971,7 +983,7 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
971
983
|
*
|
|
972
984
|
* @returns A promise that resolves once all currently pending traces have sent.
|
|
973
985
|
*/
|
|
974
|
-
awaitPendingTraceBatches(): Promise<void
|
|
986
|
+
awaitPendingTraceBatches(): Promise<void>;
|
|
975
987
|
}
|
|
976
988
|
export interface LangSmithTracingClientInterface {
|
|
977
989
|
createRun: (run: CreateRunParams) => Promise<void>;
|
package/dist/client.js
CHANGED
|
@@ -1,4 +1,6 @@
|
|
|
1
1
|
import * as uuid from "uuid";
|
|
2
|
+
import { LangSmithToOTELTranslator, } from "./experimental/otel/translator.js";
|
|
3
|
+
import { getDefaultOTLPTracerComponents, getOTELTrace, getOTELContext, } from "./singletons/otel.js";
|
|
2
4
|
import { AsyncCaller } from "./utils/async_caller.js";
|
|
3
5
|
import { convertLangChainMessageToExample, isLangChainMessage, } from "./utils/messages.js";
|
|
4
6
|
import { getEnvironmentVariable, getLangChainEnvVarsMetadata, getLangSmithEnvironmentVariable, getRuntimeEnvironment, } from "./utils/env.js";
|
|
@@ -112,6 +114,7 @@ export class AutoBatchQueue {
|
|
|
112
114
|
this.items.push({
|
|
113
115
|
action: item.action,
|
|
114
116
|
payload: item.item,
|
|
117
|
+
otelContext: item.otelContext,
|
|
115
118
|
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
116
119
|
itemPromiseResolve: itemPromiseResolve,
|
|
117
120
|
itemPromise,
|
|
@@ -145,7 +148,11 @@ export class AutoBatchQueue {
|
|
|
145
148
|
this.sizeBytes -= item.size;
|
|
146
149
|
}
|
|
147
150
|
return [
|
|
148
|
-
popped.map((it) => ({
|
|
151
|
+
popped.map((it) => ({
|
|
152
|
+
action: it.action,
|
|
153
|
+
item: it.payload,
|
|
154
|
+
otelContext: it.otelContext,
|
|
155
|
+
})),
|
|
149
156
|
() => popped.forEach((it) => it.itemPromiseResolve()),
|
|
150
157
|
];
|
|
151
158
|
}
|
|
@@ -294,6 +301,12 @@ export class Client {
|
|
|
294
301
|
writable: true,
|
|
295
302
|
value: false
|
|
296
303
|
});
|
|
304
|
+
Object.defineProperty(this, "langSmithToOTELTranslator", {
|
|
305
|
+
enumerable: true,
|
|
306
|
+
configurable: true,
|
|
307
|
+
writable: true,
|
|
308
|
+
value: void 0
|
|
309
|
+
});
|
|
297
310
|
Object.defineProperty(this, "debug", {
|
|
298
311
|
enumerable: true,
|
|
299
312
|
configurable: true,
|
|
@@ -339,6 +352,16 @@ export class Client {
|
|
|
339
352
|
this.batchSizeBytesLimit = config.batchSizeBytesLimit;
|
|
340
353
|
this.fetchOptions = config.fetchOptions || {};
|
|
341
354
|
this.manualFlushMode = config.manualFlushMode ?? this.manualFlushMode;
|
|
355
|
+
if (getEnvironmentVariable("OTEL_ENABLED") === "true") {
|
|
356
|
+
const otel_trace = getOTELTrace();
|
|
357
|
+
const existingTracerProvider = otel_trace.getTracerProvider();
|
|
358
|
+
const { tracerProvider: langSmithTracerProvider } = getDefaultOTLPTracerComponents() ?? {};
|
|
359
|
+
// If user has set global tracer before, this fails and returns false
|
|
360
|
+
const globalSuccessfullyOverridden = otel_trace.setGlobalTracerProvider(langSmithTracerProvider);
|
|
361
|
+
this.langSmithToOTELTranslator = new LangSmithToOTELTranslator(globalSuccessfullyOverridden
|
|
362
|
+
? langSmithTracerProvider
|
|
363
|
+
: existingTracerProvider);
|
|
364
|
+
}
|
|
342
365
|
}
|
|
343
366
|
static getDefaultClientConfig() {
|
|
344
367
|
const apiKey = getLangSmithEnvironmentVariable("API_KEY");
|
|
@@ -580,26 +603,62 @@ export class Client {
|
|
|
580
603
|
return;
|
|
581
604
|
}
|
|
582
605
|
try {
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
.filter((item) => item.action === "create")
|
|
586
|
-
.map((item) => item.item),
|
|
587
|
-
runUpdates: batch
|
|
588
|
-
.filter((item) => item.action === "update")
|
|
589
|
-
.map((item) => item.item),
|
|
590
|
-
};
|
|
591
|
-
const serverInfo = await this._ensureServerInfo();
|
|
592
|
-
if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
|
|
593
|
-
await this.multipartIngestRuns(ingestParams);
|
|
606
|
+
if (this.langSmithToOTELTranslator !== undefined) {
|
|
607
|
+
this._sendBatchToOTELTranslator(batch);
|
|
594
608
|
}
|
|
595
609
|
else {
|
|
596
|
-
|
|
610
|
+
const ingestParams = {
|
|
611
|
+
runCreates: batch
|
|
612
|
+
.filter((item) => item.action === "create")
|
|
613
|
+
.map((item) => item.item),
|
|
614
|
+
runUpdates: batch
|
|
615
|
+
.filter((item) => item.action === "update")
|
|
616
|
+
.map((item) => item.item),
|
|
617
|
+
};
|
|
618
|
+
const serverInfo = await this._ensureServerInfo();
|
|
619
|
+
if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
|
|
620
|
+
await this.multipartIngestRuns(ingestParams);
|
|
621
|
+
}
|
|
622
|
+
else {
|
|
623
|
+
await this.batchIngestRuns(ingestParams);
|
|
624
|
+
}
|
|
597
625
|
}
|
|
598
626
|
}
|
|
627
|
+
catch (e) {
|
|
628
|
+
console.error("Error exporting batch:", e);
|
|
629
|
+
}
|
|
599
630
|
finally {
|
|
600
631
|
done();
|
|
601
632
|
}
|
|
602
633
|
}
|
|
634
|
+
_sendBatchToOTELTranslator(batch) {
|
|
635
|
+
if (this.langSmithToOTELTranslator !== undefined) {
|
|
636
|
+
const otelContextMap = new Map();
|
|
637
|
+
const operations = [];
|
|
638
|
+
for (const item of batch) {
|
|
639
|
+
if (item.item.id && item.otelContext) {
|
|
640
|
+
otelContextMap.set(item.item.id, item.otelContext);
|
|
641
|
+
if (item.action === "create") {
|
|
642
|
+
operations.push({
|
|
643
|
+
operation: "post",
|
|
644
|
+
id: item.item.id,
|
|
645
|
+
trace_id: item.item.trace_id ?? item.item.id,
|
|
646
|
+
run: item.item,
|
|
647
|
+
});
|
|
648
|
+
}
|
|
649
|
+
else {
|
|
650
|
+
operations.push({
|
|
651
|
+
operation: "patch",
|
|
652
|
+
id: item.item.id,
|
|
653
|
+
trace_id: item.item.trace_id ?? item.item.id,
|
|
654
|
+
run: item.item,
|
|
655
|
+
});
|
|
656
|
+
}
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
this.langSmithToOTELTranslator.exportBatch(operations, otelContextMap);
|
|
660
|
+
}
|
|
661
|
+
}
|
|
603
662
|
async processRunOperation(item) {
|
|
604
663
|
clearTimeout(this.autoBatchTimeout);
|
|
605
664
|
this.autoBatchTimeout = undefined;
|
|
@@ -673,6 +732,17 @@ export class Client {
|
|
|
673
732
|
const sizeLimitBytes = await this._getBatchSizeLimitBytes();
|
|
674
733
|
await this.drainAutoBatchQueue(sizeLimitBytes);
|
|
675
734
|
}
|
|
735
|
+
_cloneCurrentOTELContext() {
|
|
736
|
+
const otel_trace = getOTELTrace();
|
|
737
|
+
const otel_context = getOTELContext();
|
|
738
|
+
if (this.langSmithToOTELTranslator !== undefined) {
|
|
739
|
+
const currentSpan = otel_trace.getActiveSpan();
|
|
740
|
+
if (currentSpan) {
|
|
741
|
+
return otel_trace.setSpan(otel_context.active(), currentSpan);
|
|
742
|
+
}
|
|
743
|
+
}
|
|
744
|
+
return undefined;
|
|
745
|
+
}
|
|
676
746
|
async createRun(run) {
|
|
677
747
|
if (!this._filterForSampling([run]).length) {
|
|
678
748
|
return;
|
|
@@ -688,9 +758,11 @@ export class Client {
|
|
|
688
758
|
if (this.autoBatchTracing &&
|
|
689
759
|
runCreate.trace_id !== undefined &&
|
|
690
760
|
runCreate.dotted_order !== undefined) {
|
|
761
|
+
const otelContext = this._cloneCurrentOTELContext();
|
|
691
762
|
void this.processRunOperation({
|
|
692
763
|
action: "create",
|
|
693
764
|
item: runCreate,
|
|
765
|
+
otelContext,
|
|
694
766
|
}).catch(console.error);
|
|
695
767
|
return;
|
|
696
768
|
}
|
|
@@ -1018,17 +1090,26 @@ export class Client {
|
|
|
1018
1090
|
if (this.autoBatchTracing &&
|
|
1019
1091
|
data.trace_id !== undefined &&
|
|
1020
1092
|
data.dotted_order !== undefined) {
|
|
1093
|
+
const otelContext = this._cloneCurrentOTELContext();
|
|
1021
1094
|
if (run.end_time !== undefined &&
|
|
1022
1095
|
data.parent_run_id === undefined &&
|
|
1023
1096
|
this.blockOnRootRunFinalization &&
|
|
1024
1097
|
!this.manualFlushMode) {
|
|
1025
1098
|
// Trigger batches as soon as a root trace ends and wait to ensure trace finishes
|
|
1026
1099
|
// in serverless environments.
|
|
1027
|
-
await this.processRunOperation({
|
|
1100
|
+
await this.processRunOperation({
|
|
1101
|
+
action: "update",
|
|
1102
|
+
item: data,
|
|
1103
|
+
otelContext,
|
|
1104
|
+
}).catch(console.error);
|
|
1028
1105
|
return;
|
|
1029
1106
|
}
|
|
1030
1107
|
else {
|
|
1031
|
-
void this.processRunOperation({
|
|
1108
|
+
void this.processRunOperation({
|
|
1109
|
+
action: "update",
|
|
1110
|
+
item: data,
|
|
1111
|
+
otelContext,
|
|
1112
|
+
}).catch(console.error);
|
|
1032
1113
|
}
|
|
1033
1114
|
return;
|
|
1034
1115
|
}
|
|
@@ -3378,15 +3459,16 @@ export class Client {
|
|
|
3378
3459
|
*
|
|
3379
3460
|
* @returns A promise that resolves once all currently pending traces have sent.
|
|
3380
3461
|
*/
|
|
3381
|
-
awaitPendingTraceBatches() {
|
|
3462
|
+
async awaitPendingTraceBatches() {
|
|
3382
3463
|
if (this.manualFlushMode) {
|
|
3383
3464
|
console.warn("[WARNING]: When tracing in manual flush mode, you must call `await client.flush()` manually to submit trace batches.");
|
|
3384
3465
|
return Promise.resolve();
|
|
3385
3466
|
}
|
|
3386
|
-
|
|
3467
|
+
await Promise.all([
|
|
3387
3468
|
...this.autoBatchQueue.items.map(({ itemPromise }) => itemPromise),
|
|
3388
3469
|
this.batchIngestCaller.queue.onIdle(),
|
|
3389
3470
|
]);
|
|
3471
|
+
await getDefaultOTLPTracerComponents()?.spanProcessor?.forceFlush();
|
|
3390
3472
|
}
|
|
3391
3473
|
}
|
|
3392
3474
|
function isExampleCreate(input) {
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.AI_SDK_TOOL_OPERATIONS = exports.AI_SDK_LLM_OPERATIONS = exports.GEN_AI_CHOICE = exports.GEN_AI_ASSISTANT_MESSAGE = exports.GEN_AI_USER_MESSAGE = exports.GEN_AI_SYSTEM_MESSAGE = exports.LANGSMITH_PARENT_RUN_ID = exports.LANGSMITH_DOTTED_ORDER = exports.LANGSMITH_TRACE_ID = exports.LANGSMITH_RUN_ID = exports.LANGSMITH_REQUEST_HEADERS = exports.LANGSMITH_REQUEST_STREAMING = exports.LANGSMITH_RUNTIME = exports.LANGSMITH_TAGS = exports.LANGSMITH_METADATA = exports.LANGSMITH_NAME = exports.LANGSMITH_RUN_TYPE = exports.LANGSMITH_SESSION_NAME = exports.LANGSMITH_SESSION_ID = exports.GEN_AI_USAGE_OUTPUT_TOKEN_DETAILS = exports.GEN_AI_USAGE_INPUT_TOKEN_DETAILS = exports.GEN_AI_RESPONSE_SYSTEM_FINGERPRINT = exports.GEN_AI_RESPONSE_SERVICE_TIER = exports.GEN_AI_RESPONSE_ID = exports.GEN_AI_SERIALIZED_DOC = exports.GEN_AI_SERIALIZED_SIGNATURE = exports.GEN_AI_SERIALIZED_NAME = exports.GEN_AI_REQUEST_EXTRA_BODY = exports.GEN_AI_REQUEST_EXTRA_QUERY = exports.GENAI_COMPLETION = exports.GENAI_PROMPT = exports.GEN_AI_RESPONSE_FINISH_REASONS = exports.GEN_AI_REQUEST_PRESENCE_PENALTY = exports.GEN_AI_REQUEST_FREQUENCY_PENALTY = exports.GEN_AI_REQUEST_TOP_P = exports.GEN_AI_REQUEST_TEMPERATURE = exports.GEN_AI_REQUEST_MAX_TOKENS = exports.GEN_AI_USAGE_TOTAL_TOKENS = exports.GEN_AI_USAGE_OUTPUT_TOKENS = exports.GEN_AI_USAGE_INPUT_TOKENS = exports.GEN_AI_RESPONSE_MODEL = exports.GEN_AI_REQUEST_MODEL = exports.GEN_AI_SYSTEM = exports.GEN_AI_OPERATION_NAME = void 0;
|
|
4
|
+
// OpenTelemetry GenAI semantic convention attribute names
|
|
5
|
+
exports.GEN_AI_OPERATION_NAME = "gen_ai.operation.name";
|
|
6
|
+
exports.GEN_AI_SYSTEM = "gen_ai.system";
|
|
7
|
+
exports.GEN_AI_REQUEST_MODEL = "gen_ai.request.model";
|
|
8
|
+
exports.GEN_AI_RESPONSE_MODEL = "gen_ai.response.model";
|
|
9
|
+
exports.GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens";
|
|
10
|
+
exports.GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens";
|
|
11
|
+
exports.GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens";
|
|
12
|
+
exports.GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens";
|
|
13
|
+
exports.GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature";
|
|
14
|
+
exports.GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p";
|
|
15
|
+
exports.GEN_AI_REQUEST_FREQUENCY_PENALTY = "gen_ai.request.frequency_penalty";
|
|
16
|
+
exports.GEN_AI_REQUEST_PRESENCE_PENALTY = "gen_ai.request.presence_penalty";
|
|
17
|
+
exports.GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons";
|
|
18
|
+
exports.GENAI_PROMPT = "gen_ai.prompt";
|
|
19
|
+
exports.GENAI_COMPLETION = "gen_ai.completion";
|
|
20
|
+
exports.GEN_AI_REQUEST_EXTRA_QUERY = "gen_ai.request.extra_query";
|
|
21
|
+
exports.GEN_AI_REQUEST_EXTRA_BODY = "gen_ai.request.extra_body";
|
|
22
|
+
exports.GEN_AI_SERIALIZED_NAME = "gen_ai.serialized.name";
|
|
23
|
+
exports.GEN_AI_SERIALIZED_SIGNATURE = "gen_ai.serialized.signature";
|
|
24
|
+
exports.GEN_AI_SERIALIZED_DOC = "gen_ai.serialized.doc";
|
|
25
|
+
exports.GEN_AI_RESPONSE_ID = "gen_ai.response.id";
|
|
26
|
+
exports.GEN_AI_RESPONSE_SERVICE_TIER = "gen_ai.response.service_tier";
|
|
27
|
+
exports.GEN_AI_RESPONSE_SYSTEM_FINGERPRINT = "gen_ai.response.system_fingerprint";
|
|
28
|
+
exports.GEN_AI_USAGE_INPUT_TOKEN_DETAILS = "gen_ai.usage.input_token_details";
|
|
29
|
+
exports.GEN_AI_USAGE_OUTPUT_TOKEN_DETAILS = "gen_ai.usage.output_token_details";
|
|
30
|
+
// LangSmith custom attributes
|
|
31
|
+
exports.LANGSMITH_SESSION_ID = "langsmith.trace.session_id";
|
|
32
|
+
exports.LANGSMITH_SESSION_NAME = "langsmith.trace.session_name";
|
|
33
|
+
exports.LANGSMITH_RUN_TYPE = "langsmith.span.kind";
|
|
34
|
+
exports.LANGSMITH_NAME = "langsmith.trace.name";
|
|
35
|
+
exports.LANGSMITH_METADATA = "langsmith.metadata";
|
|
36
|
+
exports.LANGSMITH_TAGS = "langsmith.span.tags";
|
|
37
|
+
exports.LANGSMITH_RUNTIME = "langsmith.span.runtime";
|
|
38
|
+
exports.LANGSMITH_REQUEST_STREAMING = "langsmith.request.streaming";
|
|
39
|
+
exports.LANGSMITH_REQUEST_HEADERS = "langsmith.request.headers";
|
|
40
|
+
exports.LANGSMITH_RUN_ID = "langsmith.span.id";
|
|
41
|
+
exports.LANGSMITH_TRACE_ID = "langsmith.trace.id";
|
|
42
|
+
exports.LANGSMITH_DOTTED_ORDER = "langsmith.span.dotted_order";
|
|
43
|
+
exports.LANGSMITH_PARENT_RUN_ID = "langsmith.span.parent_id";
|
|
44
|
+
// GenAI event names
|
|
45
|
+
exports.GEN_AI_SYSTEM_MESSAGE = "gen_ai.system.message";
|
|
46
|
+
exports.GEN_AI_USER_MESSAGE = "gen_ai.user.message";
|
|
47
|
+
exports.GEN_AI_ASSISTANT_MESSAGE = "gen_ai.assistant.message";
|
|
48
|
+
exports.GEN_AI_CHOICE = "gen_ai.choice";
|
|
49
|
+
exports.AI_SDK_LLM_OPERATIONS = [
|
|
50
|
+
"ai.generateText.doGenerate",
|
|
51
|
+
"ai.streamText.doStream",
|
|
52
|
+
"ai.generateObject.doGenerate",
|
|
53
|
+
"ai.streamObject.doStream",
|
|
54
|
+
];
|
|
55
|
+
exports.AI_SDK_TOOL_OPERATIONS = ["ai.toolCall"];
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
export declare const GEN_AI_OPERATION_NAME = "gen_ai.operation.name";
|
|
2
|
+
export declare const GEN_AI_SYSTEM = "gen_ai.system";
|
|
3
|
+
export declare const GEN_AI_REQUEST_MODEL = "gen_ai.request.model";
|
|
4
|
+
export declare const GEN_AI_RESPONSE_MODEL = "gen_ai.response.model";
|
|
5
|
+
export declare const GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens";
|
|
6
|
+
export declare const GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens";
|
|
7
|
+
export declare const GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens";
|
|
8
|
+
export declare const GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens";
|
|
9
|
+
export declare const GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature";
|
|
10
|
+
export declare const GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p";
|
|
11
|
+
export declare const GEN_AI_REQUEST_FREQUENCY_PENALTY = "gen_ai.request.frequency_penalty";
|
|
12
|
+
export declare const GEN_AI_REQUEST_PRESENCE_PENALTY = "gen_ai.request.presence_penalty";
|
|
13
|
+
export declare const GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons";
|
|
14
|
+
export declare const GENAI_PROMPT = "gen_ai.prompt";
|
|
15
|
+
export declare const GENAI_COMPLETION = "gen_ai.completion";
|
|
16
|
+
export declare const GEN_AI_REQUEST_EXTRA_QUERY = "gen_ai.request.extra_query";
|
|
17
|
+
export declare const GEN_AI_REQUEST_EXTRA_BODY = "gen_ai.request.extra_body";
|
|
18
|
+
export declare const GEN_AI_SERIALIZED_NAME = "gen_ai.serialized.name";
|
|
19
|
+
export declare const GEN_AI_SERIALIZED_SIGNATURE = "gen_ai.serialized.signature";
|
|
20
|
+
export declare const GEN_AI_SERIALIZED_DOC = "gen_ai.serialized.doc";
|
|
21
|
+
export declare const GEN_AI_RESPONSE_ID = "gen_ai.response.id";
|
|
22
|
+
export declare const GEN_AI_RESPONSE_SERVICE_TIER = "gen_ai.response.service_tier";
|
|
23
|
+
export declare const GEN_AI_RESPONSE_SYSTEM_FINGERPRINT = "gen_ai.response.system_fingerprint";
|
|
24
|
+
export declare const GEN_AI_USAGE_INPUT_TOKEN_DETAILS = "gen_ai.usage.input_token_details";
|
|
25
|
+
export declare const GEN_AI_USAGE_OUTPUT_TOKEN_DETAILS = "gen_ai.usage.output_token_details";
|
|
26
|
+
export declare const LANGSMITH_SESSION_ID = "langsmith.trace.session_id";
|
|
27
|
+
export declare const LANGSMITH_SESSION_NAME = "langsmith.trace.session_name";
|
|
28
|
+
export declare const LANGSMITH_RUN_TYPE = "langsmith.span.kind";
|
|
29
|
+
export declare const LANGSMITH_NAME = "langsmith.trace.name";
|
|
30
|
+
export declare const LANGSMITH_METADATA = "langsmith.metadata";
|
|
31
|
+
export declare const LANGSMITH_TAGS = "langsmith.span.tags";
|
|
32
|
+
export declare const LANGSMITH_RUNTIME = "langsmith.span.runtime";
|
|
33
|
+
export declare const LANGSMITH_REQUEST_STREAMING = "langsmith.request.streaming";
|
|
34
|
+
export declare const LANGSMITH_REQUEST_HEADERS = "langsmith.request.headers";
|
|
35
|
+
export declare const LANGSMITH_RUN_ID = "langsmith.span.id";
|
|
36
|
+
export declare const LANGSMITH_TRACE_ID = "langsmith.trace.id";
|
|
37
|
+
export declare const LANGSMITH_DOTTED_ORDER = "langsmith.span.dotted_order";
|
|
38
|
+
export declare const LANGSMITH_PARENT_RUN_ID = "langsmith.span.parent_id";
|
|
39
|
+
export declare const GEN_AI_SYSTEM_MESSAGE = "gen_ai.system.message";
|
|
40
|
+
export declare const GEN_AI_USER_MESSAGE = "gen_ai.user.message";
|
|
41
|
+
export declare const GEN_AI_ASSISTANT_MESSAGE = "gen_ai.assistant.message";
|
|
42
|
+
export declare const GEN_AI_CHOICE = "gen_ai.choice";
|
|
43
|
+
export declare const AI_SDK_LLM_OPERATIONS: string[];
|
|
44
|
+
export declare const AI_SDK_TOOL_OPERATIONS: string[];
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
// OpenTelemetry GenAI semantic convention attribute names
// (see the OTel GenAI semantic conventions for the canonical definitions).
export const GEN_AI_OPERATION_NAME = "gen_ai.operation.name";
export const GEN_AI_SYSTEM = "gen_ai.system";
export const GEN_AI_REQUEST_MODEL = "gen_ai.request.model";
export const GEN_AI_RESPONSE_MODEL = "gen_ai.response.model";
export const GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens";
export const GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens";
export const GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens";
export const GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens";
export const GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature";
export const GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p";
export const GEN_AI_REQUEST_FREQUENCY_PENALTY = "gen_ai.request.frequency_penalty";
export const GEN_AI_REQUEST_PRESENCE_PENALTY = "gen_ai.request.presence_penalty";
export const GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons";
export const GENAI_PROMPT = "gen_ai.prompt";
export const GENAI_COMPLETION = "gen_ai.completion";
export const GEN_AI_REQUEST_EXTRA_QUERY = "gen_ai.request.extra_query";
export const GEN_AI_REQUEST_EXTRA_BODY = "gen_ai.request.extra_body";
export const GEN_AI_SERIALIZED_NAME = "gen_ai.serialized.name";
export const GEN_AI_SERIALIZED_SIGNATURE = "gen_ai.serialized.signature";
export const GEN_AI_SERIALIZED_DOC = "gen_ai.serialized.doc";
export const GEN_AI_RESPONSE_ID = "gen_ai.response.id";
export const GEN_AI_RESPONSE_SERVICE_TIER = "gen_ai.response.service_tier";
export const GEN_AI_RESPONSE_SYSTEM_FINGERPRINT = "gen_ai.response.system_fingerprint";
export const GEN_AI_USAGE_INPUT_TOKEN_DETAILS = "gen_ai.usage.input_token_details";
export const GEN_AI_USAGE_OUTPUT_TOKEN_DETAILS = "gen_ai.usage.output_token_details";
// LangSmith custom attributes
export const LANGSMITH_SESSION_ID = "langsmith.trace.session_id";
export const LANGSMITH_SESSION_NAME = "langsmith.trace.session_name";
export const LANGSMITH_RUN_TYPE = "langsmith.span.kind";
export const LANGSMITH_NAME = "langsmith.trace.name";
export const LANGSMITH_METADATA = "langsmith.metadata";
export const LANGSMITH_TAGS = "langsmith.span.tags";
export const LANGSMITH_RUNTIME = "langsmith.span.runtime";
export const LANGSMITH_REQUEST_STREAMING = "langsmith.request.streaming";
export const LANGSMITH_REQUEST_HEADERS = "langsmith.request.headers";
export const LANGSMITH_RUN_ID = "langsmith.span.id";
export const LANGSMITH_TRACE_ID = "langsmith.trace.id";
export const LANGSMITH_DOTTED_ORDER = "langsmith.span.dotted_order";
export const LANGSMITH_PARENT_RUN_ID = "langsmith.span.parent_id";
// GenAI event names
export const GEN_AI_SYSTEM_MESSAGE = "gen_ai.system.message";
export const GEN_AI_USER_MESSAGE = "gen_ai.user.message";
export const GEN_AI_ASSISTANT_MESSAGE = "gen_ai.assistant.message";
export const GEN_AI_CHOICE = "gen_ai.choice";
// Vercel AI SDK operation names that map to LLM-type spans.
export const AI_SDK_LLM_OPERATIONS = [
  "ai.generateText.doGenerate",
  "ai.streamText.doStream",
  "ai.generateObject.doGenerate",
  "ai.streamObject.doStream",
];
// Vercel AI SDK operation names that map to tool-type spans.
export const AI_SDK_TOOL_OPERATIONS = ["ai.toolCall"];