langsmith 0.4.7 → 0.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.cjs +10 -2
- package/dist/client.d.ts +1 -0
- package/dist/client.js +10 -2
- package/dist/experimental/anthropic/context.cjs +187 -0
- package/dist/experimental/anthropic/context.d.ts +5 -0
- package/dist/experimental/anthropic/context.js +183 -0
- package/dist/experimental/anthropic/index.cjs +188 -0
- package/dist/experimental/anthropic/index.d.ts +30 -0
- package/dist/experimental/anthropic/index.js +185 -0
- package/dist/experimental/anthropic/messages.cjs +102 -0
- package/dist/experimental/anthropic/messages.d.ts +6 -0
- package/dist/experimental/anthropic/messages.js +96 -0
- package/dist/experimental/anthropic/types.cjs +3 -0
- package/dist/experimental/anthropic/types.d.ts +50 -0
- package/dist/experimental/anthropic/types.js +2 -0
- package/dist/experimental/anthropic/usage.cjs +180 -0
- package/dist/experimental/anthropic/usage.d.ts +1 -0
- package/dist/experimental/anthropic/usage.js +175 -0
- package/dist/experimental/anthropic/utils.cjs +24 -0
- package/dist/experimental/anthropic/utils.d.ts +1 -0
- package/dist/experimental/anthropic/utils.js +20 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +1 -1
- package/dist/traceable.cjs +38 -4
- package/dist/traceable.d.ts +4 -0
- package/dist/traceable.js +38 -4
- package/dist/utils/usage.cjs +6 -7
- package/dist/utils/usage.js +6 -7
- package/dist/wrappers/gemini.cjs +434 -0
- package/dist/wrappers/gemini.d.ts +46 -0
- package/dist/wrappers/gemini.js +431 -0
- package/experimental/anthropic.cjs +1 -0
- package/experimental/anthropic.d.cts +1 -0
- package/experimental/anthropic.d.ts +1 -0
- package/experimental/anthropic.js +1 -0
- package/package.json +16 -1
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
import { convertAnthropicUsageToInputTokenDetails } from "../../utils/usage.js";
|
|
2
|
+
import { getNumberProperty } from "./utils.js";
|
|
3
|
+
/**
 * Aggregates usage from modelUsage breakdown (includes all models, including hidden ones).
 * This provides accurate totals when multiple models are used.
 * @internal
 */
export function aggregateUsageFromModelUsage(modelUsage) {
    let inputSum = 0;
    let outputSum = 0;
    let cacheReadSum = 0;
    let cacheCreationSum = 0;
    // Walk every model's stats so that models hidden from the top-level
    // usage report still contribute to the totals.
    for (const stats of Object.values(modelUsage)) {
        inputSum += stats.inputTokens || 0;
        outputSum += stats.outputTokens || 0;
        cacheReadSum += stats.cacheReadInputTokens || 0;
        cacheCreationSum += stats.cacheCreationInputTokens || 0;
    }
    const metrics = {};
    // Only surface input_token_details when any cache tokens were observed.
    if (cacheReadSum > 0 || cacheCreationSum > 0) {
        metrics.input_token_details = {
            cache_read: cacheReadSum,
            cache_creation: cacheCreationSum,
        };
    }
    // Prompt tokens = fresh input + cache reads + cache creations.
    const promptSum = inputSum + cacheReadSum + cacheCreationSum;
    metrics.input_tokens = promptSum;
    metrics.output_tokens = outputSum;
    metrics.total_tokens = promptSum + outputSum;
    return metrics;
}
|
|
35
|
+
/**
 * Extracts and normalizes usage metrics from a Claude Agent SDK message.
 * @internal
 */
export function extractUsageFromMessage(message) {
    // Usage lives at message.message.usage for assistant messages and at
    // message.usage for result messages; other message types carry none.
    const usage =
        message.type === "assistant"
            ? message.message?.usage
            : message.type === "result"
                ? message.usage
                : undefined;
    if (!usage || typeof usage !== "object") {
        return {};
    }
    // Standard token counts, normalized to LangSmith's expected field names.
    const inputTokens = getNumberProperty(usage, "input_tokens") || 0;
    const outputTokens = getNumberProperty(usage, "output_tokens") || 0;
    const cacheRead = getNumberProperty(usage, "cache_read_input_tokens") || 0;
    const cacheCreation = getNumberProperty(usage, "cache_creation_input_tokens") || 0;
    const metrics = {};
    // Attach input_token_details only when cache tokens are present and the
    // converter produced at least one field.
    if (cacheRead > 0 || cacheCreation > 0) {
        const details = convertAnthropicUsageToInputTokenDetails(usage);
        if (Object.keys(details).length > 0) {
            metrics.input_token_details = details;
        }
    }
    // Cache tokens count toward the input total (matches Python's
    // sum_anthropic_tokens behavior).
    const promptTokens = inputTokens + cacheRead + cacheCreation;
    metrics.input_tokens = promptTokens;
    metrics.output_tokens = outputTokens;
    metrics.total_tokens = promptTokens + outputTokens;
    return metrics;
}
|
|
73
|
+
/**
 * Corrects usage metrics for assistant runs based on the results of the runs.
 *
 * The result message reports per-model totals that can exceed what the
 * individual assistant runs recorded. For each model, this computes the
 * positive shortfall between the result totals and the summed run totals and
 * adds it, IN PLACE, to the last assistant run's `extra.metadata.usage_metadata`.
 *
 * @param resultUsages - Per-model usage keyed by model id, with camelCase
 *   fields (inputTokens, outputTokens, cacheReadInputTokens,
 *   cacheCreationInputTokens).
 * @param assistantRuns - Runs whose metadata may be mutated; model is taken
 *   from `extra.metadata.ls_model_name`.
 * @internal
 */
export function correctUsageFromResults(resultUsages, assistantRuns) {
    // Group runs by model id so the shortfall can be applied to the last run
    // of the matching model. Runs without ls_model_name are ignored.
    const runByModel = assistantRuns.reduce((acc, run) => {
        const modelId = run.extra?.metadata?.ls_model_name;
        if (!modelId)
            return acc;
        acc[modelId] ??= [];
        acc[modelId].push(run);
        return acc;
    }, {});
    // Sum each model's recorded run usage, defaulting missing fields to 0.
    const runUsageByModel = assistantRuns.reduce((acc, run) => {
        const modelId = run.extra?.metadata?.ls_model_name;
        if (!modelId)
            return acc;
        // Shallow-copy so the ??= defaults below don't mutate the run itself.
        const usageMetadata = { ...run.extra?.metadata?.usage_metadata };
        usageMetadata.input_tokens ??= 0;
        usageMetadata.output_tokens ??= 0;
        usageMetadata.total_tokens ??= 0;
        usageMetadata.input_token_details = {
            ...usageMetadata.input_token_details,
        };
        usageMetadata.input_token_details.cache_read ??= 0;
        // NOTE(review): these read the `ephemeral_1h_input_tokens` spelling,
        // while convertAnthropicUsageToInputTokenDetails emits
        // `ephemeral_1hr_input_tokens` — confirm which key producers actually
        // write; if it's the latter, 1-hour cache tokens are never summed here.
        usageMetadata.input_token_details.ephemeral_5m_input_tokens ??= 0;
        usageMetadata.input_token_details.ephemeral_1h_input_tokens ??= 0;
        acc[modelId] ??= {
            input_tokens: 0,
            output_tokens: 0,
            total_tokens: 0,
            input_token_details: {
                cache_read: 0,
                cache_creation: 0,
            },
        };
        acc[modelId].input_tokens += usageMetadata.input_tokens;
        acc[modelId].output_tokens += usageMetadata.output_tokens;
        acc[modelId].total_tokens += usageMetadata.total_tokens;
        acc[modelId].input_token_details.cache_read +=
            usageMetadata.input_token_details.cache_read;
        // Both 5m and 1h ephemeral tokens are folded into cache_creation.
        acc[modelId].input_token_details.cache_creation +=
            usageMetadata.input_token_details.ephemeral_5m_input_tokens;
        acc[modelId].input_token_details.cache_creation +=
            usageMetadata.input_token_details.ephemeral_1h_input_tokens;
        return acc;
    }, {});
    // Normalize result usage to snake_case totals; cache tokens count toward
    // input_tokens and total_tokens.
    const resultUsageMap = Object.fromEntries(Object.entries(resultUsages).map(([modelId, usage]) => [
        modelId,
        {
            input_tokens: usage.inputTokens +
                usage.cacheReadInputTokens +
                usage.cacheCreationInputTokens,
            output_tokens: usage.outputTokens,
            total_tokens: usage.inputTokens +
                usage.cacheReadInputTokens +
                usage.cacheCreationInputTokens +
                usage.outputTokens,
            input_token_details: {
                cache_read: usage.cacheReadInputTokens,
                cache_creation: usage.cacheCreationInputTokens,
            },
        },
    ]));
    for (const modelId in resultUsageMap) {
        const lastRun = runByModel[modelId]?.at(-1);
        const runsUsage = runUsageByModel[modelId];
        const resultUsage = resultUsageMap[modelId];
        // No matching assistant runs for this model — nothing to correct.
        if (!runsUsage || !lastRun)
            continue;
        // Positive shortfall per metric; negatives (runs over-reporting) are
        // clamped to 0 rather than subtracted.
        const difference = {
            input_tokens: Math.max(0, resultUsage.input_tokens - runsUsage.input_tokens),
            output_tokens: Math.max(0, resultUsage.output_tokens - runsUsage.output_tokens),
            total_tokens: Math.max(0, resultUsage.total_tokens - runsUsage.total_tokens),
            cache_read: Math.max(0, resultUsage.input_token_details.cache_read -
                runsUsage.input_token_details.cache_read),
            cache_creation: Math.max(0, resultUsage.input_token_details.cache_creation -
                runsUsage.input_token_details.cache_creation),
        };
        if (Object.values(difference).some((value) => value > 0)) {
            // apply difference to the last run
            lastRun.extra ??= {};
            lastRun.extra.metadata ??= {};
            lastRun.extra.metadata.usage_metadata ??= {};
            lastRun.extra.metadata.usage_metadata.input_tokens ??= 0;
            lastRun.extra.metadata.usage_metadata.input_tokens +=
                difference.input_tokens;
            lastRun.extra.metadata.usage_metadata.output_tokens ??= 0;
            lastRun.extra.metadata.usage_metadata.output_tokens +=
                difference.output_tokens;
            lastRun.extra.metadata.usage_metadata.total_tokens ??= 0;
            lastRun.extra.metadata.usage_metadata.total_tokens +=
                difference.total_tokens;
            lastRun.extra.metadata.usage_metadata.input_token_details ??= {};
            lastRun.extra.metadata.usage_metadata.input_token_details.cache_read ??= 0;
            lastRun.extra.metadata.usage_metadata.input_token_details.cache_read +=
                difference.cache_read;
            // Missing cache-creation tokens are attributed to the 5m bucket,
            // since the result message doesn't break down TTLs.
            lastRun.extra.metadata.usage_metadata.input_token_details.ephemeral_5m_input_tokens ??= 0;
            lastRun.extra.metadata.usage_metadata.input_token_details.ephemeral_5m_input_tokens +=
                difference.cache_creation;
        }
    }
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getNumberProperty = getNumberProperty;
|
|
4
|
+
exports.isIterable = isIterable;
|
|
5
|
+
/**
 * Gets a number property from an object.
 * Returns undefined when the object is missing, the key is absent, or the
 * value is not a number.
 * @internal
 */
function getNumberProperty(obj, key) {
    // Guard: need a non-null object that actually carries `key`.
    if (!obj || typeof obj !== "object" || !(key in obj)) {
        return undefined;
    }
    const candidate = Reflect.get(obj, key);
    if (typeof candidate === "number") {
        return candidate;
    }
    return undefined;
}
|
|
16
|
+
/**
 * Checks if a value is iterable.
 * Note: only object iterables qualify; primitive strings return false here
 * because `typeof` must be "object".
 * @internal
 */
function isIterable(value) {
    if (typeof value !== "object" || value === null) {
        return false;
    }
    return typeof value[Symbol.iterator] === "function";
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * Gets a number property from an object.
 * Returns undefined when the object is missing, the key is absent, or the
 * value is not a number.
 * @internal
 */
export function getNumberProperty(obj, key) {
    // Guard: need a non-null object that actually carries `key`.
    if (!obj || typeof obj !== "object" || !(key in obj)) {
        return undefined;
    }
    const candidate = Reflect.get(obj, key);
    if (typeof candidate === "number") {
        return candidate;
    }
    return undefined;
}
|
|
12
|
+
/**
 * Checks if a value is iterable.
 * Note: only object iterables qualify; primitive strings return false here
 * because `typeof` must be "object".
 * @internal
 */
export function isIterable(value) {
    if (typeof value !== "object" || value === null) {
        return false;
    }
    return typeof value[Symbol.iterator] === "function";
}
|
package/dist/index.cjs
CHANGED
|
@@ -15,4 +15,4 @@ Object.defineProperty(exports, "uuid7FromTime", { enumerable: true, get: functio
|
|
|
15
15
|
var prompts_cache_js_1 = require("./utils/prompts_cache.cjs");
|
|
16
16
|
Object.defineProperty(exports, "Cache", { enumerable: true, get: function () { return prompts_cache_js_1.Cache; } });
|
|
17
17
|
// Update using yarn bump-version
|
|
18
|
-
exports.__version__ = "0.4.
|
|
18
|
+
exports.__version__ = "0.4.9";
|
package/dist/index.d.ts
CHANGED
|
@@ -5,4 +5,4 @@ export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
|
5
5
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
6
6
|
export { uuid7, uuid7FromTime } from "./uuid.js";
|
|
7
7
|
export { Cache, type CacheConfig, type CacheMetrics, } from "./utils/prompts_cache.js";
|
|
8
|
-
export declare const __version__ = "0.4.
|
|
8
|
+
export declare const __version__ = "0.4.9";
|
package/dist/index.js
CHANGED
|
@@ -5,4 +5,4 @@ export { getDefaultProjectName } from "./utils/project.js";
|
|
|
5
5
|
export { uuid7, uuid7FromTime } from "./uuid.js";
|
|
6
6
|
export { Cache, } from "./utils/prompts_cache.js";
|
|
7
7
|
// Update using yarn bump-version
|
|
8
|
-
export const __version__ = "0.4.
|
|
8
|
+
export const __version__ = "0.4.9";
|
package/dist/traceable.cjs
CHANGED
|
@@ -297,7 +297,7 @@ const getSerializablePromise = (arg) => {
|
|
|
297
297
|
});
|
|
298
298
|
return promiseProxy;
|
|
299
299
|
};
|
|
300
|
-
const convertSerializableArg = (arg) => {
|
|
300
|
+
const convertSerializableArg = (arg, options) => {
|
|
301
301
|
if ((0, asserts_js_1.isReadableStream)(arg)) {
|
|
302
302
|
const proxyState = [];
|
|
303
303
|
const transform = new TransformStream({
|
|
@@ -386,7 +386,42 @@ const convertSerializableArg = (arg) => {
|
|
|
386
386
|
return { converted, deferredInputs: true };
|
|
387
387
|
}
|
|
388
388
|
if ((0, asserts_js_1.isThenable)(arg)) {
|
|
389
|
-
return {
|
|
389
|
+
return {
|
|
390
|
+
converted: getSerializablePromise(arg),
|
|
391
|
+
deferredInputs: true,
|
|
392
|
+
};
|
|
393
|
+
}
|
|
394
|
+
const maxDepth = options?.maxDepth ?? 0;
|
|
395
|
+
const currentDepth = options?.depth ?? 0;
|
|
396
|
+
if (currentDepth < maxDepth) {
|
|
397
|
+
if (Array.isArray(arg)) {
|
|
398
|
+
const converted = [];
|
|
399
|
+
let deferredInputs = false;
|
|
400
|
+
for (let i = 0; i < arg.length; i++) {
|
|
401
|
+
const res = convertSerializableArg(arg[i], {
|
|
402
|
+
depth: currentDepth + 1,
|
|
403
|
+
maxDepth,
|
|
404
|
+
});
|
|
405
|
+
converted.push(res.converted);
|
|
406
|
+
deferredInputs = deferredInputs || res.deferredInputs;
|
|
407
|
+
}
|
|
408
|
+
return { converted, deferredInputs };
|
|
409
|
+
}
|
|
410
|
+
if (typeof arg === "object" && arg != null) {
|
|
411
|
+
const converted = {};
|
|
412
|
+
let deferredInputs = false;
|
|
413
|
+
for (const key in arg) {
|
|
414
|
+
if (Object.prototype.hasOwnProperty.call(arg, key)) {
|
|
415
|
+
const res = convertSerializableArg(arg[key], {
|
|
416
|
+
...options,
|
|
417
|
+
depth: currentDepth + 1,
|
|
418
|
+
});
|
|
419
|
+
converted[key] = res.converted;
|
|
420
|
+
deferredInputs = deferredInputs || res.deferredInputs;
|
|
421
|
+
}
|
|
422
|
+
}
|
|
423
|
+
return { converted, deferredInputs };
|
|
424
|
+
}
|
|
390
425
|
}
|
|
391
426
|
return { converted: arg, deferredInputs: false };
|
|
392
427
|
};
|
|
@@ -472,11 +507,10 @@ function traceable(wrappedFunc, config) {
|
|
|
472
507
|
runEndedPromiseResolver();
|
|
473
508
|
};
|
|
474
509
|
const asyncLocalStorage = traceable_js_1.AsyncLocalStorageProviderSingleton.getInstance();
|
|
475
|
-
// TODO: deal with possible nested promises and async iterables
|
|
476
510
|
const processedArgs = args;
|
|
477
511
|
let deferredInputs = false;
|
|
478
512
|
for (let i = 0; i < processedArgs.length; i++) {
|
|
479
|
-
const { converted, deferredInputs: argDefersInput } = convertSerializableArg(processedArgs[i]);
|
|
513
|
+
const { converted, deferredInputs: argDefersInput } = convertSerializableArg(processedArgs[i], config?.__deferredSerializableArgOptions);
|
|
480
514
|
processedArgs[i] = converted;
|
|
481
515
|
deferredInputs = deferredInputs || argDefersInput;
|
|
482
516
|
}
|
package/dist/traceable.d.ts
CHANGED
|
@@ -19,6 +19,10 @@ export type TraceableConfig<Func extends (...args: any[]) => any> = Partial<Omit
|
|
|
19
19
|
argsConfigPath?: [number] | [number, string];
|
|
20
20
|
tracer?: OTELTracer;
|
|
21
21
|
__finalTracedIteratorKey?: string;
|
|
22
|
+
__deferredSerializableArgOptions?: {
|
|
23
|
+
depth?: number;
|
|
24
|
+
maxDepth?: number;
|
|
25
|
+
};
|
|
22
26
|
/**
|
|
23
27
|
* Extract attachments from args and return remaining args.
|
|
24
28
|
* @param args Arguments of the traced function
|
package/dist/traceable.js
CHANGED
|
@@ -293,7 +293,7 @@ const getSerializablePromise = (arg) => {
|
|
|
293
293
|
});
|
|
294
294
|
return promiseProxy;
|
|
295
295
|
};
|
|
296
|
-
const convertSerializableArg = (arg) => {
|
|
296
|
+
const convertSerializableArg = (arg, options) => {
|
|
297
297
|
if (isReadableStream(arg)) {
|
|
298
298
|
const proxyState = [];
|
|
299
299
|
const transform = new TransformStream({
|
|
@@ -382,7 +382,42 @@ const convertSerializableArg = (arg) => {
|
|
|
382
382
|
return { converted, deferredInputs: true };
|
|
383
383
|
}
|
|
384
384
|
if (isThenable(arg)) {
|
|
385
|
-
return {
|
|
385
|
+
return {
|
|
386
|
+
converted: getSerializablePromise(arg),
|
|
387
|
+
deferredInputs: true,
|
|
388
|
+
};
|
|
389
|
+
}
|
|
390
|
+
const maxDepth = options?.maxDepth ?? 0;
|
|
391
|
+
const currentDepth = options?.depth ?? 0;
|
|
392
|
+
if (currentDepth < maxDepth) {
|
|
393
|
+
if (Array.isArray(arg)) {
|
|
394
|
+
const converted = [];
|
|
395
|
+
let deferredInputs = false;
|
|
396
|
+
for (let i = 0; i < arg.length; i++) {
|
|
397
|
+
const res = convertSerializableArg(arg[i], {
|
|
398
|
+
depth: currentDepth + 1,
|
|
399
|
+
maxDepth,
|
|
400
|
+
});
|
|
401
|
+
converted.push(res.converted);
|
|
402
|
+
deferredInputs = deferredInputs || res.deferredInputs;
|
|
403
|
+
}
|
|
404
|
+
return { converted, deferredInputs };
|
|
405
|
+
}
|
|
406
|
+
if (typeof arg === "object" && arg != null) {
|
|
407
|
+
const converted = {};
|
|
408
|
+
let deferredInputs = false;
|
|
409
|
+
for (const key in arg) {
|
|
410
|
+
if (Object.prototype.hasOwnProperty.call(arg, key)) {
|
|
411
|
+
const res = convertSerializableArg(arg[key], {
|
|
412
|
+
...options,
|
|
413
|
+
depth: currentDepth + 1,
|
|
414
|
+
});
|
|
415
|
+
converted[key] = res.converted;
|
|
416
|
+
deferredInputs = deferredInputs || res.deferredInputs;
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
return { converted, deferredInputs };
|
|
420
|
+
}
|
|
386
421
|
}
|
|
387
422
|
return { converted: arg, deferredInputs: false };
|
|
388
423
|
};
|
|
@@ -468,11 +503,10 @@ export function traceable(wrappedFunc, config) {
|
|
|
468
503
|
runEndedPromiseResolver();
|
|
469
504
|
};
|
|
470
505
|
const asyncLocalStorage = AsyncLocalStorageProviderSingleton.getInstance();
|
|
471
|
-
// TODO: deal with possible nested promises and async iterables
|
|
472
506
|
const processedArgs = args;
|
|
473
507
|
let deferredInputs = false;
|
|
474
508
|
for (let i = 0; i < processedArgs.length; i++) {
|
|
475
|
-
const { converted, deferredInputs: argDefersInput } = convertSerializableArg(processedArgs[i]);
|
|
509
|
+
const { converted, deferredInputs: argDefersInput } = convertSerializableArg(processedArgs[i], config?.__deferredSerializableArgOptions);
|
|
476
510
|
processedArgs[i] = converted;
|
|
477
511
|
deferredInputs = deferredInputs || argDefersInput;
|
|
478
512
|
}
|
package/dist/utils/usage.cjs
CHANGED
|
@@ -2,28 +2,27 @@
|
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.convertAnthropicUsageToInputTokenDetails = void 0;
|
|
4
4
|
/**
 * Converts Anthropic API usage fields into LangSmith input_token_details.
 * Prefers the per-TTL `cache_creation` breakdown; falls back to the flat
 * `cache_creation_input_tokens` count when the breakdown is absent.
 */
const convertAnthropicUsageToInputTokenDetails = (usage) => {
    const details = {};
    const creation = usage.cache_creation;
    if (creation != null && typeof creation === "object") {
        if (typeof creation.ephemeral_5m_input_tokens === "number") {
            details.ephemeral_5m_input_tokens = creation.ephemeral_5m_input_tokens;
        }
        if (typeof creation.ephemeral_1h_input_tokens === "number") {
            details.ephemeral_1hr_input_tokens = creation.ephemeral_1h_input_tokens;
        }
    }
    else if (typeof usage.cache_creation_input_tokens === "number") {
        // If cache_creation was not returned (no beta header passed),
        // fall back to assuming 5m cache tokens.
        details.ephemeral_5m_input_tokens = usage.cache_creation_input_tokens;
    }
    if (typeof usage.cache_read_input_tokens === "number") {
        details.cache_read = usage.cache_read_input_tokens;
    }
    return details;
};
|
|
29
28
|
exports.convertAnthropicUsageToInputTokenDetails = convertAnthropicUsageToInputTokenDetails;
|
package/dist/utils/usage.js
CHANGED
|
@@ -1,25 +1,24 @@
|
|
|
1
1
|
/**
 * Converts Anthropic API usage fields into LangSmith input_token_details.
 * Prefers the per-TTL `cache_creation` breakdown; falls back to the flat
 * `cache_creation_input_tokens` count when the breakdown is absent.
 */
export const convertAnthropicUsageToInputTokenDetails = (usage) => {
    const details = {};
    const creation = usage.cache_creation;
    if (creation != null && typeof creation === "object") {
        if (typeof creation.ephemeral_5m_input_tokens === "number") {
            details.ephemeral_5m_input_tokens = creation.ephemeral_5m_input_tokens;
        }
        if (typeof creation.ephemeral_1h_input_tokens === "number") {
            details.ephemeral_1hr_input_tokens = creation.ephemeral_1h_input_tokens;
        }
    }
    else if (typeof usage.cache_creation_input_tokens === "number") {
        // If cache_creation was not returned (no beta header passed),
        // fall back to assuming 5m cache tokens.
        details.ephemeral_5m_input_tokens = usage.cache_creation_input_tokens;
    }
    if (typeof usage.cache_read_input_tokens === "number") {
        details.cache_read = usage.cache_read_input_tokens;
    }
    return details;
};
|