langsmith 0.4.4 → 0.4.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.cjs +62 -1
- package/dist/client.d.ts +45 -0
- package/dist/client.js +62 -1
- package/dist/index.cjs +4 -2
- package/dist/index.d.ts +2 -1
- package/dist/index.js +2 -1
- package/dist/run_trees.cjs +21 -1
- package/dist/run_trees.js +21 -1
- package/dist/utils/prompts_cache.cjs +292 -0
- package/dist/utils/prompts_cache.d.ts +142 -0
- package/dist/utils/prompts_cache.js +288 -0
- package/dist/utils/prompts_cache_fs.browser.cjs +24 -0
- package/dist/utils/prompts_cache_fs.browser.d.ts +16 -0
- package/dist/utils/prompts_cache_fs.browser.js +20 -0
- package/dist/utils/prompts_cache_fs.cjs +86 -0
- package/dist/utils/prompts_cache_fs.d.ts +16 -0
- package/dist/utils/prompts_cache_fs.js +49 -0
- package/package.json +4 -1
package/dist/client.cjs
CHANGED
|
@@ -46,6 +46,7 @@ const _uuid_js_1 = require("./utils/_uuid.cjs");
|
|
|
46
46
|
const warn_js_1 = require("./utils/warn.cjs");
|
|
47
47
|
const prompts_js_1 = require("./utils/prompts.cjs");
|
|
48
48
|
const error_js_1 = require("./utils/error.cjs");
|
|
49
|
+
const prompts_cache_js_1 = require("./utils/prompts_cache.cjs");
|
|
49
50
|
const fetch_js_1 = require("./singletons/fetch.cjs");
|
|
50
51
|
const index_js_2 = require("./utils/fast-safe-stringify/index.cjs");
|
|
51
52
|
function mergeRuntimeEnvIntoRun(run, cachedEnvVars, omitTracedRuntimeInfo) {
|
|
@@ -410,6 +411,12 @@ class Client {
|
|
|
410
411
|
writable: true,
|
|
411
412
|
value: void 0
|
|
412
413
|
});
|
|
414
|
+
Object.defineProperty(this, "_cache", {
|
|
415
|
+
enumerable: true,
|
|
416
|
+
configurable: true,
|
|
417
|
+
writable: true,
|
|
418
|
+
value: void 0
|
|
419
|
+
});
|
|
413
420
|
Object.defineProperty(this, "multipartStreamingDisabled", {
|
|
414
421
|
enumerable: true,
|
|
415
422
|
configurable: true,
|
|
@@ -481,6 +488,16 @@ class Client {
|
|
|
481
488
|
}
|
|
482
489
|
// Cache metadata env vars once during construction to avoid repeatedly scanning process.env
|
|
483
490
|
this.cachedLSEnvVarsForMetadata = (0, env_js_1.getLangSmithEnvVarsMetadata)();
|
|
491
|
+
// Initialize cache
|
|
492
|
+
if (config.cache === true) {
|
|
493
|
+
this._cache = new prompts_cache_js_1.Cache();
|
|
494
|
+
}
|
|
495
|
+
else if (config.cache && typeof config.cache === "object") {
|
|
496
|
+
this._cache = config.cache;
|
|
497
|
+
}
|
|
498
|
+
else {
|
|
499
|
+
this._cache = undefined;
|
|
500
|
+
}
|
|
484
501
|
}
|
|
485
502
|
static getDefaultClientConfig() {
|
|
486
503
|
const apiKey = (0, env_js_1.getLangSmithEnvironmentVariable)("API_KEY");
|
|
@@ -3784,7 +3801,18 @@ class Client {
|
|
|
3784
3801
|
});
|
|
3785
3802
|
return response.json();
|
|
3786
3803
|
}
|
|
3787
|
-
|
|
3804
|
+
/**
|
|
3805
|
+
* Generate a cache key for a prompt.
|
|
3806
|
+
* Format: "{identifier}" or "{identifier}:with_model"
|
|
3807
|
+
*/
|
|
3808
|
+
_getPromptCacheKey(promptIdentifier, includeModel) {
|
|
3809
|
+
const suffix = includeModel ? ":with_model" : "";
|
|
3810
|
+
return `${promptIdentifier}${suffix}`;
|
|
3811
|
+
}
|
|
3812
|
+
/**
|
|
3813
|
+
* Fetch a prompt commit directly from the API (bypassing cache).
|
|
3814
|
+
*/
|
|
3815
|
+
async _fetchPromptFromApi(promptIdentifier, options) {
|
|
3788
3816
|
const [owner, promptName, commitHash] = (0, prompts_js_1.parsePromptIdentifier)(promptIdentifier);
|
|
3789
3817
|
const response = await this.caller.call(async () => {
|
|
3790
3818
|
const res = await this._fetch(`${this.apiUrl}/commits/${owner}/${promptName}/${commitHash}${options?.includeModel ? "?include_model=true" : ""}`, {
|
|
@@ -3805,6 +3833,22 @@ class Client {
|
|
|
3805
3833
|
examples: result.examples,
|
|
3806
3834
|
};
|
|
3807
3835
|
}
|
|
3836
|
+
async pullPromptCommit(promptIdentifier, options) {
|
|
3837
|
+
// Check cache first if not skipped
|
|
3838
|
+
if (!options?.skipCache && this._cache) {
|
|
3839
|
+
const cacheKey = this._getPromptCacheKey(promptIdentifier, options?.includeModel);
|
|
3840
|
+
const cached = this._cache.get(cacheKey);
|
|
3841
|
+
if (cached) {
|
|
3842
|
+
return cached;
|
|
3843
|
+
}
|
|
3844
|
+
// Cache miss - fetch from API and cache it
|
|
3845
|
+
const result = await this._fetchPromptFromApi(promptIdentifier, options);
|
|
3846
|
+
this._cache.set(cacheKey, result);
|
|
3847
|
+
return result;
|
|
3848
|
+
}
|
|
3849
|
+
// No cache or skip cache - fetch directly
|
|
3850
|
+
return this._fetchPromptFromApi(promptIdentifier, options);
|
|
3851
|
+
}
|
|
3808
3852
|
/**
|
|
3809
3853
|
* This method should not be used directly, use `import { pull } from "langchain/hub"` instead.
|
|
3810
3854
|
* Using this method directly returns the JSON string of the prompt rather than a LangChain object.
|
|
@@ -3813,6 +3857,7 @@ class Client {
|
|
|
3813
3857
|
async _pullPrompt(promptIdentifier, options) {
|
|
3814
3858
|
const promptObject = await this.pullPromptCommit(promptIdentifier, {
|
|
3815
3859
|
includeModel: options?.includeModel,
|
|
3860
|
+
skipCache: options?.skipCache,
|
|
3816
3861
|
});
|
|
3817
3862
|
const prompt = JSON.stringify(promptObject.manifest);
|
|
3818
3863
|
return prompt;
|
|
@@ -3927,6 +3972,22 @@ class Client {
|
|
|
3927
3972
|
throw new Error(`Invalid public ${kind} URL or token: ${urlOrToken}`);
|
|
3928
3973
|
}
|
|
3929
3974
|
}
|
|
3975
|
+
/**
|
|
3976
|
+
* Get the cache instance, if caching is enabled.
|
|
3977
|
+
* Useful for accessing cache metrics or manually managing the cache.
|
|
3978
|
+
*/
|
|
3979
|
+
get cache() {
|
|
3980
|
+
return this._cache;
|
|
3981
|
+
}
|
|
3982
|
+
/**
|
|
3983
|
+
* Cleanup resources held by the client.
|
|
3984
|
+
* Stops the cache's background refresh timer.
|
|
3985
|
+
*/
|
|
3986
|
+
cleanup() {
|
|
3987
|
+
if (this._cache) {
|
|
3988
|
+
this._cache.stop();
|
|
3989
|
+
}
|
|
3990
|
+
}
|
|
3930
3991
|
/**
|
|
3931
3992
|
* Awaits all pending trace batches. Useful for environments where
|
|
3932
3993
|
* you need to be sure that all tracing requests finish before execution ends,
|
package/dist/client.d.ts
CHANGED
|
@@ -2,6 +2,7 @@ import type { OTELContext } from "./experimental/otel/types.js";
|
|
|
2
2
|
import { AsyncCallerParams } from "./utils/async_caller.js";
|
|
3
3
|
import { ComparativeExperiment, DataType, Dataset, DatasetDiffInfo, DatasetShareSchema, Example, ExampleCreate, ExampleUpdate, ExampleUpdateWithoutId, Feedback, FeedbackConfig, FeedbackIngestToken, KVMap, LangChainBaseMessage, LangSmithSettings, LikePromptResponse, Prompt, PromptCommit, PromptSortField, Run, RunCreate, RunUpdate, ScoreType, ExampleSearch, TimeDelta, TracerSession, TracerSessionResult, ValueType, AnnotationQueue, RunWithAnnotationQueueInfo, Attachments, UploadExamplesResponse, UpdateExamplesResponse, DatasetVersion, AnnotationQueueWithDetails } from "./schemas.js";
|
|
4
4
|
import { EvaluationResult, EvaluationResults } from "./evaluation/evaluator.js";
|
|
5
|
+
import { Cache } from "./utils/prompts_cache.js";
|
|
5
6
|
export interface ClientConfig {
|
|
6
7
|
apiUrl?: string;
|
|
7
8
|
apiKey?: string;
|
|
@@ -49,6 +50,28 @@ export interface ClientConfig {
|
|
|
49
50
|
* Custom fetch implementation. Useful for testing.
|
|
50
51
|
*/
|
|
51
52
|
fetchImplementation?: typeof fetch;
|
|
53
|
+
/**
|
|
54
|
+
* Configuration for caching. Can be:
|
|
55
|
+
* - `true`: Enable caching with default settings
|
|
56
|
+
* - `Cache` instance: Use custom cache configuration
|
|
57
|
+
* - `undefined` or `false`: Disable caching (default)
|
|
58
|
+
*
|
|
59
|
+
* @example
|
|
60
|
+
* ```typescript
|
|
61
|
+
* import { Client, Cache } from "langsmith";
|
|
62
|
+
*
|
|
63
|
+
* // Enable with defaults
|
|
64
|
+
* const client1 = new Client({ cache: true });
|
|
65
|
+
*
|
|
66
|
+
* // Or use custom configuration
|
|
67
|
+
* const myCache = new Cache({
|
|
68
|
+
* maxSize: 100,
|
|
69
|
+
* ttlSeconds: 3600, // 1 hour, or null for infinite TTL
|
|
70
|
+
* });
|
|
71
|
+
* const client2 = new Client({ cache: myCache });
|
|
72
|
+
* ```
|
|
73
|
+
*/
|
|
74
|
+
cache?: Cache | boolean;
|
|
52
75
|
}
|
|
53
76
|
/**
|
|
54
77
|
* Represents the parameters for listing runs (spans) from the Langsmith server.
|
|
@@ -351,6 +374,7 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
351
374
|
private langSmithToOTELTranslator?;
|
|
352
375
|
private fetchImplementation?;
|
|
353
376
|
private cachedLSEnvVarsForMetadata?;
|
|
377
|
+
private _cache?;
|
|
354
378
|
private get _fetch();
|
|
355
379
|
private multipartStreamingDisabled;
|
|
356
380
|
private _multipartDisabled;
|
|
@@ -995,8 +1019,18 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
995
1019
|
isArchived?: boolean;
|
|
996
1020
|
}): Promise<Record<string, any>>;
|
|
997
1021
|
deletePrompt(promptIdentifier: string): Promise<void>;
|
|
1022
|
+
/**
|
|
1023
|
+
* Generate a cache key for a prompt.
|
|
1024
|
+
* Format: "{identifier}" or "{identifier}:with_model"
|
|
1025
|
+
*/
|
|
1026
|
+
private _getPromptCacheKey;
|
|
1027
|
+
/**
|
|
1028
|
+
* Fetch a prompt commit directly from the API (bypassing cache).
|
|
1029
|
+
*/
|
|
1030
|
+
private _fetchPromptFromApi;
|
|
998
1031
|
pullPromptCommit(promptIdentifier: string, options?: {
|
|
999
1032
|
includeModel?: boolean;
|
|
1033
|
+
skipCache?: boolean;
|
|
1000
1034
|
}): Promise<PromptCommit>;
|
|
1001
1035
|
/**
|
|
1002
1036
|
* This method should not be used directly, use `import { pull } from "langchain/hub"` instead.
|
|
@@ -1005,6 +1039,7 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
1005
1039
|
*/
|
|
1006
1040
|
_pullPrompt(promptIdentifier: string, options?: {
|
|
1007
1041
|
includeModel?: boolean;
|
|
1042
|
+
skipCache?: boolean;
|
|
1008
1043
|
}): Promise<any>;
|
|
1009
1044
|
pushPrompt(promptIdentifier: string, options?: {
|
|
1010
1045
|
object?: any;
|
|
@@ -1030,6 +1065,16 @@ export declare class Client implements LangSmithTracingClientInterface {
|
|
|
1030
1065
|
datasetName?: string;
|
|
1031
1066
|
}): Promise<void>;
|
|
1032
1067
|
private parseTokenOrUrl;
|
|
1068
|
+
/**
|
|
1069
|
+
* Get the cache instance, if caching is enabled.
|
|
1070
|
+
* Useful for accessing cache metrics or manually managing the cache.
|
|
1071
|
+
*/
|
|
1072
|
+
get cache(): Cache | undefined;
|
|
1073
|
+
/**
|
|
1074
|
+
* Cleanup resources held by the client.
|
|
1075
|
+
* Stops the cache's background refresh timer.
|
|
1076
|
+
*/
|
|
1077
|
+
cleanup(): void;
|
|
1033
1078
|
/**
|
|
1034
1079
|
* Awaits all pending trace batches. Useful for environments where
|
|
1035
1080
|
* you need to be sure that all tracing requests finish before execution ends,
|
package/dist/client.js
CHANGED
|
@@ -9,6 +9,7 @@ import { assertUuid } from "./utils/_uuid.js";
|
|
|
9
9
|
import { warnOnce } from "./utils/warn.js";
|
|
10
10
|
import { parsePromptIdentifier } from "./utils/prompts.js";
|
|
11
11
|
import { raiseForStatus, isLangSmithNotFoundError } from "./utils/error.js";
|
|
12
|
+
import { Cache } from "./utils/prompts_cache.js";
|
|
12
13
|
import { _globalFetchImplementationIsNodeFetch, _getFetchImplementation, } from "./singletons/fetch.js";
|
|
13
14
|
import { serialize as serializePayloadForTracing } from "./utils/fast-safe-stringify/index.js";
|
|
14
15
|
export function mergeRuntimeEnvIntoRun(run, cachedEnvVars, omitTracedRuntimeInfo) {
|
|
@@ -372,6 +373,12 @@ export class Client {
|
|
|
372
373
|
writable: true,
|
|
373
374
|
value: void 0
|
|
374
375
|
});
|
|
376
|
+
Object.defineProperty(this, "_cache", {
|
|
377
|
+
enumerable: true,
|
|
378
|
+
configurable: true,
|
|
379
|
+
writable: true,
|
|
380
|
+
value: void 0
|
|
381
|
+
});
|
|
375
382
|
Object.defineProperty(this, "multipartStreamingDisabled", {
|
|
376
383
|
enumerable: true,
|
|
377
384
|
configurable: true,
|
|
@@ -443,6 +450,16 @@ export class Client {
|
|
|
443
450
|
}
|
|
444
451
|
// Cache metadata env vars once during construction to avoid repeatedly scanning process.env
|
|
445
452
|
this.cachedLSEnvVarsForMetadata = getLangSmithEnvVarsMetadata();
|
|
453
|
+
// Initialize cache
|
|
454
|
+
if (config.cache === true) {
|
|
455
|
+
this._cache = new Cache();
|
|
456
|
+
}
|
|
457
|
+
else if (config.cache && typeof config.cache === "object") {
|
|
458
|
+
this._cache = config.cache;
|
|
459
|
+
}
|
|
460
|
+
else {
|
|
461
|
+
this._cache = undefined;
|
|
462
|
+
}
|
|
446
463
|
}
|
|
447
464
|
static getDefaultClientConfig() {
|
|
448
465
|
const apiKey = getLangSmithEnvironmentVariable("API_KEY");
|
|
@@ -3746,7 +3763,18 @@ export class Client {
|
|
|
3746
3763
|
});
|
|
3747
3764
|
return response.json();
|
|
3748
3765
|
}
|
|
3749
|
-
|
|
3766
|
+
/**
|
|
3767
|
+
* Generate a cache key for a prompt.
|
|
3768
|
+
* Format: "{identifier}" or "{identifier}:with_model"
|
|
3769
|
+
*/
|
|
3770
|
+
_getPromptCacheKey(promptIdentifier, includeModel) {
|
|
3771
|
+
const suffix = includeModel ? ":with_model" : "";
|
|
3772
|
+
return `${promptIdentifier}${suffix}`;
|
|
3773
|
+
}
|
|
3774
|
+
/**
|
|
3775
|
+
* Fetch a prompt commit directly from the API (bypassing cache).
|
|
3776
|
+
*/
|
|
3777
|
+
async _fetchPromptFromApi(promptIdentifier, options) {
|
|
3750
3778
|
const [owner, promptName, commitHash] = parsePromptIdentifier(promptIdentifier);
|
|
3751
3779
|
const response = await this.caller.call(async () => {
|
|
3752
3780
|
const res = await this._fetch(`${this.apiUrl}/commits/${owner}/${promptName}/${commitHash}${options?.includeModel ? "?include_model=true" : ""}`, {
|
|
@@ -3767,6 +3795,22 @@ export class Client {
|
|
|
3767
3795
|
examples: result.examples,
|
|
3768
3796
|
};
|
|
3769
3797
|
}
|
|
3798
|
+
async pullPromptCommit(promptIdentifier, options) {
|
|
3799
|
+
// Check cache first if not skipped
|
|
3800
|
+
if (!options?.skipCache && this._cache) {
|
|
3801
|
+
const cacheKey = this._getPromptCacheKey(promptIdentifier, options?.includeModel);
|
|
3802
|
+
const cached = this._cache.get(cacheKey);
|
|
3803
|
+
if (cached) {
|
|
3804
|
+
return cached;
|
|
3805
|
+
}
|
|
3806
|
+
// Cache miss - fetch from API and cache it
|
|
3807
|
+
const result = await this._fetchPromptFromApi(promptIdentifier, options);
|
|
3808
|
+
this._cache.set(cacheKey, result);
|
|
3809
|
+
return result;
|
|
3810
|
+
}
|
|
3811
|
+
// No cache or skip cache - fetch directly
|
|
3812
|
+
return this._fetchPromptFromApi(promptIdentifier, options);
|
|
3813
|
+
}
|
|
3770
3814
|
/**
|
|
3771
3815
|
* This method should not be used directly, use `import { pull } from "langchain/hub"` instead.
|
|
3772
3816
|
* Using this method directly returns the JSON string of the prompt rather than a LangChain object.
|
|
@@ -3775,6 +3819,7 @@ export class Client {
|
|
|
3775
3819
|
async _pullPrompt(promptIdentifier, options) {
|
|
3776
3820
|
const promptObject = await this.pullPromptCommit(promptIdentifier, {
|
|
3777
3821
|
includeModel: options?.includeModel,
|
|
3822
|
+
skipCache: options?.skipCache,
|
|
3778
3823
|
});
|
|
3779
3824
|
const prompt = JSON.stringify(promptObject.manifest);
|
|
3780
3825
|
return prompt;
|
|
@@ -3889,6 +3934,22 @@ export class Client {
|
|
|
3889
3934
|
throw new Error(`Invalid public ${kind} URL or token: ${urlOrToken}`);
|
|
3890
3935
|
}
|
|
3891
3936
|
}
|
|
3937
|
+
/**
|
|
3938
|
+
* Get the cache instance, if caching is enabled.
|
|
3939
|
+
* Useful for accessing cache metrics or manually managing the cache.
|
|
3940
|
+
*/
|
|
3941
|
+
get cache() {
|
|
3942
|
+
return this._cache;
|
|
3943
|
+
}
|
|
3944
|
+
/**
|
|
3945
|
+
* Cleanup resources held by the client.
|
|
3946
|
+
* Stops the cache's background refresh timer.
|
|
3947
|
+
*/
|
|
3948
|
+
cleanup() {
|
|
3949
|
+
if (this._cache) {
|
|
3950
|
+
this._cache.stop();
|
|
3951
|
+
}
|
|
3952
|
+
}
|
|
3892
3953
|
/**
|
|
3893
3954
|
* Awaits all pending trace batches. Useful for environments where
|
|
3894
3955
|
* you need to be sure that all tracing requests finish before execution ends,
|
package/dist/index.cjs
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.__version__ = exports.uuid7FromTime = exports.uuid7 = exports.getDefaultProjectName = exports.overrideFetchImplementation = exports.RunTree = exports.Client = void 0;
|
|
3
|
+
exports.__version__ = exports.Cache = exports.uuid7FromTime = exports.uuid7 = exports.getDefaultProjectName = exports.overrideFetchImplementation = exports.RunTree = exports.Client = void 0;
|
|
4
4
|
var client_js_1 = require("./client.cjs");
|
|
5
5
|
Object.defineProperty(exports, "Client", { enumerable: true, get: function () { return client_js_1.Client; } });
|
|
6
6
|
var run_trees_js_1 = require("./run_trees.cjs");
|
|
@@ -12,5 +12,7 @@ Object.defineProperty(exports, "getDefaultProjectName", { enumerable: true, get:
|
|
|
12
12
|
var uuid_js_1 = require("./uuid.cjs");
|
|
13
13
|
Object.defineProperty(exports, "uuid7", { enumerable: true, get: function () { return uuid_js_1.uuid7; } });
|
|
14
14
|
Object.defineProperty(exports, "uuid7FromTime", { enumerable: true, get: function () { return uuid_js_1.uuid7FromTime; } });
|
|
15
|
+
var prompts_cache_js_1 = require("./utils/prompts_cache.cjs");
|
|
16
|
+
Object.defineProperty(exports, "Cache", { enumerable: true, get: function () { return prompts_cache_js_1.Cache; } });
|
|
15
17
|
// Update using yarn bump-version
|
|
16
|
-
exports.__version__ = "0.4.
|
|
18
|
+
exports.__version__ = "0.4.6";
|
package/dist/index.d.ts
CHANGED
|
@@ -4,4 +4,5 @@ export { RunTree, type RunTreeConfig } from "./run_trees.js";
|
|
|
4
4
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
5
5
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
6
6
|
export { uuid7, uuid7FromTime } from "./uuid.js";
|
|
7
|
-
export
|
|
7
|
+
export { Cache, type CacheConfig, type CacheMetrics, } from "./utils/prompts_cache.js";
|
|
8
|
+
export declare const __version__ = "0.4.6";
|
package/dist/index.js
CHANGED
|
@@ -3,5 +3,6 @@ export { RunTree } from "./run_trees.js";
|
|
|
3
3
|
export { overrideFetchImplementation } from "./singletons/fetch.js";
|
|
4
4
|
export { getDefaultProjectName } from "./utils/project.js";
|
|
5
5
|
export { uuid7, uuid7FromTime } from "./uuid.js";
|
|
6
|
+
export { Cache, } from "./utils/prompts_cache.js";
|
|
6
7
|
// Update using yarn bump-version
|
|
7
|
-
export const __version__ = "0.4.
|
|
8
|
+
export const __version__ = "0.4.6";
|
package/dist/run_trees.cjs
CHANGED
|
@@ -44,6 +44,20 @@ function convertToDottedOrderFormat(epoch, runId, executionOrder = 1) {
|
|
|
44
44
|
microsecondPrecisionDatestring,
|
|
45
45
|
};
|
|
46
46
|
}
|
|
47
|
+
const HEADER_SAFE_REPLICA_FIELDS = new Set([
|
|
48
|
+
"projectName",
|
|
49
|
+
"updates",
|
|
50
|
+
"reroot",
|
|
51
|
+
]);
|
|
52
|
+
function filterReplicaForHeaders(replica) {
|
|
53
|
+
const filtered = {};
|
|
54
|
+
for (const key of Object.keys(replica)) {
|
|
55
|
+
if (HEADER_SAFE_REPLICA_FIELDS.has(key)) {
|
|
56
|
+
filtered[key] = replica[key];
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
return filtered;
|
|
60
|
+
}
|
|
47
61
|
/**
|
|
48
62
|
* Baggage header information
|
|
49
63
|
*/
|
|
@@ -97,7 +111,13 @@ class Baggage {
|
|
|
97
111
|
project_name = value;
|
|
98
112
|
}
|
|
99
113
|
else if (key === "langsmith-replicas") {
|
|
100
|
-
|
|
114
|
+
const parsed = JSON.parse(value);
|
|
115
|
+
replicas = parsed.map((replica) => {
|
|
116
|
+
if (Array.isArray(replica)) {
|
|
117
|
+
return replica;
|
|
118
|
+
}
|
|
119
|
+
return filterReplicaForHeaders(replica);
|
|
120
|
+
});
|
|
101
121
|
}
|
|
102
122
|
}
|
|
103
123
|
return new Baggage(metadata, tags, project_name, replicas);
|
package/dist/run_trees.js
CHANGED
|
@@ -38,6 +38,20 @@ export function convertToDottedOrderFormat(epoch, runId, executionOrder = 1) {
|
|
|
38
38
|
microsecondPrecisionDatestring,
|
|
39
39
|
};
|
|
40
40
|
}
|
|
41
|
+
const HEADER_SAFE_REPLICA_FIELDS = new Set([
|
|
42
|
+
"projectName",
|
|
43
|
+
"updates",
|
|
44
|
+
"reroot",
|
|
45
|
+
]);
|
|
46
|
+
function filterReplicaForHeaders(replica) {
|
|
47
|
+
const filtered = {};
|
|
48
|
+
for (const key of Object.keys(replica)) {
|
|
49
|
+
if (HEADER_SAFE_REPLICA_FIELDS.has(key)) {
|
|
50
|
+
filtered[key] = replica[key];
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
return filtered;
|
|
54
|
+
}
|
|
41
55
|
/**
|
|
42
56
|
* Baggage header information
|
|
43
57
|
*/
|
|
@@ -91,7 +105,13 @@ class Baggage {
|
|
|
91
105
|
project_name = value;
|
|
92
106
|
}
|
|
93
107
|
else if (key === "langsmith-replicas") {
|
|
94
|
-
|
|
108
|
+
const parsed = JSON.parse(value);
|
|
109
|
+
replicas = parsed.map((replica) => {
|
|
110
|
+
if (Array.isArray(replica)) {
|
|
111
|
+
return replica;
|
|
112
|
+
}
|
|
113
|
+
return filterReplicaForHeaders(replica);
|
|
114
|
+
});
|
|
95
115
|
}
|
|
96
116
|
}
|
|
97
117
|
return new Baggage(metadata, tags, project_name, replicas);
|
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Prompt caching module for LangSmith SDK.
|
|
4
|
+
*
|
|
5
|
+
* Provides an LRU cache with background refresh for prompt caching.
|
|
6
|
+
* Uses stale-while-revalidate pattern for optimal performance.
|
|
7
|
+
*
|
|
8
|
+
* Works in all environments. File operations (dump/load) use helpers
|
|
9
|
+
* that are swapped for browser builds via package.json browser field.
|
|
10
|
+
*/
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.Cache = void 0;
|
|
13
|
+
const prompts_cache_fs_js_1 = require("./prompts_cache_fs.cjs");
|
|
14
|
+
/**
|
|
15
|
+
* Check if a cache entry is stale based on TTL.
|
|
16
|
+
*/
|
|
17
|
+
function isStale(entry, ttlSeconds) {
|
|
18
|
+
if (ttlSeconds === null) {
|
|
19
|
+
return false; // Infinite TTL, never stale
|
|
20
|
+
}
|
|
21
|
+
const ageMs = Date.now() - entry.createdAt;
|
|
22
|
+
return ageMs > ttlSeconds * 1000;
|
|
23
|
+
}
|
|
24
|
+
/**
|
|
25
|
+
* LRU cache with background refresh for prompts.
|
|
26
|
+
*
|
|
27
|
+
* Features:
|
|
28
|
+
* - In-memory LRU cache with configurable max size
|
|
29
|
+
* - Background refresh using setInterval
|
|
30
|
+
* - Stale-while-revalidate: returns stale data while refresh happens
|
|
31
|
+
* - JSON dump/load for offline use
|
|
32
|
+
*
|
|
33
|
+
* @example
|
|
34
|
+
* ```typescript
|
|
35
|
+
* const cache = new Cache({
|
|
36
|
+
* maxSize: 100,
|
|
37
|
+
* ttlSeconds: 3600,
|
|
38
|
+
* fetchFunc: async (key) => client.pullPromptCommit(key),
|
|
39
|
+
* });
|
|
40
|
+
*
|
|
41
|
+
* // Use the cache
|
|
42
|
+
* cache.set("my-prompt:latest", promptCommit);
|
|
43
|
+
* const cached = cache.get("my-prompt:latest");
|
|
44
|
+
*
|
|
45
|
+
* // Cleanup
|
|
46
|
+
* cache.stop();
|
|
47
|
+
* ```
|
|
48
|
+
*/
|
|
49
|
+
class Cache {
|
|
50
|
+
constructor(config = {}) {
|
|
51
|
+
Object.defineProperty(this, "cache", {
|
|
52
|
+
enumerable: true,
|
|
53
|
+
configurable: true,
|
|
54
|
+
writable: true,
|
|
55
|
+
value: new Map()
|
|
56
|
+
});
|
|
57
|
+
Object.defineProperty(this, "maxSize", {
|
|
58
|
+
enumerable: true,
|
|
59
|
+
configurable: true,
|
|
60
|
+
writable: true,
|
|
61
|
+
value: void 0
|
|
62
|
+
});
|
|
63
|
+
Object.defineProperty(this, "ttlSeconds", {
|
|
64
|
+
enumerable: true,
|
|
65
|
+
configurable: true,
|
|
66
|
+
writable: true,
|
|
67
|
+
value: void 0
|
|
68
|
+
});
|
|
69
|
+
Object.defineProperty(this, "refreshIntervalSeconds", {
|
|
70
|
+
enumerable: true,
|
|
71
|
+
configurable: true,
|
|
72
|
+
writable: true,
|
|
73
|
+
value: void 0
|
|
74
|
+
});
|
|
75
|
+
Object.defineProperty(this, "fetchFunc", {
|
|
76
|
+
enumerable: true,
|
|
77
|
+
configurable: true,
|
|
78
|
+
writable: true,
|
|
79
|
+
value: void 0
|
|
80
|
+
});
|
|
81
|
+
Object.defineProperty(this, "refreshTimer", {
|
|
82
|
+
enumerable: true,
|
|
83
|
+
configurable: true,
|
|
84
|
+
writable: true,
|
|
85
|
+
value: void 0
|
|
86
|
+
});
|
|
87
|
+
Object.defineProperty(this, "_metrics", {
|
|
88
|
+
enumerable: true,
|
|
89
|
+
configurable: true,
|
|
90
|
+
writable: true,
|
|
91
|
+
value: {
|
|
92
|
+
hits: 0,
|
|
93
|
+
misses: 0,
|
|
94
|
+
refreshes: 0,
|
|
95
|
+
refreshErrors: 0,
|
|
96
|
+
}
|
|
97
|
+
});
|
|
98
|
+
this.maxSize = config.maxSize ?? 100;
|
|
99
|
+
this.ttlSeconds = config.ttlSeconds ?? 3600;
|
|
100
|
+
this.refreshIntervalSeconds = config.refreshIntervalSeconds ?? 60;
|
|
101
|
+
this.fetchFunc = config.fetchFunc;
|
|
102
|
+
// Start background refresh if fetch function provided and TTL is set
|
|
103
|
+
if (this.fetchFunc && this.ttlSeconds !== null) {
|
|
104
|
+
this.startRefreshLoop();
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
/**
|
|
108
|
+
* Get cache performance metrics.
|
|
109
|
+
*/
|
|
110
|
+
get metrics() {
|
|
111
|
+
return { ...this._metrics };
|
|
112
|
+
}
|
|
113
|
+
/**
|
|
114
|
+
* Get total cache requests (hits + misses).
|
|
115
|
+
*/
|
|
116
|
+
get totalRequests() {
|
|
117
|
+
return this._metrics.hits + this._metrics.misses;
|
|
118
|
+
}
|
|
119
|
+
/**
|
|
120
|
+
* Get cache hit rate (0.0 to 1.0).
|
|
121
|
+
*/
|
|
122
|
+
get hitRate() {
|
|
123
|
+
const total = this.totalRequests;
|
|
124
|
+
return total > 0 ? this._metrics.hits / total : 0;
|
|
125
|
+
}
|
|
126
|
+
/**
|
|
127
|
+
* Reset all metrics to zero.
|
|
128
|
+
*/
|
|
129
|
+
resetMetrics() {
|
|
130
|
+
this._metrics = {
|
|
131
|
+
hits: 0,
|
|
132
|
+
misses: 0,
|
|
133
|
+
refreshes: 0,
|
|
134
|
+
refreshErrors: 0,
|
|
135
|
+
};
|
|
136
|
+
}
|
|
137
|
+
/**
|
|
138
|
+
* Get a value from cache.
|
|
139
|
+
*
|
|
140
|
+
* Returns the cached value or undefined if not found.
|
|
141
|
+
* Stale entries are still returned (background refresh handles updates).
|
|
142
|
+
*/
|
|
143
|
+
get(key) {
|
|
144
|
+
const entry = this.cache.get(key);
|
|
145
|
+
if (!entry) {
|
|
146
|
+
this._metrics.misses += 1;
|
|
147
|
+
return undefined;
|
|
148
|
+
}
|
|
149
|
+
// Move to end for LRU (delete and re-add)
|
|
150
|
+
this.cache.delete(key);
|
|
151
|
+
this.cache.set(key, entry);
|
|
152
|
+
this._metrics.hits += 1;
|
|
153
|
+
return entry.value;
|
|
154
|
+
}
|
|
155
|
+
/**
|
|
156
|
+
* Set a value in the cache.
|
|
157
|
+
*/
|
|
158
|
+
set(key, value) {
|
|
159
|
+
// Check if we need to evict (and key is new)
|
|
160
|
+
if (!this.cache.has(key) && this.cache.size >= this.maxSize) {
|
|
161
|
+
// Evict oldest (first item in Map)
|
|
162
|
+
const oldestKey = this.cache.keys().next().value;
|
|
163
|
+
if (oldestKey !== undefined) {
|
|
164
|
+
this.cache.delete(oldestKey);
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
const entry = {
|
|
168
|
+
value,
|
|
169
|
+
createdAt: Date.now(),
|
|
170
|
+
};
|
|
171
|
+
// Delete first to ensure it's at the end
|
|
172
|
+
this.cache.delete(key);
|
|
173
|
+
this.cache.set(key, entry);
|
|
174
|
+
}
|
|
175
|
+
/**
|
|
176
|
+
* Remove a specific entry from cache.
|
|
177
|
+
*/
|
|
178
|
+
invalidate(key) {
|
|
179
|
+
this.cache.delete(key);
|
|
180
|
+
}
|
|
181
|
+
/**
|
|
182
|
+
* Clear all cache entries.
|
|
183
|
+
*/
|
|
184
|
+
clear() {
|
|
185
|
+
this.cache.clear();
|
|
186
|
+
}
|
|
187
|
+
/**
|
|
188
|
+
* Get the number of entries in the cache.
|
|
189
|
+
*/
|
|
190
|
+
get size() {
|
|
191
|
+
return this.cache.size;
|
|
192
|
+
}
|
|
193
|
+
/**
|
|
194
|
+
* Stop background refresh.
|
|
195
|
+
* Should be called when the client is being cleaned up.
|
|
196
|
+
*/
|
|
197
|
+
stop() {
|
|
198
|
+
if (this.refreshTimer) {
|
|
199
|
+
clearInterval(this.refreshTimer);
|
|
200
|
+
this.refreshTimer = undefined;
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
/**
|
|
204
|
+
* Dump cache contents to a JSON file for offline use.
|
|
205
|
+
*/
|
|
206
|
+
dump(filePath) {
|
|
207
|
+
const entries = {};
|
|
208
|
+
for (const [key, entry] of this.cache.entries()) {
|
|
209
|
+
entries[key] = entry.value;
|
|
210
|
+
}
|
|
211
|
+
(0, prompts_cache_fs_js_1.dumpCache)(filePath, entries);
|
|
212
|
+
}
|
|
213
|
+
/**
|
|
214
|
+
* Load cache contents from a JSON file.
|
|
215
|
+
*
|
|
216
|
+
* Loaded entries get a fresh TTL starting from load time.
|
|
217
|
+
*
|
|
218
|
+
* @returns Number of entries loaded.
|
|
219
|
+
*/
|
|
220
|
+
load(filePath) {
|
|
221
|
+
const entries = (0, prompts_cache_fs_js_1.loadCache)(filePath);
|
|
222
|
+
if (!entries) {
|
|
223
|
+
return 0;
|
|
224
|
+
}
|
|
225
|
+
let loaded = 0;
|
|
226
|
+
const now = Date.now();
|
|
227
|
+
for (const [key, value] of Object.entries(entries)) {
|
|
228
|
+
if (this.cache.size >= this.maxSize) {
|
|
229
|
+
break;
|
|
230
|
+
}
|
|
231
|
+
const entry = {
|
|
232
|
+
value: value,
|
|
233
|
+
createdAt: now, // Fresh TTL from load time
|
|
234
|
+
};
|
|
235
|
+
this.cache.set(key, entry);
|
|
236
|
+
loaded += 1;
|
|
237
|
+
}
|
|
238
|
+
return loaded;
|
|
239
|
+
}
|
|
240
|
+
/**
|
|
241
|
+
* Start the background refresh loop.
|
|
242
|
+
*/
|
|
243
|
+
startRefreshLoop() {
|
|
244
|
+
this.refreshTimer = setInterval(() => {
|
|
245
|
+
this.refreshStaleEntries().catch((e) => {
|
|
246
|
+
// Log but don't die - keep the refresh loop running
|
|
247
|
+
console.warn("Unexpected error in cache refresh loop:", e);
|
|
248
|
+
});
|
|
249
|
+
}, this.refreshIntervalSeconds * 1000);
|
|
250
|
+
// Don't block Node.js from exiting
|
|
251
|
+
if (this.refreshTimer.unref) {
|
|
252
|
+
this.refreshTimer.unref();
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
/**
|
|
256
|
+
* Get list of stale cache keys.
|
|
257
|
+
*/
|
|
258
|
+
getStaleKeys() {
|
|
259
|
+
const staleKeys = [];
|
|
260
|
+
for (const [key, entry] of this.cache.entries()) {
|
|
261
|
+
if (isStale(entry, this.ttlSeconds)) {
|
|
262
|
+
staleKeys.push(key);
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
return staleKeys;
|
|
266
|
+
}
|
|
267
|
+
/**
|
|
268
|
+
* Check for stale entries and refresh them.
|
|
269
|
+
*/
|
|
270
|
+
async refreshStaleEntries() {
|
|
271
|
+
if (!this.fetchFunc) {
|
|
272
|
+
return;
|
|
273
|
+
}
|
|
274
|
+
const staleKeys = this.getStaleKeys();
|
|
275
|
+
if (staleKeys.length === 0) {
|
|
276
|
+
return;
|
|
277
|
+
}
|
|
278
|
+
for (const key of staleKeys) {
|
|
279
|
+
try {
|
|
280
|
+
const newValue = await this.fetchFunc(key);
|
|
281
|
+
this.set(key, newValue);
|
|
282
|
+
this._metrics.refreshes += 1;
|
|
283
|
+
}
|
|
284
|
+
catch (e) {
|
|
285
|
+
// Keep stale data on refresh failure
|
|
286
|
+
this._metrics.refreshErrors += 1;
|
|
287
|
+
console.warn(`Failed to refresh cache entry ${key}:`, e);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
exports.Cache = Cache;
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Prompt caching module for LangSmith SDK.
|
|
3
|
+
*
|
|
4
|
+
* Provides an LRU cache with background refresh for prompt caching.
|
|
5
|
+
* Uses stale-while-revalidate pattern for optimal performance.
|
|
6
|
+
*
|
|
7
|
+
* Works in all environments. File operations (dump/load) use helpers
|
|
8
|
+
* that are swapped for browser builds via package.json browser field.
|
|
9
|
+
*/
|
|
10
|
+
import type { PromptCommit } from "../schemas.js";
|
|
11
|
+
/**
 * A single cache entry with metadata for TTL tracking.
 */
export interface CacheEntry<T = unknown> {
    /** The cached payload. */
    value: T;
    /** Epoch milliseconds when the entry was created; used for TTL staleness checks. */
    createdAt: number;
}
/**
 * Cache performance metrics.
 */
export interface CacheMetrics {
    /** Number of get() calls that found an entry. */
    hits: number;
    /** Number of get() calls that found nothing. */
    misses: number;
    /** Number of successful background refreshes. */
    refreshes: number;
    /** Number of failed background refresh attempts (stale data is kept). */
    refreshErrors: number;
}
/**
 * Configuration options for Cache.
 */
export interface CacheConfig {
    /** Maximum entries in cache (LRU eviction when exceeded). Default: 100 */
    maxSize?: number;
    /** Time in seconds before entry is stale. null = infinite TTL. Default: 3600 */
    ttlSeconds?: number | null;
    /** How often to check for stale entries in seconds. Default: 60 */
    refreshIntervalSeconds?: number;
    /** Callback to fetch fresh data for a cache key */
    fetchFunc?: (key: string) => Promise<PromptCommit>;
}
/**
 * LRU cache with background refresh for prompts.
 *
 * Features:
 * - In-memory LRU cache with configurable max size
 * - Background refresh using setInterval
 * - Stale-while-revalidate: returns stale data while refresh happens
 * - JSON dump/load for offline use
 *
 * @example
 * ```typescript
 * const cache = new Cache({
 *   maxSize: 100,
 *   ttlSeconds: 3600,
 *   fetchFunc: async (key) => client.pullPromptCommit(key),
 * });
 *
 * // Use the cache
 * cache.set("my-prompt:latest", promptCommit);
 * const cached = cache.get("my-prompt:latest");
 *
 * // Cleanup
 * cache.stop();
 * ```
 */
export declare class Cache {
    private cache;
    private maxSize;
    private ttlSeconds;
    private refreshIntervalSeconds;
    private fetchFunc?;
    private refreshTimer;
    private _metrics;
    constructor(config?: CacheConfig);
    /**
     * Get cache performance metrics (a defensive copy).
     */
    get metrics(): Readonly<CacheMetrics>;
    /**
     * Get total cache requests (hits + misses).
     */
    get totalRequests(): number;
    /**
     * Get cache hit rate (0.0 to 1.0). Returns 0 when no requests were made.
     */
    get hitRate(): number;
    /**
     * Reset all metrics to zero.
     */
    resetMetrics(): void;
    /**
     * Get a value from cache.
     *
     * Returns the cached value or undefined if not found.
     * Stale entries are still returned (background refresh handles updates).
     */
    get(key: string): PromptCommit | undefined;
    /**
     * Set a value in the cache, evicting the least recently used entry
     * when the cache is full and the key is new.
     */
    set(key: string, value: PromptCommit): void;
    /**
     * Remove a specific entry from cache.
     */
    invalidate(key: string): void;
    /**
     * Clear all cache entries.
     */
    clear(): void;
    /**
     * Get the number of entries in the cache.
     */
    get size(): number;
    /**
     * Stop background refresh.
     * Should be called when the client is being cleaned up.
     */
    stop(): void;
    /**
     * Dump cache contents to a JSON file for offline use.
     * Throws in browser environments (no file system access).
     */
    dump(filePath: string): void;
    /**
     * Load cache contents from a JSON file.
     *
     * Loaded entries get a fresh TTL starting from load time.
     * Throws in browser environments (no file system access).
     *
     * @returns Number of entries loaded.
     */
    load(filePath: string): number;
    /**
     * Start the background refresh loop.
     */
    private startRefreshLoop;
    /**
     * Get list of stale cache keys.
     */
    private getStaleKeys;
    /**
     * Check for stale entries and refresh them.
     */
    private refreshStaleEntries;
}
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Prompt caching module for LangSmith SDK.
|
|
3
|
+
*
|
|
4
|
+
* Provides an LRU cache with background refresh for prompt caching.
|
|
5
|
+
* Uses stale-while-revalidate pattern for optimal performance.
|
|
6
|
+
*
|
|
7
|
+
* Works in all environments. File operations (dump/load) use helpers
|
|
8
|
+
* that are swapped for browser builds via package.json browser field.
|
|
9
|
+
*/
|
|
10
|
+
import { dumpCache, loadCache } from "./prompts_cache_fs.js";
|
|
11
|
+
/**
|
|
12
|
+
* Check if a cache entry is stale based on TTL.
|
|
13
|
+
*/
|
|
14
|
+
/**
 * Decide whether a cache entry has outlived its TTL.
 *
 * @param {CacheEntry} entry - Entry whose createdAt timestamp is checked.
 * @param {number | null} ttlSeconds - TTL in seconds; null means never stale.
 * @returns {boolean} true when the entry is older than the TTL.
 */
function isStale(entry, ttlSeconds) {
    // A null TTL means entries never expire.
    if (ttlSeconds === null) {
        return false;
    }
    const expiresAt = entry.createdAt + ttlSeconds * 1000;
    return Date.now() > expiresAt;
}
|
|
21
|
+
/**
|
|
22
|
+
* LRU cache with background refresh for prompts.
|
|
23
|
+
*
|
|
24
|
+
* Features:
|
|
25
|
+
* - In-memory LRU cache with configurable max size
|
|
26
|
+
* - Background refresh using setInterval
|
|
27
|
+
* - Stale-while-revalidate: returns stale data while refresh happens
|
|
28
|
+
* - JSON dump/load for offline use
|
|
29
|
+
*
|
|
30
|
+
* @example
|
|
31
|
+
* ```typescript
|
|
32
|
+
* const cache = new Cache({
|
|
33
|
+
* maxSize: 100,
|
|
34
|
+
* ttlSeconds: 3600,
|
|
35
|
+
* fetchFunc: async (key) => client.pullPromptCommit(key),
|
|
36
|
+
* });
|
|
37
|
+
*
|
|
38
|
+
* // Use the cache
|
|
39
|
+
* cache.set("my-prompt:latest", promptCommit);
|
|
40
|
+
* const cached = cache.get("my-prompt:latest");
|
|
41
|
+
*
|
|
42
|
+
* // Cleanup
|
|
43
|
+
* cache.stop();
|
|
44
|
+
* ```
|
|
45
|
+
*/
|
|
46
|
+
export class Cache {
    /**
     * @param {CacheConfig} [config] - Cache configuration. Defaults:
     *   maxSize 100, ttlSeconds 3600, refreshIntervalSeconds 60.
     */
    constructor(config = {}) {
        // Insertion-ordered Map doubles as the LRU structure:
        // the first key is always the least recently used.
        this.cache = new Map();
        this.maxSize = config.maxSize ?? 100;
        // BUG FIX: use an explicit undefined check rather than `??` so that a
        // caller-supplied `ttlSeconds: null` (documented as "infinite TTL")
        // is honored instead of being silently replaced by the 3600 default.
        this.ttlSeconds = config.ttlSeconds === undefined ? 3600 : config.ttlSeconds;
        this.refreshIntervalSeconds = config.refreshIntervalSeconds ?? 60;
        this.fetchFunc = config.fetchFunc;
        this.refreshTimer = undefined;
        this._metrics = {
            hits: 0,
            misses: 0,
            refreshes: 0,
            refreshErrors: 0,
        };
        // Background refresh is only useful when we can fetch fresh data
        // (fetchFunc provided) and entries can actually go stale (finite TTL).
        if (this.fetchFunc && this.ttlSeconds !== null) {
            this.startRefreshLoop();
        }
    }
    /**
     * Get cache performance metrics (a defensive copy).
     */
    get metrics() {
        return { ...this._metrics };
    }
    /**
     * Get total cache requests (hits + misses).
     */
    get totalRequests() {
        return this._metrics.hits + this._metrics.misses;
    }
    /**
     * Get cache hit rate (0.0 to 1.0). Returns 0 when no requests were made.
     */
    get hitRate() {
        const total = this.totalRequests;
        return total > 0 ? this._metrics.hits / total : 0;
    }
    /**
     * Reset all metrics to zero.
     */
    resetMetrics() {
        this._metrics = {
            hits: 0,
            misses: 0,
            refreshes: 0,
            refreshErrors: 0,
        };
    }
    /**
     * Get a value from cache.
     *
     * Returns the cached value or undefined if not found.
     * Stale entries are still returned (stale-while-revalidate: the
     * background refresh loop is responsible for updating them).
     */
    get(key) {
        const entry = this.cache.get(key);
        if (!entry) {
            this._metrics.misses += 1;
            return undefined;
        }
        // Re-insert to move the key to the end of the Map's insertion
        // order, marking it as most recently used.
        this.cache.delete(key);
        this.cache.set(key, entry);
        this._metrics.hits += 1;
        return entry.value;
    }
    /**
     * Set a value in the cache, evicting the least recently used entry
     * when the cache is full and the key is new.
     */
    set(key, value) {
        if (!this.cache.has(key) && this.cache.size >= this.maxSize) {
            // Evict the oldest entry (first key in insertion order).
            const oldestKey = this.cache.keys().next().value;
            if (oldestKey !== undefined) {
                this.cache.delete(oldestKey);
            }
        }
        const entry = {
            value,
            createdAt: Date.now(),
        };
        // Delete first so the key is re-inserted at the end (most recent).
        this.cache.delete(key);
        this.cache.set(key, entry);
    }
    /**
     * Remove a specific entry from cache.
     */
    invalidate(key) {
        this.cache.delete(key);
    }
    /**
     * Clear all cache entries.
     */
    clear() {
        this.cache.clear();
    }
    /**
     * Get the number of entries in the cache.
     */
    get size() {
        return this.cache.size;
    }
    /**
     * Stop background refresh.
     * Should be called when the client is being cleaned up.
     */
    stop() {
        if (this.refreshTimer) {
            clearInterval(this.refreshTimer);
            this.refreshTimer = undefined;
        }
    }
    /**
     * Dump cache contents to a JSON file for offline use.
     * Throws in browser environments (dumpCache stub throws).
     */
    dump(filePath) {
        const entries = {};
        for (const [key, entry] of this.cache.entries()) {
            entries[key] = entry.value;
        }
        dumpCache(filePath, entries);
    }
    /**
     * Load cache contents from a JSON file.
     *
     * Loaded entries get a fresh TTL starting from load time.
     *
     * @returns Number of entries loaded.
     */
    load(filePath) {
        const entries = loadCache(filePath);
        if (!entries) {
            return 0;
        }
        let loaded = 0;
        const now = Date.now();
        for (const [key, value] of Object.entries(entries)) {
            // Respect maxSize: stop loading once the cache is full.
            if (this.cache.size >= this.maxSize) {
                break;
            }
            this.cache.set(key, {
                value,
                createdAt: now, // Fresh TTL from load time
            });
            loaded += 1;
        }
        return loaded;
    }
    /**
     * Start the background refresh loop.
     */
    startRefreshLoop() {
        this.refreshTimer = setInterval(() => {
            this.refreshStaleEntries().catch((e) => {
                // Log but don't die - keep the refresh loop running
                console.warn("Unexpected error in cache refresh loop:", e);
            });
        }, this.refreshIntervalSeconds * 1000);
        // unref() (Node.js only) lets the process exit while the timer is
        // pending; browser timers are plain numbers without unref.
        if (this.refreshTimer.unref) {
            this.refreshTimer.unref();
        }
    }
    /**
     * Get list of stale cache keys.
     */
    getStaleKeys() {
        const staleKeys = [];
        for (const [key, entry] of this.cache.entries()) {
            if (isStale(entry, this.ttlSeconds)) {
                staleKeys.push(key);
            }
        }
        return staleKeys;
    }
    /**
     * Check for stale entries and refresh them.
     * Refresh failures keep the existing (stale) data in place.
     */
    async refreshStaleEntries() {
        if (!this.fetchFunc) {
            return;
        }
        const staleKeys = this.getStaleKeys();
        if (staleKeys.length === 0) {
            return;
        }
        for (const key of staleKeys) {
            try {
                const newValue = await this.fetchFunc(key);
                this.set(key, newValue);
                this._metrics.refreshes += 1;
            }
            catch (e) {
                // Keep stale data on refresh failure
                this._metrics.refreshErrors += 1;
                console.warn(`Failed to refresh cache entry ${key}:`, e);
            }
        }
    }
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* File system operations for prompt cache (Browser version).
|
|
4
|
+
*
|
|
5
|
+
* This stub is used in browser builds via the package.json browser field.
|
|
6
|
+
* File operations are not supported in browser environments.
|
|
7
|
+
*/
|
|
8
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
9
|
+
exports.dumpCache = dumpCache;
|
|
10
|
+
exports.loadCache = loadCache;
|
|
11
|
+
/**
|
|
12
|
+
* Dump cache entries to a JSON file.
|
|
13
|
+
* @throws Error - Always throws in browser environments.
|
|
14
|
+
*/
|
|
15
|
+
/**
 * Dump cache entries to a JSON file.
 * Browser builds have no file system, so this stub always fails.
 * @throws Error - Always throws in browser environments.
 */
function dumpCache(_filePath, _entries) {
    const message = "dump() is not supported in browser environments.";
    throw new Error(message);
}
|
|
18
|
+
/**
|
|
19
|
+
* Load cache entries from a JSON file.
|
|
20
|
+
* @throws Error - Always throws in browser environments.
|
|
21
|
+
*/
|
|
22
|
+
/**
 * Load cache entries from a JSON file.
 * Browser builds have no file system, so this stub always fails.
 * @throws Error - Always throws in browser environments.
 */
function loadCache(_filePath) {
    const message = "load() is not supported in browser environments.";
    throw new Error(message);
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File system operations for prompt cache (Browser version).
|
|
3
|
+
*
|
|
4
|
+
* This stub is used in browser builds via the package.json browser field.
|
|
5
|
+
* File operations are not supported in browser environments.
|
|
6
|
+
*/
|
|
7
|
+
/**
 * Dump cache entries to a JSON file.
 * Browser builds have no file system access, so this stub always fails.
 * @throws Error - Always throws in browser environments.
 */
export declare function dumpCache(_filePath: string, _entries: Record<string, unknown>): void;
/**
 * Load cache entries from a JSON file.
 * Browser builds have no file system access, so this stub always fails.
 * @throws Error - Always throws in browser environments.
 */
export declare function loadCache(_filePath: string): Record<string, unknown> | null;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File system operations for prompt cache (Browser version).
|
|
3
|
+
*
|
|
4
|
+
* This stub is used in browser builds via the package.json browser field.
|
|
5
|
+
* File operations are not supported in browser environments.
|
|
6
|
+
*/
|
|
7
|
+
/**
|
|
8
|
+
* Dump cache entries to a JSON file.
|
|
9
|
+
* @throws Error - Always throws in browser environments.
|
|
10
|
+
*/
|
|
11
|
+
/**
 * Dump cache entries to a JSON file.
 * Browser builds have no file system, so this stub always fails.
 * @throws Error - Always throws in browser environments.
 */
export function dumpCache(_filePath, _entries) {
    const message = "dump() is not supported in browser environments.";
    throw new Error(message);
}
|
|
14
|
+
/**
|
|
15
|
+
* Load cache entries from a JSON file.
|
|
16
|
+
* @throws Error - Always throws in browser environments.
|
|
17
|
+
*/
|
|
18
|
+
/**
 * Load cache entries from a JSON file.
 * Browser builds have no file system, so this stub always fails.
 * @throws Error - Always throws in browser environments.
 */
export function loadCache(_filePath) {
    const message = "load() is not supported in browser environments.";
    throw new Error(message);
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* File system operations for prompt cache (Node.js version).
|
|
4
|
+
*
|
|
5
|
+
* This file is swapped with prompts_cache_fs.browser.ts for browser builds
|
|
6
|
+
* via the package.json browser field.
|
|
7
|
+
*/
|
|
8
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
9
|
+
if (k2 === undefined) k2 = k;
|
|
10
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
11
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
12
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
13
|
+
}
|
|
14
|
+
Object.defineProperty(o, k2, desc);
|
|
15
|
+
}) : (function(o, m, k, k2) {
|
|
16
|
+
if (k2 === undefined) k2 = k;
|
|
17
|
+
o[k2] = m[k];
|
|
18
|
+
}));
|
|
19
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
20
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
21
|
+
}) : function(o, v) {
|
|
22
|
+
o["default"] = v;
|
|
23
|
+
});
|
|
24
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
25
|
+
var ownKeys = function(o) {
|
|
26
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
27
|
+
var ar = [];
|
|
28
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
29
|
+
return ar;
|
|
30
|
+
};
|
|
31
|
+
return ownKeys(o);
|
|
32
|
+
};
|
|
33
|
+
return function (mod) {
|
|
34
|
+
if (mod && mod.__esModule) return mod;
|
|
35
|
+
var result = {};
|
|
36
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
37
|
+
__setModuleDefault(result, mod);
|
|
38
|
+
return result;
|
|
39
|
+
};
|
|
40
|
+
})();
|
|
41
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
42
|
+
exports.dumpCache = dumpCache;
|
|
43
|
+
exports.loadCache = loadCache;
|
|
44
|
+
const fs = __importStar(require("node:fs"));
|
|
45
|
+
const path = __importStar(require("node:path"));
|
|
46
|
+
/**
|
|
47
|
+
* Dump cache entries to a JSON file.
|
|
48
|
+
*/
|
|
49
|
+
/**
 * Dump cache entries to a JSON file.
 *
 * Creates the parent directory when missing, then performs an atomic
 * write (temp file + rename) so readers never observe a partial file.
 *
 * @param {string} filePath - Destination JSON file.
 * @param {Record<string, unknown>} entries - Cache entries to persist.
 * @throws Re-throws any fs error after removing the temp file.
 */
function dumpCache(filePath, entries) {
    const parentDir = path.dirname(filePath);
    if (!fs.existsSync(parentDir)) {
        fs.mkdirSync(parentDir, { recursive: true });
    }
    const payload = JSON.stringify({ entries }, null, 2);
    const tempPath = `${filePath}.tmp`;
    try {
        // Write-then-rename keeps readers from ever seeing a half-written file.
        fs.writeFileSync(tempPath, payload);
        fs.renameSync(tempPath, filePath);
    }
    catch (err) {
        // Best-effort cleanup of the temp file before propagating.
        if (fs.existsSync(tempPath)) {
            fs.unlinkSync(tempPath);
        }
        throw err;
    }
}
|
|
69
|
+
/**
|
|
70
|
+
* Load cache entries from a JSON file.
|
|
71
|
+
*
|
|
72
|
+
* @returns The entries object, or null if file doesn't exist or is invalid.
|
|
73
|
+
*/
|
|
74
|
+
/**
 * Load cache entries from a JSON file.
 *
 * Deliberately best-effort: a missing file, unreadable file, or invalid
 * JSON all yield null rather than throwing.
 *
 * @param {string} filePath - Source JSON file.
 * @returns {Record<string, unknown> | null} The entries object, or null
 *   if the file doesn't exist or is invalid.
 */
function loadCache(filePath) {
    if (!fs.existsSync(filePath)) {
        return null;
    }
    try {
        const parsed = JSON.parse(fs.readFileSync(filePath, "utf-8"));
        return parsed.entries ?? null;
    }
    catch {
        return null;
    }
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File system operations for prompt cache (Node.js version).
|
|
3
|
+
*
|
|
4
|
+
* This file is swapped with prompts_cache_fs.browser.ts for browser builds
|
|
5
|
+
* via the package.json browser field.
|
|
6
|
+
*/
|
|
7
|
+
/**
|
|
8
|
+
* Dump cache entries to a JSON file.
|
|
9
|
+
*/
|
|
10
|
+
export declare function dumpCache(filePath: string, entries: Record<string, unknown>): void;
|
|
11
|
+
/**
|
|
12
|
+
* Load cache entries from a JSON file.
|
|
13
|
+
*
|
|
14
|
+
* @returns The entries object, or null if file doesn't exist or is invalid.
|
|
15
|
+
*/
|
|
16
|
+
export declare function loadCache(filePath: string): Record<string, unknown> | null;
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File system operations for prompt cache (Node.js version).
|
|
3
|
+
*
|
|
4
|
+
* This file is swapped with prompts_cache_fs.browser.ts for browser builds
|
|
5
|
+
* via the package.json browser field.
|
|
6
|
+
*/
|
|
7
|
+
import * as fs from "node:fs";
|
|
8
|
+
import * as path from "node:path";
|
|
9
|
+
/**
|
|
10
|
+
* Dump cache entries to a JSON file.
|
|
11
|
+
*/
|
|
12
|
+
/**
 * Dump cache entries to a JSON file.
 *
 * Creates the parent directory when missing, then performs an atomic
 * write (temp file + rename) so readers never observe a partial file.
 *
 * @param {string} filePath - Destination JSON file.
 * @param {Record<string, unknown>} entries - Cache entries to persist.
 * @throws Re-throws any fs error after removing the temp file.
 */
export function dumpCache(filePath, entries) {
    const parentDir = path.dirname(filePath);
    if (!fs.existsSync(parentDir)) {
        fs.mkdirSync(parentDir, { recursive: true });
    }
    const payload = JSON.stringify({ entries }, null, 2);
    const tempPath = `${filePath}.tmp`;
    try {
        // Write-then-rename keeps readers from ever seeing a half-written file.
        fs.writeFileSync(tempPath, payload);
        fs.renameSync(tempPath, filePath);
    }
    catch (err) {
        // Best-effort cleanup of the temp file before propagating.
        if (fs.existsSync(tempPath)) {
            fs.unlinkSync(tempPath);
        }
        throw err;
    }
}
|
|
32
|
+
/**
|
|
33
|
+
* Load cache entries from a JSON file.
|
|
34
|
+
*
|
|
35
|
+
* @returns The entries object, or null if file doesn't exist or is invalid.
|
|
36
|
+
*/
|
|
37
|
+
/**
 * Load cache entries from a JSON file.
 *
 * Deliberately best-effort: a missing file, unreadable file, or invalid
 * JSON all yield null rather than throwing.
 *
 * @param {string} filePath - Source JSON file.
 * @returns {Record<string, unknown> | null} The entries object, or null
 *   if the file doesn't exist or is invalid.
 */
export function loadCache(filePath) {
    if (!fs.existsSync(filePath)) {
        return null;
    }
    try {
        const parsed = JSON.parse(fs.readFileSync(filePath, "utf-8"));
        return parsed.entries ?? null;
    }
    catch {
        return null;
    }
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "langsmith",
|
|
3
|
-
"version": "0.4.
|
|
3
|
+
"version": "0.4.6",
|
|
4
4
|
"description": "Client library to connect to the LangSmith Observability and Evaluation Platform.",
|
|
5
5
|
"packageManager": "yarn@1.22.19",
|
|
6
6
|
"files": [
|
|
@@ -401,5 +401,8 @@
|
|
|
401
401
|
"require": "./experimental/vercel.cjs"
|
|
402
402
|
},
|
|
403
403
|
"./package.json": "./package.json"
|
|
404
|
+
},
|
|
405
|
+
"browser": {
|
|
406
|
+
"./dist/utils/prompts_cache_fs.js": "./dist/utils/prompts_cache_fs.browser.js"
|
|
404
407
|
}
|
|
405
408
|
}
|