@pluslabs/utils 0.3.1 → 0.4.1
- package/dist/module/cache/index.d.ts +4 -4
- package/dist/module/cache/index.d.ts.map +1 -1
- package/dist/module/cache/index.js.map +1 -1
- package/dist/module/index.d.ts +4 -4
- package/dist/module/index.d.ts.map +1 -1
- package/dist/module/index.js +4 -4
- package/dist/module/index.js.map +1 -1
- package/dist/module/langfuse-prompt-service/index.d.ts +103 -0
- package/dist/module/langfuse-prompt-service/index.d.ts.map +1 -0
- package/dist/module/langfuse-prompt-service/index.js +297 -0
- package/dist/module/langfuse-prompt-service/index.js.map +1 -0
- package/dist/module/logger/index.d.ts +12 -0
- package/dist/module/logger/index.d.ts.map +1 -0
- package/dist/module/logger/index.js +29 -0
- package/dist/module/logger/index.js.map +1 -0
- package/dist/module/stamp/index.d.ts.map +1 -1
- package/dist/module/stamp/index.js.map +1 -1
- package/package.json +53 -25
- package/src/index.ts +4 -4
- package/src/langfuse-prompt-service/index.ts +469 -0
- package/src/logger/index.ts +29 -0
- package/dist/main/cache/index.d.ts +0 -42
- package/dist/main/cache/index.d.ts.map +0 -1
- package/dist/main/cache/index.js +0 -118
- package/dist/main/cache/index.js.map +0 -1
- package/dist/main/gpt/index.d.ts +0 -18
- package/dist/main/gpt/index.d.ts.map +0 -1
- package/dist/main/gpt/index.js +0 -27
- package/dist/main/gpt/index.js.map +0 -1
- package/dist/main/index.d.ts +0 -6
- package/dist/main/index.d.ts.map +0 -1
- package/dist/main/index.js +0 -39
- package/dist/main/index.js.map +0 -1
- package/dist/main/stamp/index.d.ts +0 -23
- package/dist/main/stamp/index.d.ts.map +0 -1
- package/dist/main/stamp/index.js +0 -47
- package/dist/main/stamp/index.js.map +0 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@pluslabs/utils",
-  "version": "0.3.1",
+  "version": "0.4.1",
   "description": "A set of utilities used across projects",
   "keywords": [
     "javascript",
@@ -12,46 +12,74 @@
   "bugs": "https://github.com/pluslabs/utils/issues",
   "license": "MIT",
   "author": "Magic, Inc",
+  "type": "module",
   "files": [
     "dist",
     "src"
   ],
-  "main": "dist/main/index.js",
   "module": "dist/module/index.js",
   "types": "dist/module/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/module/index.d.ts",
+      "import": "./dist/module/index.js",
+      "default": "./dist/module/index.js"
+    },
+    "./stamp": {
+      "types": "./dist/module/stamp/index.d.ts",
+      "import": "./dist/module/stamp/index.js",
+      "default": "./dist/module/stamp/index.js"
+    },
+    "./logger": {
+      "types": "./dist/module/logger/index.d.ts",
+      "import": "./dist/module/logger/index.js",
+      "default": "./dist/module/logger/index.js"
+    },
+    "./langfuse-prompt-service": {
+      "types": "./dist/module/langfuse-prompt-service/index.d.ts",
+      "import": "./dist/module/langfuse-prompt-service/index.js",
+      "default": "./dist/module/langfuse-prompt-service/index.js"
+    }
+  },
   "sideEffects": false,
   "repository": "pluslabs/utils",
   "scripts": {
     "clean": "rimraf dist",
     "format": "prettier --write \"{src,test}/**/*.ts\"",
-    "build": "run-s clean format
-    "build:main": "tsc -p tsconfig.json",
-    "build:module": "tsc -p tsconfig.module.json",
+    "build": "run-s clean format && tsc -p tsconfig.json",
     "test": "run-s test:types test:run",
-    "test:run": "jest --runInBand",
+    "test:run": "jest --runInBand --passWithNoTests",
     "test:watch": "jest --watch --verbose false --silent false",
-    "test:types": "
+    "test:types": "tsc -p tsconfig.json",
     "docs": "typedoc --entryPoints src/index.ts --out docs --includes src/**/*.ts",
-    "docs:json": "typedoc --entryPoints src/index.ts --includes src/**/*.ts --json docs/spec.json --excludeExternals"
+    "docs:json": "typedoc --entryPoints src/index.ts --includes src/**/*.ts --json docs/spec.json --excludeExternals",
+    "prepare": "husky"
+  },
+  "peerDependencies": {
+    "@langfuse/client": "^4.5.1",
+    "@langfuse/core": "^4.5.1",
+    "@upstash/redis": "^1.36.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/core": "^2.3.0"
   },
   "devDependencies": {
-    "@
-    "
-    "jest": "^
+    "@langfuse/client": "^4.5.1",
+    "@langfuse/core": "^4.5.1",
+    "@types/jest": "^30.0.0",
+    "@upstash/redis": "^1.36.1",
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/core": "^2.3.0",
+    "husky": "^9.1.7",
+    "jest": "^30.2.0",
     "npm-run-all": "^4.1.5",
-    "prettier": "^
-    "pretty-quick": "^
-    "rimraf": "^
-    "ts-jest": "^29.
-    "ts-loader": "^
-    "ts-node": "^10.9.
-    "tsd": "^0.
-    "typedoc": "^0.
-    "typescript": "^
-  },
-  "husky": {
-    "hooks": {
-      "pre-commit": "pretty-quick --staged"
-    }
+    "prettier": "^3.7.4",
+    "pretty-quick": "^4.2.2",
+    "rimraf": "^6.1.2",
+    "ts-jest": "^29.4.6",
+    "ts-loader": "^9.5.4",
+    "ts-node": "^10.9.2",
+    "tsd": "^0.33.0",
+    "typedoc": "^0.28.15",
+    "typescript": "^5.9.3"
  }
 }
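With `"type": "module"` and the new `exports` map, the package is published as ESM-only and gains subpath entry points for `stamp`, `logger`, and `langfuse-prompt-service`. A minimal consumption sketch, assuming the package is installed from the registry and imported from an ESM / NodeNext project; the symbol names are taken from the source files in this diff:

```ts
// Root export (resolved via the "." entry of the exports map above).
import { gpt, createCacheKeysManager, createStamp } from '@pluslabs/utils';
// Subpath exports added in 0.4.1.
import { logger } from '@pluslabs/utils/logger';
import { langfusePromptService } from '@pluslabs/utils/langfuse-prompt-service';

logger.info('[example] utils loaded', typeof gpt, typeof createCacheKeysManager, typeof createStamp);
```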
package/src/index.ts
CHANGED
@@ -1,7 +1,7 @@
-import * as gpt from './gpt';
-import createCacheKeysManager from './cache';
+import * as gpt from './gpt/index.js';
+import createCacheKeysManager from './cache/index.js';
 
-export * from './stamp';
-export * from './cache';
+export * from './stamp/index.js';
+export * from './cache/index.js';
 
 export { gpt, createCacheKeysManager };
package/src/langfuse-prompt-service/index.ts
ADDED
@@ -0,0 +1,469 @@
+import { LangfuseClient, TextPromptClient, ChatPromptClient } from '@langfuse/client';
+import type { PromptMeta } from '@langfuse/core';
+import { Redis } from '@upstash/redis';
+// ESM best-practice: keep relative imports but use `.js` specifiers.
+// With `moduleResolution: "NodeNext"`, TS resolves this to the TS sources and
+// preserves `.js` in emitted output (required by Node ESM / Turbopack).
+import { createStamp } from '../stamp/index.js';
+import { logger } from '../logger/index.js';
+
+export type LangfusePromptClient = TextPromptClient | ChatPromptClient;
+
+export type TPrompt = {
+  id: string;
+  prompt: string;
+  version: string;
+  version_label?: string;
+};
+
+export type ToolPromptWithClient = TPrompt & {
+  langfusePrompt: LangfusePromptClient;
+};
+
+export type PromptVersionInfo = {
+  id: string;
+  version: number;
+  version_label: string | null;
+  labels: string[];
+  config: LangfusePromptConfig | null;
+};
+
+export type PromptVersionsResult = {
+  name: string;
+  activePrompt: PromptVersionInfo | null;
+  versions: PromptVersionInfo[];
+};
+
+export type LangfusePromptConfig = {
+  model?: string;
+  model_parameters?: {
+    reasoning_effort?: string;
+  };
+};
+
+const DEFAULT_CACHE_TTL_SECONDS = 60 * 1; // 1 minute
+
+const createRedisFromEnv = (): Redis =>
+  new Redis({
+    url: process.env.REDIS_UPSTASH_KV_REST_API_URL!,
+    token: process.env.REDIS_UPSTASH_KV_REST_API_TOKEN!
+  });
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Generic Redis Cache Helper
+// ─────────────────────────────────────────────────────────────────────────────
+
+type CacheOptions<T, S> = {
+  cacheKey: string;
+  fetcher: () => Promise<T>;
+  ttlSeconds?: number;
+  serialize: (value: T) => S;
+  hydrate: (serialized: S) => T;
+  logContext: string;
+};
+
+/**
+ * Generic Redis cache wrapper with custom serialization/hydration.
+ * If Redis is not configured, this is a passthrough that simply calls the fetcher.
+ */
+async function withRedisCache<T, S>(
+  redis: Redis,
+  {
+    cacheKey,
+    fetcher,
+    ttlSeconds = DEFAULT_CACHE_TTL_SECONDS,
+    serialize,
+    hydrate,
+    logContext
+  }: CacheOptions<T, S>
+): Promise<T> {
+  try {
+    const cached = await redis.get<S | string>(cacheKey);
+    if (cached) {
+      const parsed: S = typeof cached === 'string' ? JSON.parse(cached) : cached;
+      logger.log(`[langfuse-prompt-service] [${logContext}] [cache-hit]`, cacheKey);
+      return hydrate(parsed);
+    }
+  } catch (cacheError) {
+    logger.warn(`[langfuse-prompt-service] [${logContext}] [cache-get-error]`, cacheError);
+  }
+
+  const result = await fetcher();
+  logger.log(`[langfuse-prompt-service] [${logContext}] [cache-miss]`, cacheKey);
+
+  try {
+    await redis.setex(cacheKey, ttlSeconds, JSON.stringify(serialize(result)));
+    logger.log(`[langfuse-prompt-service] [${logContext}] [cache-set]`, cacheKey);
+  } catch (err) {
+    logger.warn(`[langfuse-prompt-service] [${logContext}] [cache-set-error]`, err);
+  }
+
+  return result;
+}
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Langfuse Prompt Serialization
+// ─────────────────────────────────────────────────────────────────────────────
+
+type SerializedPrompt = {
+  type: 'text' | 'chat';
+  promptResponse: Record<string, unknown>;
+  isFallback: boolean;
+};
+
+/**
+ * Build a stable Redis cache key for a Langfuse prompt.
+ */
+const buildPromptCacheKey = (promptName: string, version?: string | number): string => {
+  const versionPart = version !== undefined ? `v${version}` : 'production';
+  return `langfuse:prompt:${promptName}:${versionPart}`;
+};
+
+/**
+ * Serialize a Langfuse prompt client to a plain object for Redis storage.
+ */
+const serializePromptClient = (client: LangfusePromptClient): SerializedPrompt => {
+  const isText = client instanceof TextPromptClient;
+  return {
+    type: isText ? 'text' : 'chat',
+    promptResponse: (client as TextPromptClient | ChatPromptClient)
+      .promptResponse as unknown as Record<string, unknown>,
+    isFallback: client.isFallback
+  };
+};
+
+/**
+ * Hydrate a serialized prompt back into a Langfuse prompt client instance.
+ */
+const hydratePromptClient = (serialized: SerializedPrompt): LangfusePromptClient => {
+  if (serialized.type === 'text') {
+    return new TextPromptClient(
+      serialized.promptResponse as unknown as ConstructorParameters<
+        typeof TextPromptClient
+      >[0],
+      serialized.isFallback
+    );
+  }
+  return new ChatPromptClient(
+    serialized.promptResponse as unknown as ConstructorParameters<typeof ChatPromptClient>[0],
+    serialized.isFallback
+  );
+};
+
+export type LangfusePromptServiceOptions = {
+  /**
+   * Optional Langfuse client instance. If omitted, a new client is created.
+   */
+  client?: LangfuseClient;
+  /**
+   * Optional Redis client for caching. If omitted, tries to auto-configure Upstash Redis from env.
+   * If env vars are missing, caching is disabled.
+   */
+  redis?: Redis;
+  /**
+   * Default cache TTL (seconds) for prompt caching.
+   */
+  cacheTtlSeconds?: number;
+  /**
+   * Optional tool prompt mapping (toolId -> promptName).
+   * If omitted, tool prompt names fall back to `tools/${toolId}`.
+   */
+  toolPromptMap?: Record<string, string>;
+};
+
+export class LangfusePromptService {
+  private client: LangfuseClient;
+  private redis: Redis;
+  private cacheTtlSeconds: number;
+  private toolPromptMap?: Record<string, string>;
+
+  constructor(options: LangfusePromptServiceOptions = {}) {
+    this.client = options.client ?? new LangfuseClient();
+    this.redis = options.redis ?? createRedisFromEnv();
+    this.cacheTtlSeconds = options.cacheTtlSeconds ?? DEFAULT_CACHE_TTL_SECONDS;
+    this.toolPromptMap = options.toolPromptMap;
+  }
+
+  /**
+   * Get tool prompt name for a given tool ID.
+   */
+  private getToolPromptName(toolId: string): string {
+    if (this.toolPromptMap && toolId in this.toolPromptMap) {
+      return this.toolPromptMap[toolId]!;
+    }
+
+    // Default: tool prompts are stored under `tools/<toolId>`
+    return `tools/${toolId}`;
+  }
+
+  /**
+   * Fetch a prompt by Langfuse prompt name with optional version.
+   * Uses Redis cache (when configured) to avoid repeated Langfuse API calls.
+   */
+  async getPrompt({
+    promptName,
+    version
+  }: {
+    promptName: string;
+    version?: string;
+  }): Promise<TextPromptClient> {
+    const { stamp } = createStamp(`langfuse.getPrompt[${promptName}]`);
+
+    try {
+      const result = await withRedisCache<LangfusePromptClient, SerializedPrompt>(
+        this.redis,
+        {
+          cacheKey: buildPromptCacheKey(promptName, version),
+          fetcher: async () => {
+            // When version is specified, fetch by version number
+            // Otherwise, fetch by 'production' label to ensure we get the correct promoted version
+            const prompt = await this.client.prompt.get(promptName, {
+              ...(version
+                ? { version: parseInt(version, 10) }
+                : { label: 'production' })
+            });
+            stamp('fetched');
+            return prompt;
+          },
+          ttlSeconds: this.cacheTtlSeconds,
+          serialize: serializePromptClient,
+          hydrate: hydratePromptClient,
+          logContext: 'getPrompt'
+        }
+      );
+
+      return result as TextPromptClient;
+    } catch (error) {
+      logger.error(
+        '[langfuse-prompt-service] [getPrompt]',
+        `[promptName=${promptName}]`,
+        `[error=${JSON.stringify(error)}]`
+      );
+      throw error;
+    } finally {
+      stamp('end').end();
+    }
+  }
+
+  /**
+   * Fetch a tool prompt by tool ID with optional version override.
+   */
+  async getToolPrompt({ toolId, version }: { toolId: string; version?: number }) {
+    try {
+      const promptName = this.getToolPromptName(toolId);
+      const langfusePrompt = await this.getPrompt({
+        promptName,
+        version: version?.toString()
+      });
+      return langfusePrompt;
+    } catch (error) {
+      logger.warn(
+        '[langfuse-prompt-service] [getToolPrompt]',
+        `[toolId=${toolId}]`,
+        `[error=${JSON.stringify(error)}]`
+      );
+      return null;
+    }
+  }
+
+  /**
+   * Fetch multiple tool prompts with optional overrides.
+   * Uses a single composite Redis cache key for the entire tool set (when Redis configured).
+   */
+  async getToolPrompts({
+    toolIds,
+    toolOverrides
+  }: {
+    toolIds: string[];
+    toolOverrides?: Record<string, number>;
+  }): Promise<LangfusePromptClient[]> {
+    const { stamp } = createStamp(`langfuse.getToolPrompts[${toolIds.join(',')}]`);
+
+    // Build composite cache key from sorted tool IDs + versions
+    const keyParts = toolIds
+      .slice()
+      .sort()
+      .map((id) => {
+        const v = toolOverrides?.[id];
+        return v !== undefined ? `${id}:v${v}` : id;
+      });
+
+    try {
+      return await withRedisCache<LangfusePromptClient[], SerializedPrompt[]>(this.redis, {
+        cacheKey: `langfuse:toolprompts:${keyParts.join('|')}`,
+        fetcher: async () => {
+          // `getToolPrompt` is already fail-safe (returns null on errors), so `Promise.all` is fine here.
+          const results = await Promise.all(
+            toolIds.map((toolId) => {
+              const version = toolOverrides?.[toolId];
+              return this.getToolPrompt({ toolId, version });
+            })
+          );
+
+          const toolPrompts = results.filter(
+            (p): p is TextPromptClient => p !== null
+          ) as LangfusePromptClient[];
+          stamp('fetched');
+          return toolPrompts;
+        },
+        ttlSeconds: this.cacheTtlSeconds,
+        serialize: (prompts) => prompts.map(serializePromptClient),
+        hydrate: (serialized) => serialized.map(hydratePromptClient),
+        logContext: 'getToolPrompts'
+      });
+    } finally {
+      stamp('end').end();
+    }
+  }
+
+  /**
+   * Fetch tool prompts and convert to ToolPromptWithClient format.
+   */
+  async getToolPromptsWithClients({
+    toolIds,
+    toolOverrides
+  }: {
+    toolIds: string[];
+    toolOverrides?: Record<string, number>;
+  }): Promise<ToolPromptWithClient[]> {
+    const langfusePrompts = await this.getToolPrompts({ toolIds, toolOverrides });
+
+    return langfusePrompts.map((lp) => ({
+      id: `langfuse-${lp.version}`,
+      prompt: lp.prompt as string,
+      active: true,
+      version: lp.version.toString(),
+      langfusePrompt: lp
+    }));
+  }
+
+  /**
+   * Return every version for each prompt, including per-version labels and configs.
+   * Missing prompt names are simply omitted.
+   */
+  async listPromptVersionsWithConfigs(
+    promptNames: string[],
+    concurrency = 8
+  ): Promise<Record<string, PromptVersionsResult>> {
+    const targetNames = new Set(promptNames);
+    const results: Record<string, PromptVersionsResult> = {};
+
+    // Single page fetch with generous limit; adjust if prompt library grows beyond 200.
+    const response = await this.client.api.prompts.list({
+      page: 1,
+      limit: 100
+    });
+
+    const promptMetas: PromptMeta[] = (response.data || []).filter((p: PromptMeta) =>
+      targetNames.has(p.name)
+    );
+
+    // Pre-allocate container for version details per prompt
+    const versionBuckets: Record<string, PromptVersionInfo[]> = {};
+    for (const p of promptMetas) {
+      versionBuckets[p.name] = [];
+    }
+
+    // Build work items (promptName + version) and process with bounded concurrency
+    type WorkItem = { promptName: string; version: number };
+    const work: WorkItem[] = [];
+
+    for (const p of promptMetas) {
+      for (const version of p.versions) {
+        work.push({
+          promptName: p.name,
+          version
+        });
+      }
+    }
+
+    let cursor = 0;
+    const runWorker = async () => {
+      while (cursor < work.length) {
+        const idx = cursor++;
+        const item = work[idx];
+        const prompt = await this.client.prompt.get(item.promptName, {
+          version: item.version
+        });
+
+        versionBuckets[item.promptName].push({
+          id: `${item.promptName}-v${item.version}`,
+          version: item.version,
+          version_label:
+            prompt.labels.find((l: string) => l !== 'production' && l !== 'latest') ||
+            null,
+          labels: prompt.labels,
+          config: prompt.config as LangfusePromptConfig
+        });
+      }
+    };
+
+    const workers = Array.from({ length: Math.min(concurrency, work.length) }, () =>
+      runWorker()
+    );
+
+    await Promise.all(workers);
+
+    // Build final result per prompt
+    for (const p of promptMetas) {
+      const versionDetails = versionBuckets[p.name].sort((a, b) => a.version - b.version);
+
+      const activePrompt =
+        versionDetails.find((v) => v.labels.indexOf('production') !== -1) ??
+        versionDetails[versionDetails.length - 1] ??
+        null;
+
+      results[p.name] = {
+        name: p.name,
+        activePrompt,
+        versions: versionDetails
+      };
+    }
+
+    return results;
+  }
+
+  /**
+   * Cached wrapper around listPromptVersionsWithConfigs.
+   * Uses Redis with a short TTL to avoid repeated Langfuse list/get bursts.
+   */
+  async listPromptVersionsWithConfigsCached(
+    promptNames: string[],
+    concurrency = 8,
+    ttlSeconds = 60,
+    forceRefresh = false
+  ): Promise<Record<string, PromptVersionsResult>> {
+    const cacheKey = `langfuse:prompt_versions:${promptNames
+      .slice()
+      .sort()
+      .join('|')}:c${concurrency}`;
+
+    // Force refresh bypasses cache read
+    if (forceRefresh) {
+      const fresh = await this.listPromptVersionsWithConfigs(promptNames, concurrency);
+      this.redis
+        .setex(cacheKey, ttlSeconds, JSON.stringify(fresh))
+        .catch((error: unknown) =>
+          logger.warn(
+            '[langfuse-prompt-service] [listPromptVersionsWithConfigsCached] [cache-set-error]',
+            error
+          )
+        );
+      return fresh;
+    }
+
+    return withRedisCache<
+      Record<string, PromptVersionsResult>,
+      Record<string, PromptVersionsResult>
+    >(this.redis, {
+      cacheKey,
+      fetcher: () => this.listPromptVersionsWithConfigs(promptNames, concurrency),
+      ttlSeconds,
+      serialize: (v) => v,
+      hydrate: (v) => v,
+      logContext: 'listPromptVersionsWithConfigsCached'
+    });
+  }
+}
+
+export const langfusePromptService = new LangfusePromptService();
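A hedged usage sketch of the service above, assuming the Langfuse credentials and the `REDIS_UPSTASH_KV_REST_API_URL` / `REDIS_UPSTASH_KV_REST_API_TOKEN` environment variables are set. The options, method names, and caching behavior come from the class added in this diff; the prompt names, tool IDs, and version numbers are placeholders:

```ts
import { LangfusePromptService } from '@pluslabs/utils/langfuse-prompt-service';

// Omitting `client` and `redis` falls back to `new LangfuseClient()` and to the
// Upstash Redis credentials read from the env vars listed above.
const service = new LangfusePromptService({
  cacheTtlSeconds: 120,
  toolPromptMap: { search: 'tools/search' } // placeholder toolId -> promptName mapping
});

// Fetches the 'production'-labelled version and caches it in Redis for the TTL above.
const prompt = await service.getPrompt({ promptName: 'tools/search' });
console.log(prompt.version, prompt.prompt);

// Batch fetch with a per-tool version override; failed lookups return null and are filtered out.
const toolPrompts = await service.getToolPrompts({
  toolIds: ['search'],
  toolOverrides: { search: 3 }
});
console.log(toolPrompts.length);
```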
package/src/logger/index.ts
ADDED
@@ -0,0 +1,29 @@
+const levels = ['all', 'info', 'warn', 'error'];
+
+/**
+ * Utility for logging messages at different levels in different environments.
+ * Logging a lot of data in production adds costs to log ingestion.
+ */
+export const logger = {
+  level: levels.indexOf(process.env.NODE_ENV !== 'development' ? 'info' : 'all'),
+  log: (...messages: unknown[]) => {
+    if (levels.indexOf('all') >= logger.level) {
+      console.log(...messages);
+    }
+  },
+  info: (...messages: unknown[]) => {
+    if (levels.indexOf('info') >= logger.level) {
+      console.log(...messages);
+    }
+  },
+  warn: (...messages: unknown[]) => {
+    if (levels.indexOf('warn') >= logger.level) {
+      console.warn(...messages);
+    }
+  },
+  error: (...messages: unknown[]) => {
+    if (levels.indexOf('error') >= logger.level) {
+      console.error('\x1b[31m', ...messages, '\x1b[0m');
+    }
+  }
+};
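The logger gates output on `NODE_ENV`: outside `development` the level is `info`, so `logger.log` (level `all`) is suppressed while `info`, `warn`, and `error` still print, with errors wrapped in ANSI red. A short usage sketch, assuming it is consumed via the new `./logger` subpath export:

```ts
import { logger } from '@pluslabs/utils/logger';

logger.log('verbose detail, only printed when NODE_ENV === "development"');
logger.info('printed in every environment');
logger.warn('printed in every environment via console.warn');
logger.error('printed in every environment, wrapped in ANSI red');
```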
@@ -1,42 +0,0 @@
|
|
|
1
|
-
declare class CacheKey {
|
|
2
|
-
private key;
|
|
3
|
-
/**
|
|
4
|
-
* The time to live for the cache key in seconds
|
|
5
|
-
*/
|
|
6
|
-
ttl: number;
|
|
7
|
-
constructor(prefix: string, ...args: string[]);
|
|
8
|
-
setTtl(ttl: number): this;
|
|
9
|
-
add(...values: (string | number | boolean)[]): this;
|
|
10
|
-
get(): string;
|
|
11
|
-
}
|
|
12
|
-
type CacheKeysManagerOptions<R extends Record<string, readonly string[]>> = {
|
|
13
|
-
registry: R;
|
|
14
|
-
namespace: string;
|
|
15
|
-
redisClient?: {
|
|
16
|
-
get<T = string>(key: string): Promise<T | null>;
|
|
17
|
-
set<T = string>(key: string, value: T, options: {
|
|
18
|
-
ex: number;
|
|
19
|
-
}): Promise<void>;
|
|
20
|
-
del(key: string): Promise<void>;
|
|
21
|
-
[key: string]: any;
|
|
22
|
-
};
|
|
23
|
-
};
|
|
24
|
-
/**
|
|
25
|
-
* Creates a cache keys manager
|
|
26
|
-
* @param options - The options for the cache keys manager
|
|
27
|
-
* @param options.registry - The registry of cache keys
|
|
28
|
-
* @param options.namespace - The namespace for the cache keys
|
|
29
|
-
* @param options.redisClient - The redis client to use for the cache keys manager
|
|
30
|
-
* @returns The cache keys manager
|
|
31
|
-
*/
|
|
32
|
-
declare function createCacheKeysManager<R extends Record<string, readonly string[]>>(options: CacheKeysManagerOptions<R>): {
|
|
33
|
-
getCacheKey: <K extends keyof R, V extends R[K][number], Args extends (string | number | boolean)[]>(root: K, node: V, ...identifiers: Args) => CacheKey;
|
|
34
|
-
invalidateCacheKey: <K_1 extends keyof R, V_1 extends R[K_1][number], Args_1 extends (string | number | boolean)[]>(root: K_1, key: V_1, ...identifiers: Args_1) => Promise<void>;
|
|
35
|
-
getCachedData: <T, K_2 extends keyof R, V_2 extends R[K_2][number]>(options: {
|
|
36
|
-
cacheKey: string | (() => [K_2, V_2, ...(string | number | boolean)[]]);
|
|
37
|
-
queryFn: () => Promise<T | null>;
|
|
38
|
-
ttl?: number | undefined;
|
|
39
|
-
}) => Promise<T | null>;
|
|
40
|
-
};
|
|
41
|
-
export default createCacheKeysManager;
|
|
42
|
-
//# sourceMappingURL=index.d.ts.map
|
|
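Only the CommonJS (`dist/main`) build of the cache-keys manager is removed; the same API still ships in the ESM build (`dist/module/cache`) and is re-exported from the package root. A hedged usage sketch reconstructed from the declarations above; the registry, namespace, and in-memory Redis stand-in are placeholders:

```ts
import { createCacheKeysManager } from '@pluslabs/utils';

// Minimal in-memory stand-in matching the declared redisClient shape (placeholder).
const store = new Map<string, unknown>();
const redisClient = {
  async get<T = string>(key: string): Promise<T | null> {
    return (store.get(key) as T) ?? null;
  },
  async set<T = string>(key: string, value: T, _options: { ex: number }): Promise<void> {
    store.set(key, value);
  },
  async del(key: string): Promise<void> {
    store.delete(key);
  }
};

const cacheKeys = createCacheKeysManager({
  registry: { user: ['profile', 'settings'] } as const, // placeholder registry
  namespace: 'example-app',
  redisClient
});

// Build a namespaced key, cache a query result for 5 minutes, then invalidate it.
const key = cacheKeys.getCacheKey('user', 'profile', 'user-123').setTtl(300);
console.log(key.get());

const profile = await cacheKeys.getCachedData({
  cacheKey: () => ['user', 'profile', 'user-123'],
  queryFn: async () => ({ name: 'Ada' }), // placeholder fetch
  ttl: 300
});
console.log(profile);

await cacheKeys.invalidateCacheKey('user', 'profile', 'user-123');
```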
package/dist/main/cache/index.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/cache/index.ts"],"names":[],"mappings":"AAEA,cAAM,QAAQ;IACV,OAAO,CAAC,GAAG,CAAW;IAEtB;;OAEG;IACI,GAAG,EAAE,MAAM,CAAe;gBAErB,MAAM,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE;IAI7C,MAAM,CAAC,GAAG,EAAE,MAAM;IAKlB,GAAG,CAAC,GAAG,MAAM,EAAE,CAAC,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC,EAAE;IAK5C,GAAG;CAGN;AAED,KAAK,uBAAuB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,CAAC,IAAI;IACxE,QAAQ,EAAE,CAAC,CAAC;IACZ,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE;QACV,GAAG,CAAC,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,GAAG,CAAC,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAE,OAAO,EAAE;YAAE,EAAE,EAAE,MAAM,CAAA;SAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;QAC/E,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;QAChC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;KACtB,CAAC;CACL,CAAC;AAEF;;;;;;;GAOG;AACH,iBAAS,sBAAsB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,CAAC,EACvE,OAAO,EAAE,uBAAuB,CAAC,CAAC,CAAC;;wKAqBO,QAAQ,IAAI,CAAC;;;;;;EA0F1D;AAED,eAAe,sBAAsB,CAAC"}