@langfuse/client 4.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,279 @@
1
+ import mustache from "mustache";
2
+ import { ChatMessageType, } from "./types.js";
3
// Disable mustache's default HTML escaping: prompt text must be rendered verbatim.
mustache.escape = (text) => text;
6
class BasePromptClient {
    /**
     * Shared base for prompt clients. Copies metadata fields from the API
     * prompt response onto the client instance.
     *
     * @param prompt - Prompt response object (name, version, config, labels, tags, commitMessage).
     * @param isFallback - True when this client wraps a locally supplied fallback prompt.
     * @param type - Prompt kind discriminator ("text" or "chat").
     */
    constructor(prompt, isFallback = false, type) {
        this.name = prompt.name;
        this.version = prompt.version;
        this.config = prompt.config;
        this.labels = prompt.labels;
        this.tags = prompt.tags;
        this.isFallback = isFallback;
        this.type = type;
        this.commitMessage = prompt.commitMessage;
    }
    /**
     * Converts mustache-style {{variable}} markers into Langchain's {variable}
     * form, first doubling JSON-related braces so Langchain does not mistake
     * them for template variables.
     */
    _transformToLangchainVariables(content) {
        const escaped = this.escapeJsonForLangchain(content);
        return escaped.replace(/\{\{(\w+)\}\}/g, "{$1}");
    }
    /**
     * Escapes every curly brace that is part of a JSON object by doubling it.
     *
     * A brace counts as JSON-related when, after skipping any immediate
     * whitespace, the next non-whitespace character is a single (') or double
     * (") quote. Braces that are already doubled (e.g. `{{variable}}`
     * placeholders) are left untouched.
     *
     * @param text - Input string that may contain JSON snippets.
     * @returns The string with JSON-related braces doubled.
     */
    escapeJsonForLangchain(text) {
        const output = [];
        // For each single "{" emitted, remembers whether it was treated as JSON.
        const openBraceWasJson = [];
        const length = text.length;
        let position = 0;
        while (position < length) {
            const current = text[position];
            if (current === "{") {
                // Leave pre-doubled "{{" untouched.
                if (text[position + 1] === "{") {
                    output.push("{{");
                    position += 2;
                    continue;
                }
                // Peek past whitespace to find the next meaningful character.
                let lookahead = position + 1;
                while (lookahead < length && /\s/.test(text[lookahead])) {
                    lookahead += 1;
                }
                const jsonBrace = lookahead < length && (text[lookahead] === "'" || text[lookahead] === '"');
                output.push(jsonBrace ? "{{" : "{");
                openBraceWasJson.push(jsonBrace);
                position += 1;
                continue;
            }
            if (current === "}") {
                // Leave pre-doubled "}}" untouched.
                if (text[position + 1] === "}") {
                    output.push("}}");
                    position += 2;
                    continue;
                }
                // Mirror how the matching "{" was treated; default to non-JSON.
                const jsonBrace = openBraceWasJson.pop() === true;
                output.push(jsonBrace ? "}}" : "}");
                position += 1;
                continue;
            }
            output.push(current);
            position += 1;
        }
        return output.join("");
    }
}
79
export class TextPromptClient extends BasePromptClient {
    /**
     * @param prompt - Text prompt response from the Langfuse API.
     * @param isFallback - Whether this client wraps a locally supplied fallback prompt.
     */
    constructor(prompt, isFallback = false) {
        super(prompt, isFallback, "text");
        this.promptResponse = prompt;
        this.prompt = prompt.prompt;
    }
    /**
     * Renders the prompt string by substituting mustache {{variables}}.
     *
     * @param variables - Key-value pairs for mustache substitution.
     * @param _placeholders - Unused; kept for signature parity with ChatPromptClient.
     * @returns The compiled prompt string.
     */
    compile(variables, _placeholders) {
        return mustache.render(this.promptResponse.prompt, variables ?? {});
    }
    /**
     * Converts the Langfuse prompt into a string compatible with Langchain
     * PromptTemplate: adapts the mustache-style double curly braces
     * {{variable}} used in Langfuse to the single curly brace {variable}
     * format expected by Langchain.
     *
     * @returns {string} The string that can be plugged into Langchain's PromptTemplate.
     */
    getLangchainPrompt(_options) {
        return this._transformToLangchainVariables(this.prompt);
    }
    /** Serializes prompt metadata and content to a JSON string. */
    toJSON() {
        const serializable = {
            name: this.name,
            prompt: this.prompt,
            version: this.version,
            isFallback: this.isFallback,
            tags: this.tags,
            labels: this.labels,
            type: this.type,
            config: this.config,
        };
        return JSON.stringify(serializable);
    }
}
112
export class ChatPromptClient extends BasePromptClient {
    /**
     * @param prompt - Chat prompt response from the Langfuse API.
     * @param isFallback - Whether this client wraps a locally supplied fallback prompt.
     */
    constructor(prompt, isFallback = false) {
        const normalizedPrompt = ChatPromptClient.normalizePrompt(prompt.prompt);
        const typedPrompt = { ...prompt, prompt: normalizedPrompt };
        super(typedPrompt, isFallback, "chat");
        this.promptResponse = typedPrompt;
        this.prompt = normalizedPrompt;
    }
    /**
     * Adds the `type` discriminator to legacy ChatMessage entries that lack
     * one, for backward compatibility with the pre-placeholder format.
     */
    static normalizePrompt(prompt) {
        return prompt.map((item) => "type" in item ? item : { type: ChatMessageType.ChatMessage, ...item });
    }
    /**
     * Compiles the chat prompt by replacing placeholders and variables with
     * provided values.
     *
     * Placeholders are filled in first from `placeholders`, then mustache
     * variables are rendered into each message's content. Unresolved
     * placeholders are kept in the output as placeholder objects. To only
     * fill in placeholders, pass an empty object for `variables`.
     *
     * @param variables - Key-value pairs for mustache substitution in message content.
     * @param placeholders - Map of placeholder name to ChatMessage array (or other value).
     * @returns Array of ChatMessage objects and unresolved placeholder objects.
     */
    compile(variables, placeholders) {
        const placeholderValues = placeholders ?? {};
        const expandedMessages = [];
        for (const item of this.prompt) {
            if ("type" in item && item.type === ChatMessageType.Placeholder) {
                const value = placeholderValues[item.name];
                const isMessageList = Array.isArray(value) &&
                    value.length > 0 &&
                    value.every((msg) => typeof msg === "object" && "role" in msg && "content" in msg);
                if (isMessageList) {
                    expandedMessages.push(...value);
                }
                else if (Array.isArray(value) && value.length === 0) {
                    // Empty array provided: drop the placeholder from the output.
                }
                else if (value !== undefined) {
                    // Non-standard placeholder value: include its JSON string form.
                    expandedMessages.push(JSON.stringify(value));
                }
                else {
                    // No value supplied: keep the unresolved placeholder object.
                    expandedMessages.push(item);
                }
            }
            else if ("role" in item &&
                "content" in item &&
                item.type === ChatMessageType.ChatMessage) {
                expandedMessages.push({ role: item.role, content: item.content });
            }
        }
        return expandedMessages.map((entry) => {
            const isMessage = typeof entry === "object" &&
                entry !== null &&
                "role" in entry &&
                "content" in entry;
            if (!isMessage) {
                // Placeholder or stringified value passes through untouched.
                return entry;
            }
            return {
                ...entry,
                content: mustache.render(entry.content, variables ?? {}),
            };
        });
    }
    /**
     * Converts the Langfuse prompt into a format compatible with Langchain
     * prompt templates.
     *
     * Fills in placeholders from provided values and converts unresolved ones
     * to Langchain MessagesPlaceholder-shaped objects. Transforms variables
     * from {{var}} to {var} format for Langchain without rendering them.
     *
     * @param options - Optional config; `options.placeholders` maps placeholder names to ChatMessage arrays.
     * @returns Array of ChatMessage objects and MessagesPlaceholder-shaped objects
     *          with variables transformed for Langchain compatibility.
     *
     * @example
     * ```typescript
     * const client = new ChatPromptClient(prompt);
     * client.getLangchainPrompt({ placeholders: { examples: [{ role: "user", content: "Hello" }] } });
     * ```
     */
    getLangchainPrompt(options) {
        const placeholderValues = options?.placeholders ?? {};
        const langchainMessages = [];
        for (const item of this.prompt) {
            if ("type" in item && item.type === ChatMessageType.Placeholder) {
                const value = placeholderValues[item.name];
                const isMessageList = Array.isArray(value) &&
                    value.length > 0 &&
                    value.every((msg) => typeof msg === "object" && "role" in msg && "content" in msg);
                if (isMessageList) {
                    // Fully resolved placeholder: inline its messages, adapting variables.
                    langchainMessages.push(...value.map((msg) => ({
                        role: msg.role,
                        content: this._transformToLangchainVariables(msg.content),
                    })));
                }
                else if (Array.isArray(value) && value.length === 0) {
                    // Empty array provided: drop the placeholder from the output.
                }
                else if (value !== undefined) {
                    // Non-standard placeholder value: include its JSON string form.
                    langchainMessages.push(JSON.stringify(value));
                }
                else {
                    // Unresolved: emit a Langchain MessagesPlaceholder-shaped object.
                    langchainMessages.push({ variableName: item.name, optional: false });
                }
            }
            else if ("role" in item &&
                "content" in item &&
                item.type === ChatMessageType.ChatMessage) {
                langchainMessages.push({
                    role: item.role,
                    content: this._transformToLangchainVariables(item.content),
                });
            }
        }
        return langchainMessages;
    }
    /** Serializes the prompt, stripping `type` from chat messages for the legacy shape. */
    toJSON() {
        const legacyShapedPrompt = this.promptResponse.prompt.map((item) => {
            if ("type" in item && item.type === ChatMessageType.ChatMessage) {
                const { type: _, ...messageWithoutType } = item;
                return messageWithoutType;
            }
            return item;
        });
        return JSON.stringify({
            name: this.name,
            prompt: legacyShapedPrompt,
            version: this.version,
            isFallback: this.isFallback,
            tags: this.tags,
            labels: this.labels,
            type: this.type,
            config: this.config,
        });
    }
}
@@ -0,0 +1,41 @@
1
+ import { CreatePromptRequest, LangfuseAPIClient, Prompt, ChatMessage } from "@langfuse/core";
2
+ import { ChatPromptClient, TextPromptClient } from "./promptClients.js";
3
+ import { CreateChatPromptBodyWithPlaceholders } from "./types.js";
4
/**
 * Manages Langfuse prompts: creation, label updates, and cached retrieval.
 */
export declare class PromptManager {
    private cache;
    private apiClient;
    constructor(params: {
        apiClient: LangfuseAPIClient;
    });
    /** Global Langfuse logger instance. */
    get logger(): import("@langfuse/core").Logger;
    /** Creates a chat prompt whose messages may include placeholder entries. */
    create(body: CreateChatPromptBodyWithPlaceholders): Promise<ChatPromptClient>;
    /** Creates a text prompt; `type` may be omitted and defaults to "text". */
    create(body: Omit<CreatePromptRequest.Text, "type"> & {
        type?: "text";
    }): Promise<TextPromptClient>;
    /** Creates a chat prompt from typed chat messages. */
    create(body: CreatePromptRequest.Chat): Promise<ChatPromptClient>;
    /** Sets new labels on a prompt version and invalidates its cache entry. */
    update(params: {
        name: string;
        version: number;
        newLabels: string[];
    }): Promise<Prompt>;
    /** Fetches a text prompt, serving from cache when fresh; optional string fallback on failure. */
    get(name: string, options?: {
        version?: number;
        label?: string;
        cacheTtlSeconds?: number;
        fallback?: string;
        maxRetries?: number;
        type?: "text";
        fetchTimeoutMs?: number;
    }): Promise<TextPromptClient>;
    /** Fetches a chat prompt, serving from cache when fresh; optional message-array fallback on failure. */
    get(name: string, options?: {
        version?: number;
        label?: string;
        cacheTtlSeconds?: number;
        fallback?: ChatMessage[];
        maxRetries?: number;
        type: "chat";
        fetchTimeoutMs?: number;
    }): Promise<ChatPromptClient>;
    private fetchPromptAndUpdateCache;
}
41
+ //# sourceMappingURL=promptManager.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"promptManager.d.ts","sourceRoot":"","sources":["../../src/prompt/promptManager.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,mBAAmB,EAEnB,iBAAiB,EAEjB,MAAM,EACN,WAAW,EACZ,MAAM,gBAAgB,CAAC;AAGxB,OAAO,EACL,gBAAgB,EAChB,gBAAgB,EAEjB,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAEL,oCAAoC,EACrC,MAAM,YAAY,CAAC;AAEpB,qBAAa,aAAa;IACxB,OAAO,CAAC,KAAK,CAAsB;IACnC,OAAO,CAAC,SAAS,CAAoB;gBAEzB,MAAM,EAAE;QAAE,SAAS,EAAE,iBAAiB,CAAA;KAAE;IAOpD,IAAI,MAAM,oCAET;IAEK,MAAM,CACV,IAAI,EAAE,oCAAoC,GACzC,OAAO,CAAC,gBAAgB,CAAC;IACtB,MAAM,CACV,IAAI,EAAE,IAAI,CAAC,mBAAmB,CAAC,IAAI,EAAE,MAAM,CAAC,GAAG;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAE,GAC/D,OAAO,CAAC,gBAAgB,CAAC;IACtB,MAAM,CAAC,IAAI,EAAE,mBAAmB,CAAC,IAAI,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAqCjE,MAAM,CAAC,MAAM,EAAE;QACnB,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,EAAE,MAAM,CAAC;QAChB,SAAS,EAAE,MAAM,EAAE,CAAC;KACrB,GAAG,OAAO,CAAC,MAAM,CAAC;IAYb,GAAG,CACP,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,eAAe,CAAC,EAAE,MAAM,CAAC;QACzB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,IAAI,CAAC,EAAE,MAAM,CAAC;QACd,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,gBAAgB,CAAC;IACtB,GAAG,CACP,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,eAAe,CAAC,EAAE,MAAM,CAAC;QACzB,QAAQ,CAAC,EAAE,WAAW,EAAE,CAAC;QACzB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,IAAI,EAAE,MAAM,CAAC;QACb,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,gBAAgB,CAAC;YA2Fd,yBAAyB;CAgDxC"}
@@ -0,0 +1,144 @@
1
+ import { getGlobalLogger, } from "@langfuse/core";
2
+ import { LangfusePromptCache } from "./promptCache.js";
3
+ import { ChatPromptClient, TextPromptClient, } from "./promptClients.js";
4
+ import { ChatMessageType, } from "./types.js";
5
export class PromptManager {
    /**
     * Manages Langfuse prompts: creation, label updates, and cached retrieval
     * with stale-while-revalidate refresh and optional local fallbacks.
     */
    constructor(params) {
        this.apiClient = params.apiClient;
        this.cache = new LangfusePromptCache();
    }
    get logger() {
        return getGlobalLogger();
    }
    /**
     * Creates a prompt via the API. Chat prompt messages are normalized to
     * typed entries (placeholder vs. chat message); text prompts default
     * `type` to "text".
     */
    async create(body) {
        let requestBody;
        if (body.type === "chat") {
            const normalizedMessages = body.prompt.map((item) => {
                if ("type" in item && item.type === ChatMessageType.Placeholder) {
                    return { type: ChatMessageType.Placeholder, name: item.name };
                }
                // Regular ChatMessage (possibly without a type field) from the API.
                return { type: ChatMessageType.ChatMessage, ...item };
            });
            requestBody = { ...body, prompt: normalizedMessages };
        }
        else {
            requestBody = { ...body, type: body.type ?? "text" };
        }
        const promptResponse = await this.apiClient.prompts.create(requestBody);
        return promptResponse.type === "chat"
            ? new ChatPromptClient(promptResponse)
            : new TextPromptClient(promptResponse);
    }
    /** Applies new labels to a prompt version and invalidates its cache entry. */
    async update(params) {
        const { name, version, newLabels } = params;
        const newPrompt = await this.apiClient.promptVersion.update(name, version, {
            newLabels,
        });
        this.cache.invalidate(name);
        return newPrompt;
    }
    /**
     * Retrieves a prompt, serving from cache when fresh. On a cache miss (or
     * when cacheTtlSeconds === 0) the prompt is fetched; if fetching fails and
     * a fallback is provided, a fallback client is returned instead. Expired
     * cache entries are served stale while a background refresh runs.
     */
    async get(name, options) {
        const cacheKey = this.cache.createKey({ name, label: options?.label });
        const cachedPrompt = this.cache.getIncludingExpired(cacheKey);
        const fetchParams = {
            name,
            version: options?.version,
            label: options?.label,
            cacheTtlSeconds: options?.cacheTtlSeconds,
            maxRetries: options?.maxRetries,
            fetchTimeoutMs: options?.fetchTimeoutMs,
        };
        if (!cachedPrompt || options?.cacheTtlSeconds === 0) {
            try {
                return await this.fetchPromptAndUpdateCache(fetchParams);
            }
            catch (err) {
                if (options?.fallback) {
                    const sharedFallbackParams = {
                        name,
                        version: options?.version ?? 0,
                        labels: options.label ? [options.label] : [],
                        cacheTtlSeconds: options?.cacheTtlSeconds,
                        config: {},
                        tags: [],
                    };
                    if (options.type === "chat") {
                        return new ChatPromptClient({
                            ...sharedFallbackParams,
                            type: "chat",
                            prompt: options.fallback.map((msg) => ({
                                type: ChatMessageType.ChatMessage,
                                ...msg,
                            })),
                        }, true);
                    }
                    return new TextPromptClient({
                        ...sharedFallbackParams,
                        type: "text",
                        prompt: options.fallback,
                    }, true);
                }
                throw err;
            }
        }
        if (cachedPrompt.isExpired && !this.cache.isRefreshing(cacheKey)) {
            // Kick off a background refresh and register the promise so concurrent
            // callers do not start duplicate fetches; the stale value is returned.
            const refreshPromptPromise = this.fetchPromptAndUpdateCache(fetchParams).catch(() => {
                this.logger.warn(`Failed to refresh prompt cache '${cacheKey}', stale cache will be used until next refresh succeeds.`);
            });
            this.cache.addRefreshingPromise(cacheKey, refreshPromptPromise);
        }
        return cachedPrompt.value;
    }
    /** Fetches a prompt from the API, wraps it in a client, and caches it. */
    async fetchPromptAndUpdateCache(params) {
        const cacheKey = this.cache.createKey(params);
        try {
            const { name, version, cacheTtlSeconds, label, maxRetries, fetchTimeoutMs } = params;
            const data = await this.apiClient.prompts.get(name, { version, label }, {
                maxRetries,
                timeoutInSeconds: fetchTimeoutMs ? fetchTimeoutMs / 1000 : undefined,
            });
            const prompt = data.type === "chat"
                ? new ChatPromptClient(data)
                : new TextPromptClient(data);
            this.cache.set(cacheKey, prompt, cacheTtlSeconds);
            return prompt;
        }
        catch (error) {
            this.logger.error(`Error fetching prompt '${cacheKey}':`, error);
            throw error;
        }
    }
}
@@ -0,0 +1,18 @@
1
+ import { ChatMessage, PlaceholderMessage, ChatMessageWithPlaceholders, CreatePromptRequest } from "@langfuse/core";
2
/** Discriminator for entries in a chat prompt. */
export declare enum ChatMessageType {
    ChatMessage = "chatmessage",
    Placeholder = "placeholder"
}
/** A plain chat message, or a typed placeholder entry. */
export type ChatMessageOrPlaceholder = ChatMessage | ({
    type: ChatMessageType.Placeholder;
} & PlaceholderMessage);
/** Shape compatible with Langchain's MessagesPlaceholder. */
export type LangchainMessagesPlaceholder = {
    variableName: string;
    optional?: boolean;
};
/** Chat prompt creation body whose messages may include placeholder entries. */
export type CreateChatPromptBodyWithPlaceholders = {
    type: "chat";
} & Omit<CreatePromptRequest.Chat, "type" | "prompt"> & {
    prompt: (ChatMessage | ChatMessageWithPlaceholders)[];
};
18
+ //# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/prompt/types.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,WAAW,EACX,kBAAkB,EAClB,2BAA2B,EAC3B,mBAAmB,EACpB,MAAM,gBAAgB,CAAC;AAExB,oBAAY,eAAe;IACzB,WAAW,gBAAgB;IAC3B,WAAW,gBAAgB;CAC5B;AAED,MAAM,MAAM,wBAAwB,GAChC,WAAW,GACX,CAAC;IAAE,IAAI,EAAE,eAAe,CAAC,WAAW,CAAA;CAAE,GAAG,kBAAkB,CAAC,CAAC;AAEjE,MAAM,MAAM,4BAA4B,GAAG;IACzC,YAAY,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB,CAAC;AAEF,MAAM,MAAM,oCAAoC,GAAG;IACjD,IAAI,EAAE,MAAM,CAAC;CACd,GAAG,IAAI,CAAC,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ,CAAC,GAAG;IACpD,MAAM,EAAE,CAAC,WAAW,GAAG,2BAA2B,CAAC,EAAE,CAAC;CACvD,CAAC"}
@@ -0,0 +1,5 @@
1
// Runtime value for the ChatMessageType string enum ("chatmessage" | "placeholder").
export var ChatMessageType;
(function (enumObject) {
    enumObject.ChatMessage = "chatmessage";
    enumObject.Placeholder = "placeholder";
})(ChatMessageType || (ChatMessageType = {}));
@@ -0,0 +1,27 @@
1
+ import { LangfuseAPIClient, ScoreBody } from "@langfuse/core";
2
+ import { Span } from "@opentelemetry/api";
3
/**
 * Buffers score events and ships them to the Langfuse ingestion API in
 * batches, with size- and interval-triggered flushes.
 */
export declare class ScoreManager {
    private apiClient;
    private eventQueue;
    private flushPromise;
    private flushTimer;
    private flushAtCount;
    private flushIntervalSeconds;
    constructor(params: {
        apiClient: LangfuseAPIClient;
    });
    /** Global Langfuse logger instance. */
    get logger(): import("@langfuse/core").Logger;
    /** Queues a score-create event for ingestion. */
    create(data: ScoreBody): void;
    /** Scores a specific observation identified by its OTel span. */
    observation(observation: {
        otelSpan: Span;
    }, data: Omit<ScoreBody, "traceId" | "sessionId" | "observationId" | "datasetRunId">): void;
    /** Scores the trace the given OTel span belongs to. */
    trace(observation: {
        otelSpan: Span;
    }, data: Omit<ScoreBody, "traceId" | "sessionId" | "observationId" | "datasetRunId">): void;
    /** Scores the currently active OTel span, if any. */
    activeObservation(data: Omit<ScoreBody, "traceId" | "sessionId" | "observationId" | "datasetRunId">): void;
    /** Scores the trace of the currently active OTel span, if any. */
    activeTrace(data: Omit<ScoreBody, "traceId" | "sessionId" | "observationId" | "datasetRunId">): void;
    private handleFlush;
    /** Flushes queued score events to the ingestion API. */
    flush(): Promise<void>;
    /** Flushes remaining events before shutdown. */
    shutdown(): Promise<void>;
}
27
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/score/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,iBAAiB,EAIjB,SAAS,EAIV,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAE,IAAI,EAAS,MAAM,oBAAoB,CAAC;AAKjD,qBAAa,YAAY;IACvB,OAAO,CAAC,SAAS,CAAoB;IACrC,OAAO,CAAC,UAAU,CAAwB;IAC1C,OAAO,CAAC,YAAY,CAA8B;IAClD,OAAO,CAAC,UAAU,CAAa;IAC/B,OAAO,CAAC,YAAY,CAAS;IAC7B,OAAO,CAAC,oBAAoB,CAAS;gBAEzB,MAAM,EAAE;QAAE,SAAS,EAAE,iBAAiB,CAAA;KAAE;IAYpD,IAAI,MAAM,oCAET;IAEM,MAAM,CAAC,IAAI,EAAE,SAAS,GAAG,IAAI;IAgC7B,WAAW,CAChB,WAAW,EAAE;QAAE,QAAQ,EAAE,IAAI,CAAA;KAAE,EAC/B,IAAI,EAAE,IAAI,CACR,SAAS,EACT,SAAS,GAAG,WAAW,GAAG,eAAe,GAAG,cAAc,CAC3D;IAWI,KAAK,CACV,WAAW,EAAE;QAAE,QAAQ,EAAE,IAAI,CAAA;KAAE,EAC/B,IAAI,EAAE,IAAI,CACR,SAAS,EACT,SAAS,GAAG,WAAW,GAAG,eAAe,GAAG,cAAc,CAC3D;IAUI,iBAAiB,CACtB,IAAI,EAAE,IAAI,CACR,SAAS,EACT,SAAS,GAAG,WAAW,GAAG,eAAe,GAAG,cAAc,CAC3D;IAkBI,WAAW,CAChB,IAAI,EAAE,IAAI,CACR,SAAS,EACT,SAAS,GAAG,WAAW,GAAG,eAAe,GAAG,cAAc,CAC3D;YAiBW,WAAW;IAkCZ,KAAK;IAIL,QAAQ;CAGtB"}
@@ -0,0 +1,125 @@
1
+ import { getEnv, generateUUID, getGlobalLogger, safeSetTimeout, } from "@langfuse/core";
2
+ import { trace } from "@opentelemetry/api";
3
const MAX_QUEUE_SIZE = 100000; // cap on buffered events to prevent unbounded memory growth
const MAX_BATCH_SIZE = 100; // events sent per ingestion API request
5
/**
 * Buffers score events and ships them to the Langfuse ingestion API in
 * batches, flushing either when the queue reaches a size threshold or after
 * a time interval. Thresholds are configurable via the LANGFUSE_FLUSH_AT and
 * LANGFUSE_FLUSH_INTERVAL environment variables (defaults: 10 events, 1s).
 */
export class ScoreManager {
    constructor(params) {
        this.eventQueue = [];
        this.flushPromise = null;
        this.flushTimer = null;
        this.apiClient = params.apiClient;
        // Truthy checks (not ??) so an empty env value falls back to the default.
        const configuredFlushAt = getEnv("LANGFUSE_FLUSH_AT");
        const configuredFlushInterval = getEnv("LANGFUSE_FLUSH_INTERVAL");
        this.flushAtCount = configuredFlushAt ? Number(configuredFlushAt) : 10;
        this.flushIntervalSeconds = configuredFlushInterval
            ? Number(configuredFlushInterval)
            : 1;
    }
    get logger() {
        return getGlobalLogger();
    }
    /** Queues a score-create event; triggers a flush when thresholds are met. */
    create(data) {
        const scoreData = {
            ...data,
            id: data.id ?? generateUUID(),
            environment: data.environment ?? getEnv("LANGFUSE_TRACING_ENVIRONMENT"),
        };
        const scoreIngestionEvent = {
            id: generateUUID(),
            type: "score-create",
            timestamp: new Date().toISOString(),
            body: scoreData,
        };
        if (this.eventQueue.length >= MAX_QUEUE_SIZE) {
            this.logger.error(`Score queue is at max size ${MAX_QUEUE_SIZE}. Dropping score.`);
            return;
        }
        this.eventQueue.push(scoreIngestionEvent);
        if (this.eventQueue.length >= this.flushAtCount) {
            this.flushPromise = this.flush();
        }
        else if (!this.flushTimer) {
            // Schedule a time-based flush if none is pending yet.
            this.flushTimer = safeSetTimeout(() => {
                this.flushPromise = this.flush();
            }, this.flushIntervalSeconds * 1000);
        }
    }
    /** Scores a specific observation (span) identified by its OTel span. */
    observation(observation, data) {
        const spanContext = observation.otelSpan.spanContext();
        this.create({
            ...data,
            traceId: spanContext.traceId,
            observationId: spanContext.spanId,
        });
    }
    /** Scores the trace that the given OTel span belongs to. */
    trace(observation, data) {
        const spanContext = observation.otelSpan.spanContext();
        this.create({
            ...data,
            traceId: spanContext.traceId,
        });
    }
    /** Scores the currently active OTel span, if any. */
    activeObservation(data) {
        const activeSpan = trace.getActiveSpan();
        if (!activeSpan) {
            this.logger.warn("No active span in context to score.");
            return;
        }
        const spanContext = activeSpan.spanContext();
        this.create({
            ...data,
            traceId: spanContext.traceId,
            observationId: spanContext.spanId,
        });
    }
    /** Scores the trace of the currently active OTel span, if any. */
    activeTrace(data) {
        const activeSpan = trace.getActiveSpan();
        if (!activeSpan) {
            this.logger.warn("No active span in context to score trace.");
            return;
        }
        this.create({
            ...data,
            traceId: activeSpan.spanContext().traceId,
        });
    }
    /** Drains the queue in MAX_BATCH_SIZE chunks and awaits all exports. */
    async handleFlush() {
        try {
            if (this.flushTimer) {
                clearTimeout(this.flushTimer);
                this.flushTimer = null;
            }
            const pendingExports = [];
            while (this.eventQueue.length > 0) {
                const batch = this.eventQueue.splice(0, MAX_BATCH_SIZE);
                const exportPromise = this.apiClient.ingestion
                    .batch({ batch })
                    .then((res) => {
                        if ((res.errors?.length) > 0) {
                            this.logger.error("Error ingesting scores:", res.errors);
                        }
                    })
                    .catch((err) => {
                        this.logger.error("Failed to export score batch:", err);
                    });
                pendingExports.push(exportPromise);
            }
            await Promise.all(pendingExports);
        }
        catch (err) {
            this.logger.error("Error flushing Score Manager: ", err);
        }
        finally {
            this.flushPromise = null;
        }
    }
    /** Returns the in-flight flush if one exists, otherwise starts a new one. */
    async flush() {
        return this.flushPromise ?? this.handleFlush();
    }
    /** Flushes any queued scores before shutdown. */
    async shutdown() {
        await this.flush();
    }
}