@librechat/agents 2.4.87 → 2.4.89
This diff shows the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- package/dist/cjs/instrumentation.cjs +7 -1
- package/dist/cjs/instrumentation.cjs.map +1 -1
- package/dist/cjs/run.cjs +44 -14
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/utils/title.cjs +54 -25
- package/dist/cjs/utils/title.cjs.map +1 -1
- package/dist/esm/instrumentation.mjs +7 -1
- package/dist/esm/instrumentation.mjs.map +1 -1
- package/dist/esm/run.mjs +44 -14
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/utils/title.mjs +54 -25
- package/dist/esm/utils/title.mjs.map +1 -1
- package/dist/types/run.d.ts +2 -2
- package/package.json +2 -2
- package/src/instrumentation.ts +8 -1
- package/src/llm/anthropic/llm.spec.ts +1 -1
- package/src/run.ts +63 -18
- package/src/scripts/simple.ts +9 -1
- package/src/utils/title.ts +80 -40
package/dist/cjs/instrumentation.cjs
CHANGED

@@ -7,8 +7,14 @@ var misc = require('./utils/misc.cjs');
 if (misc.isPresent(process.env.LANGFUSE_SECRET_KEY) &&
     misc.isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
     misc.isPresent(process.env.LANGFUSE_BASE_URL)) {
+    const langfuseSpanProcessor = new otel.LangfuseSpanProcessor({
+        publicKey: process.env.LANGFUSE_PUBLIC_KEY,
+        secretKey: process.env.LANGFUSE_SECRET_KEY,
+        baseUrl: process.env.LANGFUSE_BASE_URL,
+        environment: process.env.NODE_ENV ?? 'development',
+    });
     const sdk = new sdkNode.NodeSDK({
-        spanProcessors: [
+        spanProcessors: [langfuseSpanProcessor],
     });
     sdk.start();
 }
package/dist/cjs/instrumentation.cjs.map
CHANGED

@@ -1 +1 @@
(single-line source map regenerated to match the updated instrumentation.cjs; JSON contents omitted here)
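For reference, the compiled change above corresponds to a TypeScript setup along these lines (a minimal sketch based on this diff; the package's own isPresent() guard is replaced here by plain truthiness checks):

  import { NodeSDK } from '@opentelemetry/sdk-node';
  import { LangfuseSpanProcessor } from '@langfuse/otel';

  // The Langfuse span processor is now constructed explicitly from the same
  // environment variables that gate it, and tagged with an environment name.
  if (
    process.env.LANGFUSE_SECRET_KEY &&
    process.env.LANGFUSE_PUBLIC_KEY &&
    process.env.LANGFUSE_BASE_URL
  ) {
    const langfuseSpanProcessor = new LangfuseSpanProcessor({
      publicKey: process.env.LANGFUSE_PUBLIC_KEY,
      secretKey: process.env.LANGFUSE_SECRET_KEY,
      baseUrl: process.env.LANGFUSE_BASE_URL,
      environment: process.env.NODE_ENV ?? 'development',
    });

    const sdk = new NodeSDK({ spanProcessors: [langfuseSpanProcessor] });
    sdk.start();
  }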
package/dist/cjs/run.cjs
CHANGED
@@ -5,6 +5,7 @@ var zodToJsonSchema = require('zod-to-json-schema');
 var langchain = require('@langfuse/langchain');
 var prompts = require('@langchain/core/prompts');
 var messages = require('@langchain/core/messages');
+var runnables = require('@langchain/core/runnables');
 var openai = require('@langchain/openai');
 var _enum = require('./common/enum.cjs');
 var providers = require('./llm/providers.cjs');
@@ -103,17 +104,18 @@ class Run {
         if (misc.isPresent(process.env.LANGFUSE_SECRET_KEY) &&
             misc.isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
             misc.isPresent(process.env.LANGFUSE_BASE_URL)) {
-            [11 removed lines not captured in this extract]
+            const userId = config.configurable?.user_id;
+            const sessionId = config.configurable?.thread_id;
+            const traceMetadata = {
+                messageId: this.id,
+                parentMessageId: config.configurable?.requestBody?.parentMessageId,
+            };
+            const handler = new langchain.CallbackHandler({
+                userId,
+                sessionId,
+                traceMetadata,
+            });
+            config.callbacks = (config.callbacks ?? []).concat([handler]);
         }
         if (!this.id) {
             throw new Error('Run ID not provided');
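The pattern introduced in this hunk is simple to restate: derive Langfuse identifiers from the per-invocation config and append a CallbackHandler to whatever callbacks are already set. A standalone sketch follows (the attachLangfuseHandler helper and the RunLikeConfig type are illustrative, not package API; user_id, thread_id, and requestBody.parentMessageId are the configurable keys read in the diff above):

  import { CallbackHandler } from '@langfuse/langchain';

  type RunLikeConfig = {
    configurable?: {
      user_id?: string;
      thread_id?: string;
      requestBody?: { parentMessageId?: string };
    };
    callbacks?: unknown[];
  };

  // Build a per-run handler so the trace is attributed to the right user and
  // session, then append it without clobbering any existing callbacks.
  function attachLangfuseHandler(config: RunLikeConfig, runId: string): RunLikeConfig {
    const handler = new CallbackHandler({
      userId: config.configurable?.user_id,
      sessionId: config.configurable?.thread_id,
      traceMetadata: {
        messageId: runId,
        parentMessageId: config.configurable?.requestBody?.parentMessageId,
      },
    });
    return { ...config, callbacks: [...(config.callbacks ?? []), handler] };
  }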
@@ -194,6 +196,22 @@ class Run {
         };
     }
     async generateTitle({ provider, inputText, contentParts, titlePrompt, clientOptions, chainOptions, skipLanguage, omitOptions = defaultOmitOptions, titleMethod = _enum.TitleMethod.COMPLETION, titlePromptTemplate, }) {
+        if (chainOptions != null &&
+            misc.isPresent(process.env.LANGFUSE_SECRET_KEY) &&
+            misc.isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
+            misc.isPresent(process.env.LANGFUSE_BASE_URL)) {
+            const userId = chainOptions.configurable?.user_id;
+            const sessionId = chainOptions.configurable?.thread_id;
+            const traceMetadata = {
+                messageId: 'title-' + this.id,
+            };
+            const handler = new langchain.CallbackHandler({
+                userId,
+                sessionId,
+                traceMetadata,
+            });
+            chainOptions.callbacks = (chainOptions.callbacks ?? []).concat([handler]);
+        }
         const convoTemplate = prompts.PromptTemplate.fromTemplate(titlePromptTemplate ?? 'User: {input}\nAI: {output}');
         const response = contentParts
             .map((part) => {
@@ -202,7 +220,6 @@ class Run {
             return '';
         })
             .join('\n');
-        const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;
         const model = this.Graph?.getNewModel({
             provider,
             omitOptions,
@@ -222,10 +239,23 @@ class Run {
             model.n = clientOptions
                 ?.n;
         }
-        const
+        const convoToTitleInput = new runnables.RunnableLambda({
+            func: (promptValue) => ({
+                convo: promptValue.value,
+                inputText,
+                skipLanguage,
+            }),
+        }).withConfig({ runName: 'ConvoTransform' });
+        const titleChain = titleMethod === _enum.TitleMethod.COMPLETION
             ? await title.createCompletionTitleRunnable(model, titlePrompt)
             : await title.createTitleRunnable(model, titlePrompt);
-        [1 removed line not captured in this extract]
+        /** Pipes `convoTemplate` -> `transformer` -> `titleChain` */
+        const fullChain = convoTemplate
+            .withConfig({ runName: 'ConvoTemplate' })
+            .pipe(convoToTitleInput)
+            .pipe(titleChain)
+            .withConfig({ runName: 'TitleChain' });
+        return await fullChain.invoke({ input: inputText, output: response }, chainOptions);
     }
 }
 
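The new fullChain threads the formatted conversation through a small RunnableLambda that reshapes the PromptTemplate output into the { convo, ... } object the title runnables expect. A self-contained sketch of that piping pattern (fakeTitleChain stands in for the runnable returned by createTitleRunnable / createCompletionTitleRunnable):

  import { PromptTemplate } from '@langchain/core/prompts';
  import { RunnableLambda } from '@langchain/core/runnables';
  import type { StringPromptValue } from '@langchain/core/prompt_values';

  const convoTemplate = PromptTemplate.fromTemplate('User: {input}\nAI: {output}');

  // Reshape the rendered prompt into the object shape the title chain consumes.
  const toTitleInput = new RunnableLambda({
    func: (promptValue: StringPromptValue) => ({
      convo: promptValue.value,
      skipLanguage: false,
    }),
  }).withConfig({ runName: 'ConvoTransform' });

  // Stand-in for the real title runnable so the sketch runs without a model.
  const fakeTitleChain = new RunnableLambda({
    func: (input: { convo: string; skipLanguage: boolean }) => ({
      title: input.convo.split('\n')[0]?.slice(0, 40) ?? '',
    }),
  });

  const fullChain = convoTemplate
    .withConfig({ runName: 'ConvoTemplate' })
    .pipe(toTitleInput)
    .pipe(fakeTitleChain)
    .withConfig({ runName: 'TitleChain' });

  const result = await fullChain.invoke({ input: 'Hi', output: 'Hello! How can I help?' });
  // result -> { title: 'User: Hi' }

The named stages (ConvoTemplate, ConvoTransform, TitleChain) are what give tracing backends readable span names for each step.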
package/dist/cjs/run.cjs.map
CHANGED
@@ -1 +1 @@
(single-line source map regenerated to match the updated run.cjs; JSON contents omitted here)
package/dist/cjs/utils/title.cjs
CHANGED
@@ -1,8 +1,8 @@
 'use strict';
 
 var zod = require('zod');
-var runnables = require('@langchain/core/runnables');
 var prompts = require('@langchain/core/prompts');
+var runnables = require('@langchain/core/runnables');
 var _enum = require('../common/enum.cjs');
 
 const defaultTitlePrompt = `Analyze this conversation and provide:
@@ -29,36 +29,57 @@ const createTitleRunnable = async (model, _titlePrompt) => {
     // eslint-disable-next-line @typescript-eslint/ban-ts-comment
     /* @ts-ignore */
     const combinedLLM = model.withStructuredOutput(combinedSchema);
-    const titlePrompt = prompts.ChatPromptTemplate.fromTemplate(_titlePrompt ?? defaultTitlePrompt);
+    const titlePrompt = prompts.ChatPromptTemplate.fromTemplate(_titlePrompt ?? defaultTitlePrompt).withConfig({ runName: 'TitlePrompt' });
+    const titleOnlyInnerChain = runnables.RunnableSequence.from([titlePrompt, titleLLM]);
+    const combinedInnerChain = runnables.RunnableSequence.from([titlePrompt, combinedLLM]);
+    /** Wrap titleOnlyChain in RunnableLambda to create parent span */
+    const titleOnlyChain = new runnables.RunnableLambda({
+        func: async (input, config) => {
+            return await titleOnlyInnerChain.invoke(input, config);
+        },
+    }).withConfig({ runName: 'TitleOnlyChain' });
+    /** Wrap combinedChain in RunnableLambda to create parent span */
+    const combinedChain = new runnables.RunnableLambda({
+        func: async (input, config) => {
+            return await combinedInnerChain.invoke(input, config);
+        },
+    }).withConfig({ runName: 'TitleLanguageChain' });
+    /** Runnable to add default values if needed */
+    const addDefaults = new runnables.RunnableLambda({
+        func: (result) => ({
+            language: result?.language ?? 'English',
+            title: result?.title ?? '',
+        }),
+    }).withConfig({ runName: 'AddDefaults' });
+    const combinedChainInner = runnables.RunnableSequence.from([
+        combinedChain,
+        addDefaults,
+    ]);
+    /** Wrap combinedChainWithDefaults in RunnableLambda to create parent span */
+    const combinedChainWithDefaults = new runnables.RunnableLambda({
+        func: async (input, config) => {
+            return await combinedChainInner.invoke(input, config);
+        },
+    }).withConfig({ runName: 'CombinedChainWithDefaults' });
     return new runnables.RunnableLambda({
         func: async (input, config) => {
+            const invokeInput = { convo: input.convo };
             if (input.skipLanguage) {
-                return (await
-                    convo: input.convo,
-                }, config));
+                return (await titleOnlyChain.invoke(invokeInput, config));
             }
-            [1 removed line not captured in this extract]
-                convo: input.convo,
-            }, config));
-            return {
-                language: result?.language ?? 'English',
-                title: result?.title ?? '',
-            };
+            return await combinedChainWithDefaults.invoke(invokeInput, config);
         },
-    });
+    }).withConfig({ runName: 'TitleGenerator' });
 };
 const defaultCompletionPrompt = `Provide a concise, 5-word-or-less title for the conversation, using title case conventions. Only return the title itself.
 
 Conversation:
 {convo}`;
 const createCompletionTitleRunnable = async (model, titlePrompt) => {
-    const completionPrompt = prompts.ChatPromptTemplate.fromTemplate(titlePrompt ?? defaultCompletionPrompt);
-    [3 removed lines not captured in this extract]
-        convo: input.convo,
-    });
-    const response = await model.invoke(promptOutput, config);
+    const completionPrompt = prompts.ChatPromptTemplate.fromTemplate(titlePrompt ?? defaultCompletionPrompt).withConfig({ runName: 'CompletionTitlePrompt' });
+    /** Runnable to extract content from model response */
+    const extractContent = new runnables.RunnableLambda({
+        func: (response) => {
             let content = '';
             if (typeof response.content === 'string') {
                 content = response.content;
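The recurring pattern in this rewrite is wrapping an inner RunnableSequence in a RunnableLambda whose only job is to forward the call, so that .withConfig({ runName }) yields a named parent span (for Langfuse/OTel) grouping the prompt and model children. The same pattern in isolation, with lambdas standing in for the real prompt and structured-output model:

  import { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';
  import type { RunnableConfig } from '@langchain/core/runnables';

  // Stand-ins for `titlePrompt` and `titleLLM` so the sketch runs on its own.
  const fakePrompt = new RunnableLambda({
    func: (input: { convo: string }) => `Title this conversation:\n${input.convo}`,
  });
  const fakeModel = new RunnableLambda({
    func: (prompt: string) => ({ title: prompt.split('\n')[1]?.slice(0, 40) ?? '' }),
  });

  const titleOnlyInnerChain = RunnableSequence.from([fakePrompt, fakeModel]);

  // Forwarding wrapper: adds nothing functionally, but its runName becomes the
  // parent span under which the prompt and model spans are grouped.
  const titleOnlyChain = new RunnableLambda({
    func: async (input: { convo: string }, config?: Partial<RunnableConfig>) => {
      return await titleOnlyInnerChain.invoke(input, config);
    },
  }).withConfig({ runName: 'TitleOnlyChain' });

  await titleOnlyChain.invoke({ convo: 'User: hi\nAI: hello' });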
@@ -69,12 +90,20 @@ const createCompletionTitleRunnable = async (model, titlePrompt) => {
                 .map((part) => part.text)
                 .join('');
             }
-            [4 removed lines not captured in this extract]
+            return { title: content.trim() };
+        },
+    }).withConfig({ runName: 'ExtractTitle' });
+    const innerChain = runnables.RunnableSequence.from([
+        completionPrompt,
+        model,
+        extractContent,
+    ]);
+    /** Wrap in RunnableLambda to create a parent span for LangFuse */
+    return new runnables.RunnableLambda({
+        func: async (input, config) => {
+            return await innerChain.invoke(input, config);
         },
-    });
+    }).withConfig({ runName: 'CompletionTitleChain' });
 };
 
 exports.createCompletionTitleRunnable = createCompletionTitleRunnable;
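createCompletionTitleRunnable is now expressed as prompt -> model -> extraction lambda via RunnableSequence, with a forwarding wrapper (as in the previous sketch) providing the named parent span. A sketch of that composition with a canned model response so it runs without credentials (fakeModel is illustrative; a real chat model instance takes its place):

  import { AIMessage } from '@langchain/core/messages';
  import { ChatPromptTemplate } from '@langchain/core/prompts';
  import { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';

  const completionPrompt = ChatPromptTemplate.fromTemplate(
    'Provide a concise, 5-word-or-less title for the conversation.\n\nConversation:\n{convo}'
  ).withConfig({ runName: 'CompletionTitlePrompt' });

  // Canned response standing in for the chat model.
  const fakeModel = new RunnableLambda({
    func: () => new AIMessage('  Friendly Greeting Exchange  '),
  });

  // Normalize string or multi-part message content into a trimmed title string.
  const extractContent = new RunnableLambda({
    func: (response: AIMessage) => {
      const content =
        typeof response.content === 'string'
          ? response.content
          : response.content
              .filter((part): part is { type: 'text'; text: string } => part.type === 'text')
              .map((part) => part.text)
              .join('');
      return { title: content.trim() };
    },
  }).withConfig({ runName: 'ExtractTitle' });

  const completionTitleChain = RunnableSequence.from([completionPrompt, fakeModel, extractContent]);
  await completionTitleChain.invoke({ convo: 'User: hi\nAI: hello' }); // -> { title: 'Friendly Greeting Exchange' }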
package/dist/cjs/utils/title.cjs.map
CHANGED

@@ -1 +1 @@
(single-line source map regenerated to match the updated title.cjs; JSON contents omitted here)
package/dist/esm/instrumentation.mjs
CHANGED
|
@@ -5,8 +5,14 @@ import { isPresent } from './utils/misc.mjs';
|
|
|
5
5
|
if (isPresent(process.env.LANGFUSE_SECRET_KEY) &&
|
|
6
6
|
isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
|
|
7
7
|
isPresent(process.env.LANGFUSE_BASE_URL)) {
|
|
8
|
+
const langfuseSpanProcessor = new LangfuseSpanProcessor({
|
|
9
|
+
publicKey: process.env.LANGFUSE_PUBLIC_KEY,
|
|
10
|
+
secretKey: process.env.LANGFUSE_SECRET_KEY,
|
|
11
|
+
baseUrl: process.env.LANGFUSE_BASE_URL,
|
|
12
|
+
environment: process.env.NODE_ENV ?? 'development',
|
|
13
|
+
});
|
|
8
14
|
const sdk = new NodeSDK({
|
|
9
|
-
spanProcessors: [
|
|
15
|
+
spanProcessors: [langfuseSpanProcessor],
|
|
10
16
|
});
|
|
11
17
|
sdk.start();
|
|
12
18
|
}
|
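Note: the hunk above is easier to read in one piece. The following is a minimal standalone sketch of the updated instrumentation bootstrap, restating only what the diff adds: the span processor is now constructed explicitly from the three LANGFUSE_* variables and tagged with an environment that defaults to 'development'. Plain truthiness checks stand in for the package's isPresent helper; everything else mirrors the diff.

import { NodeSDK } from '@opentelemetry/sdk-node';
import { LangfuseSpanProcessor } from '@langfuse/otel';

// Tracing is only started when all three Langfuse credentials are present.
if (
  process.env.LANGFUSE_SECRET_KEY &&
  process.env.LANGFUSE_PUBLIC_KEY &&
  process.env.LANGFUSE_BASE_URL
) {
  const langfuseSpanProcessor = new LangfuseSpanProcessor({
    publicKey: process.env.LANGFUSE_PUBLIC_KEY,
    secretKey: process.env.LANGFUSE_SECRET_KEY,
    baseUrl: process.env.LANGFUSE_BASE_URL,
    // Traces are tagged with the runtime environment, defaulting to 'development'.
    environment: process.env.NODE_ENV ?? 'development',
  });

  const sdk = new NodeSDK({
    spanProcessors: [langfuseSpanProcessor],
  });

  sdk.start();
}

The same shape is repeated in package/src/instrumentation.ts further down.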
package/dist/esm/instrumentation.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"instrumentation.mjs","sources":["../../src/instrumentation.ts"],"sourcesContent":["import { NodeSDK } from '@opentelemetry/sdk-node';\nimport { LangfuseSpanProcessor } from '@langfuse/otel';\nimport { isPresent } from '@/utils/misc';\n\nif (\n isPresent(process.env.LANGFUSE_SECRET_KEY) &&\n isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&\n isPresent(process.env.LANGFUSE_BASE_URL)\n) {\n const sdk = new NodeSDK({\n spanProcessors: [
|
|
1
|
+
{"version":3,"file":"instrumentation.mjs","sources":["../../src/instrumentation.ts"],"sourcesContent":["import { NodeSDK } from '@opentelemetry/sdk-node';\nimport { LangfuseSpanProcessor } from '@langfuse/otel';\nimport { isPresent } from '@/utils/misc';\n\nif (\n isPresent(process.env.LANGFUSE_SECRET_KEY) &&\n isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&\n isPresent(process.env.LANGFUSE_BASE_URL)\n) {\n const langfuseSpanProcessor = new LangfuseSpanProcessor({\n publicKey: process.env.LANGFUSE_PUBLIC_KEY,\n secretKey: process.env.LANGFUSE_SECRET_KEY,\n baseUrl: process.env.LANGFUSE_BASE_URL,\n environment: process.env.NODE_ENV ?? 'development',\n });\n\n const sdk = new NodeSDK({\n spanProcessors: [langfuseSpanProcessor],\n });\n\n sdk.start();\n}\n"],"names":[],"mappings":";;;;AAIA,IACE,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;AAC1C,IAAA,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;IAC1C,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EACxC;AACA,IAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,CAAC;AACtD,QAAA,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,mBAAmB;AAC1C,QAAA,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,mBAAmB;AAC1C,QAAA,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,iBAAiB;AACtC,QAAA,WAAW,EAAE,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,aAAa;AACnD,KAAA,CAAC;AAEF,IAAA,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC;QACtB,cAAc,EAAE,CAAC,qBAAqB,CAAC;AACxC,KAAA,CAAC;IAEF,GAAG,CAAC,KAAK,EAAE;AACb"}
|
package/dist/esm/run.mjs
CHANGED
|
@@ -3,6 +3,7 @@ import { zodToJsonSchema } from 'zod-to-json-schema';
|
|
|
3
3
|
import { CallbackHandler } from '@langfuse/langchain';
|
|
4
4
|
import { PromptTemplate } from '@langchain/core/prompts';
|
|
5
5
|
import { SystemMessage } from '@langchain/core/messages';
|
|
6
|
+
import { RunnableLambda } from '@langchain/core/runnables';
|
|
6
7
|
import { ChatOpenAI as ChatOpenAI$1, AzureChatOpenAI as AzureChatOpenAI$1 } from '@langchain/openai';
|
|
7
8
|
import { GraphEvents, Callback, TitleMethod } from './common/enum.mjs';
|
|
8
9
|
import { manualToolStreamProviders } from './llm/providers.mjs';
|
|
@@ -101,17 +102,18 @@ class Run {
|
|
|
101
102
|
if (isPresent(process.env.LANGFUSE_SECRET_KEY) &&
|
|
102
103
|
isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
|
|
103
104
|
isPresent(process.env.LANGFUSE_BASE_URL)) {
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
105
|
+
const userId = config.configurable?.user_id;
|
|
106
|
+
const sessionId = config.configurable?.thread_id;
|
|
107
|
+
const traceMetadata = {
|
|
108
|
+
messageId: this.id,
|
|
109
|
+
parentMessageId: config.configurable?.requestBody?.parentMessageId,
|
|
110
|
+
};
|
|
111
|
+
const handler = new CallbackHandler({
|
|
112
|
+
userId,
|
|
113
|
+
sessionId,
|
|
114
|
+
traceMetadata,
|
|
115
|
+
});
|
|
116
|
+
config.callbacks = (config.callbacks ?? []).concat([handler]);
|
|
115
117
|
}
|
|
116
118
|
if (!this.id) {
|
|
117
119
|
throw new Error('Run ID not provided');
|
|
@@ -192,6 +194,22 @@ class Run {
|
|
|
192
194
|
};
|
|
193
195
|
}
|
|
194
196
|
async generateTitle({ provider, inputText, contentParts, titlePrompt, clientOptions, chainOptions, skipLanguage, omitOptions = defaultOmitOptions, titleMethod = TitleMethod.COMPLETION, titlePromptTemplate, }) {
|
|
197
|
+
if (chainOptions != null &&
|
|
198
|
+
isPresent(process.env.LANGFUSE_SECRET_KEY) &&
|
|
199
|
+
isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
|
|
200
|
+
isPresent(process.env.LANGFUSE_BASE_URL)) {
|
|
201
|
+
const userId = chainOptions.configurable?.user_id;
|
|
202
|
+
const sessionId = chainOptions.configurable?.thread_id;
|
|
203
|
+
const traceMetadata = {
|
|
204
|
+
messageId: 'title-' + this.id,
|
|
205
|
+
};
|
|
206
|
+
const handler = new CallbackHandler({
|
|
207
|
+
userId,
|
|
208
|
+
sessionId,
|
|
209
|
+
traceMetadata,
|
|
210
|
+
});
|
|
211
|
+
chainOptions.callbacks = (chainOptions.callbacks ?? []).concat([handler]);
|
|
212
|
+
}
|
|
195
213
|
const convoTemplate = PromptTemplate.fromTemplate(titlePromptTemplate ?? 'User: {input}\nAI: {output}');
|
|
196
214
|
const response = contentParts
|
|
197
215
|
.map((part) => {
|
|
@@ -200,7 +218,6 @@ class Run {
|
|
|
200
218
|
return '';
|
|
201
219
|
})
|
|
202
220
|
.join('\n');
|
|
203
|
-
const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;
|
|
204
221
|
const model = this.Graph?.getNewModel({
|
|
205
222
|
provider,
|
|
206
223
|
omitOptions,
|
|
@@ -220,10 +237,23 @@ class Run {
|
|
|
220
237
|
model.n = clientOptions
|
|
221
238
|
?.n;
|
|
222
239
|
}
|
|
223
|
-
const
|
|
240
|
+
const convoToTitleInput = new RunnableLambda({
|
|
241
|
+
func: (promptValue) => ({
|
|
242
|
+
convo: promptValue.value,
|
|
243
|
+
inputText,
|
|
244
|
+
skipLanguage,
|
|
245
|
+
}),
|
|
246
|
+
}).withConfig({ runName: 'ConvoTransform' });
|
|
247
|
+
const titleChain = titleMethod === TitleMethod.COMPLETION
|
|
224
248
|
? await createCompletionTitleRunnable(model, titlePrompt)
|
|
225
249
|
: await createTitleRunnable(model, titlePrompt);
|
|
226
|
-
|
|
250
|
+
/** Pipes `convoTemplate` -> `transformer` -> `titleChain` */
|
|
251
|
+
const fullChain = convoTemplate
|
|
252
|
+
.withConfig({ runName: 'ConvoTemplate' })
|
|
253
|
+
.pipe(convoToTitleInput)
|
|
254
|
+
.pipe(titleChain)
|
|
255
|
+
.withConfig({ runName: 'TitleChain' });
|
|
256
|
+
return await fullChain.invoke({ input: inputText, output: response }, chainOptions);
|
|
227
257
|
}
|
|
228
258
|
}
|
|
229
259
|
|
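Two changes land in run.mjs above: the Langfuse CallbackHandler is now built from config.configurable (user_id as userId, thread_id as sessionId) plus per-message trace metadata, and generateTitle now renders the conversation template, transforms its output, and pipes it into the title chain so the whole flow runs under named spans. The sketch below shows only that piping pattern; the conversation strings and the stand-in titleChain are invented for illustration, while the transform shape and run names mirror the diff.

import { PromptTemplate } from '@langchain/core/prompts';
import { RunnableLambda } from '@langchain/core/runnables';
import type { StringPromptValue } from '@langchain/core/prompt_values';

// Hypothetical conversation values standing in for what Run.generateTitle receives.
const inputText = 'How do I bake sourdough bread?';
const response = 'Start with an active starter and a long, slow fermentation.';
const skipLanguage = false;

const convoTemplate = PromptTemplate.fromTemplate('User: {input}\nAI: {output}');

// Reshapes the rendered prompt value into the input expected by the title runnable.
const convoToTitleInput = new RunnableLambda({
  func: (promptValue: StringPromptValue) => ({
    convo: promptValue.value,
    inputText,
    skipLanguage,
  }),
}).withConfig({ runName: 'ConvoTransform' });

// Stand-in for the runnable returned by createCompletionTitleRunnable/createTitleRunnable.
const titleChain = new RunnableLambda({
  func: async (_input: { convo: string }) => ({ title: 'Sourdough Baking Basics' }),
}).withConfig({ runName: 'TitleGenerator' });

// One named parent chain: prompt rendering -> transform -> title generation.
const fullChain = convoTemplate
  .withConfig({ runName: 'ConvoTemplate' })
  .pipe(convoToTitleInput)
  .pipe(titleChain)
  .withConfig({ runName: 'TitleChain' });

async function main(): Promise<void> {
  const result = await fullChain.invoke({ input: inputText, output: response });
  console.log(result); // { title: 'Sourdough Baking Basics' }
}

void main();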
package/dist/esm/run.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"run.mjs","sources":["../../src/run.ts"],"sourcesContent":["// src/run.ts\nimport './instrumentation';\nimport { zodToJsonSchema } from 'zod-to-json-schema';\nimport { CallbackHandler } from '@langfuse/langchain';\nimport { PromptTemplate } from '@langchain/core/prompts';\nimport { SystemMessage } from '@langchain/core/messages';\nimport { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';\nimport type {\n BaseMessage,\n MessageContentComplex,\n} from '@langchain/core/messages';\nimport type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type * as t from '@/types';\nimport { GraphEvents, Providers, Callback, TitleMethod } from '@/common';\nimport { manualToolStreamProviders } from '@/llm/providers';\nimport { shiftIndexTokenCountMap } from '@/messages/format';\nimport {\n createTitleRunnable,\n createCompletionTitleRunnable,\n} from '@/utils/title';\nimport { createTokenCounter } from '@/utils/tokens';\nimport { StandardGraph } from '@/graphs/Graph';\nimport { HandlerRegistry } from '@/events';\nimport { isOpenAILike } from '@/utils/llm';\nimport { isPresent } from '@/utils/misc';\n\nexport const defaultOmitOptions = new Set([\n 'stream',\n 'thinking',\n 'streaming',\n 'maxTokens',\n 'clientOptions',\n 'thinkingConfig',\n 'thinkingBudget',\n 'includeThoughts',\n 'maxOutputTokens',\n 'additionalModelRequestFields',\n]);\n\nexport class Run<T extends t.BaseGraphState> {\n graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;\n // private collab!: CollabGraph;\n // private taskManager!: TaskManager;\n private handlerRegistry: HandlerRegistry;\n id: string;\n Graph: StandardGraph | undefined;\n provider: Providers | undefined;\n returnContent: boolean = false;\n\n private constructor(config: Partial<t.RunConfig>) {\n const runId = config.runId ?? '';\n if (!runId) {\n throw new Error('Run ID not provided');\n }\n\n this.id = runId;\n\n const handlerRegistry = new HandlerRegistry();\n\n if (config.customHandlers) {\n for (const [eventType, handler] of Object.entries(\n config.customHandlers\n )) {\n handlerRegistry.register(eventType, handler);\n }\n }\n\n this.handlerRegistry = handlerRegistry;\n\n if (!config.graphConfig) {\n throw new Error('Graph config not provided');\n }\n\n if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {\n this.provider = config.graphConfig.llmConfig.provider;\n this.graphRunnable = this.createStandardGraph(\n config.graphConfig\n ) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;\n if (this.Graph) {\n this.Graph.handlerRegistry = handlerRegistry;\n }\n }\n\n this.returnContent = config.returnContent ?? false;\n }\n\n private createStandardGraph(\n config: t.StandardGraphConfig\n ): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {\n const { llmConfig, tools = [], ...graphInput } = config;\n const { provider, ...clientOptions } = llmConfig;\n\n const standardGraph = new StandardGraph({\n tools,\n provider,\n clientOptions,\n ...graphInput,\n runId: this.id,\n });\n this.Graph = standardGraph;\n return standardGraph.createWorkflow();\n }\n\n static async create<T extends t.BaseGraphState>(\n config: t.RunConfig\n ): Promise<Run<T>> {\n return new Run<T>(config);\n }\n\n getRunMessages(): BaseMessage[] | undefined {\n if (!this.Graph) {\n throw new Error(\n 'Graph not initialized. 
Make sure to use Run.create() to instantiate the Run.'\n );\n }\n return this.Graph.getRunMessages();\n }\n\n async processStream(\n inputs: t.IState,\n config: Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string },\n streamOptions?: t.EventStreamOptions\n ): Promise<MessageContentComplex[] | undefined> {\n if (!this.graphRunnable) {\n throw new Error(\n 'Run not initialized. Make sure to use Run.create() to instantiate the Run.'\n );\n }\n if (!this.Graph) {\n throw new Error(\n 'Graph not initialized. Make sure to use Run.create() to instantiate the Run.'\n );\n }\n\n this.Graph.resetValues(streamOptions?.keepContent);\n const provider = this.Graph.provider;\n const hasTools = this.Graph.tools ? this.Graph.tools.length > 0 : false;\n if (streamOptions?.callbacks) {\n /* TODO: conflicts with callback manager */\n const callbacks = (config.callbacks as t.ProvidedCallbacks) ?? [];\n config.callbacks = callbacks.concat(\n this.getCallbacks(streamOptions.callbacks)\n );\n }\n\n if (\n isPresent(process.env.LANGFUSE_SECRET_KEY) &&\n isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&\n isPresent(process.env.LANGFUSE_BASE_URL)\n ) {\n config.callbacks = (\n (config.callbacks as t.ProvidedCallbacks) ?? []\n ).concat([\n new CallbackHandler({\n userId: config.configurable?.user_id,\n sessionId: this.id,\n traceMetadata: {\n messageId: config.configurable?.requestBody?.messageId,\n conversationId: config.configurable?.requestBody?.conversationId,\n parentMessageId: config.configurable?.requestBody?.parentMessageId,\n },\n }),\n ]);\n }\n\n if (!this.id) {\n throw new Error('Run ID not provided');\n }\n\n const tokenCounter =\n streamOptions?.tokenCounter ??\n (streamOptions?.indexTokenCountMap\n ? await createTokenCounter()\n : undefined);\n const tools = this.Graph.tools as\n | Array<t.GenericTool | undefined>\n | undefined;\n const toolTokens = tokenCounter\n ? (tools?.reduce((acc, tool) => {\n if (!(tool as Partial<t.GenericTool>).schema) {\n return acc;\n }\n\n const jsonSchema = zodToJsonSchema(\n (tool?.schema as t.ZodObjectAny).describe(tool?.description ?? ''),\n tool?.name ?? ''\n );\n return (\n acc + tokenCounter(new SystemMessage(JSON.stringify(jsonSchema)))\n );\n }, 0) ?? 0)\n : 0;\n let instructionTokens = toolTokens;\n if (this.Graph.systemMessage && tokenCounter) {\n instructionTokens += tokenCounter(this.Graph.systemMessage);\n }\n const tokenMap = streamOptions?.indexTokenCountMap ?? {};\n if (this.Graph.systemMessage && instructionTokens > 0) {\n this.Graph.indexTokenCountMap = shiftIndexTokenCountMap(\n tokenMap,\n instructionTokens\n );\n } else if (instructionTokens > 0) {\n tokenMap[0] = tokenMap[0] + instructionTokens;\n this.Graph.indexTokenCountMap = tokenMap;\n } else {\n this.Graph.indexTokenCountMap = tokenMap;\n }\n\n this.Graph.maxContextTokens = streamOptions?.maxContextTokens;\n this.Graph.tokenCounter = tokenCounter;\n\n config.run_id = this.id;\n config.configurable = Object.assign(config.configurable ?? 
{}, {\n run_id: this.id,\n provider: this.provider,\n });\n\n const stream = this.graphRunnable.streamEvents(inputs, config, {\n raiseError: true,\n });\n\n for await (const event of stream) {\n const { data, name, metadata, ...info } = event;\n\n let eventName: t.EventName = info.event;\n if (\n hasTools &&\n manualToolStreamProviders.has(provider) &&\n eventName === GraphEvents.CHAT_MODEL_STREAM\n ) {\n /* Skipping CHAT_MODEL_STREAM event due to double-call edge case */\n continue;\n }\n\n if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {\n eventName = name;\n }\n\n const handler = this.handlerRegistry.getHandler(eventName);\n if (handler) {\n handler.handle(eventName, data, metadata, this.Graph);\n }\n }\n\n if (this.returnContent) {\n return this.Graph.getContentParts();\n }\n }\n\n private createSystemCallback<K extends keyof ClientCallbacks>(\n clientCallbacks: ClientCallbacks,\n key: K\n ): SystemCallbacks[K] {\n return ((...args: unknown[]) => {\n const clientCallback = clientCallbacks[key];\n if (clientCallback && this.Graph) {\n (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);\n }\n }) as SystemCallbacks[K];\n }\n\n getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {\n return {\n [Callback.TOOL_ERROR]: this.createSystemCallback(\n clientCallbacks,\n Callback.TOOL_ERROR\n ),\n [Callback.TOOL_START]: this.createSystemCallback(\n clientCallbacks,\n Callback.TOOL_START\n ),\n [Callback.TOOL_END]: this.createSystemCallback(\n clientCallbacks,\n Callback.TOOL_END\n ),\n };\n }\n\n async generateTitle({\n provider,\n inputText,\n contentParts,\n titlePrompt,\n clientOptions,\n chainOptions,\n skipLanguage,\n omitOptions = defaultOmitOptions,\n titleMethod = TitleMethod.COMPLETION,\n titlePromptTemplate,\n }: t.RunTitleOptions): Promise<{ language?: string; title?: string }> {\n const convoTemplate = PromptTemplate.fromTemplate(\n titlePromptTemplate ?? 'User: {input}\\nAI: {output}'\n );\n const response = contentParts\n .map((part) => {\n if (part?.type === 'text') return part.text;\n return '';\n })\n .join('\\n');\n const convo = (\n await convoTemplate.invoke({ input: inputText, output: response })\n ).value;\n const model = this.Graph?.getNewModel({\n provider,\n omitOptions,\n clientOptions,\n });\n if (!model) {\n return { language: '', title: '' };\n }\n if (\n isOpenAILike(provider) &&\n (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)\n ) {\n model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)\n ?.temperature as number;\n model.topP = (clientOptions as t.OpenAIClientOptions | undefined)\n ?.topP as number;\n model.frequencyPenalty = (\n clientOptions as t.OpenAIClientOptions | undefined\n )?.frequencyPenalty as number;\n model.presencePenalty = (\n clientOptions as t.OpenAIClientOptions | undefined\n )?.presencePenalty as number;\n model.n = (clientOptions as t.OpenAIClientOptions | undefined)\n ?.n as number;\n }\n const chain =\n titleMethod === TitleMethod.COMPLETION\n ? 
await createCompletionTitleRunnable(model, titlePrompt)\n : await createTitleRunnable(model, titlePrompt);\n return await chain.invoke({ convo, inputText, skipLanguage }, chainOptions);\n }\n}\n"],"names":["ChatOpenAI","AzureChatOpenAI"],"mappings":";;;;;;;;;;;;;;;;AAAA;AA2Ba,MAAA,kBAAkB,GAAG,IAAI,GAAG,CAAC;IACxC,QAAQ;IACR,UAAU;IACV,WAAW;IACX,WAAW;IACX,eAAe;IACf,gBAAgB;IAChB,gBAAgB;IAChB,iBAAiB;IACjB,iBAAiB;IACjB,8BAA8B;AAC/B,CAAA;MAEY,GAAG,CAAA;AACd,IAAA,aAAa;;;AAGL,IAAA,eAAe;AACvB,IAAA,EAAE;AACF,IAAA,KAAK;AACL,IAAA,QAAQ;IACR,aAAa,GAAY,KAAK;AAE9B,IAAA,WAAA,CAAoB,MAA4B,EAAA;AAC9C,QAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAChC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,IAAI,CAAC,EAAE,GAAG,KAAK;AAEf,QAAA,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE;AAE7C,QAAA,IAAI,MAAM,CAAC,cAAc,EAAE;AACzB,YAAA,KAAK,MAAM,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAC/C,MAAM,CAAC,cAAc,CACtB,EAAE;AACD,gBAAA,eAAe,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC;;;AAIhD,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe;AAEtC,QAAA,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAG9C,QAAA,IAAI,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;YACtE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,QAAQ;YACrD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,mBAAmB,CAC3C,MAAM,CAAC,WAAW,CACqC;AACzD,YAAA,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,IAAI,CAAC,KAAK,CAAC,eAAe,GAAG,eAAe;;;QAIhD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK;;AAG5C,IAAA,mBAAmB,CACzB,MAA6B,EAAA;AAE7B,QAAA,MAAM,EAAE,SAAS,EAAE,KAAK,GAAG,EAAE,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM;QACvD,MAAM,EAAE,QAAQ,EAAE,GAAG,aAAa,EAAE,GAAG,SAAS;AAEhD,QAAA,MAAM,aAAa,GAAG,IAAI,aAAa,CAAC;YACtC,KAAK;YACL,QAAQ;YACR,aAAa;AACb,YAAA,GAAG,UAAU;YACb,KAAK,EAAE,IAAI,CAAC,EAAE;AACf,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,KAAK,GAAG,aAAa;AAC1B,QAAA,OAAO,aAAa,CAAC,cAAc,EAAE;;AAGvC,IAAA,aAAa,MAAM,CACjB,MAAmB,EAAA;AAEnB,QAAA,OAAO,IAAI,GAAG,CAAI,MAAM,CAAC;;IAG3B,cAAc,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E;;AAEH,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;;AAGpC,IAAA,MAAM,aAAa,CACjB,MAAgB,EAChB,MAA2E,EAC3E,aAAoC,EAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CACb,4EAA4E,CAC7E;;AAEH,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E;;QAGH,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG,KAAK;AACvE,QAAA,IAAI,aAAa,EAAE,SAAS,EAAE;;AAE5B,YAAA,MAAM,SAAS,GAAI,MAAM,CAAC,SAAiC,IAAI,EAAE;AACjE,YAAA,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC,MAAM,CACjC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,SAAS,CAAC,CAC3C;;AAGH,QAAA,IACE,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;AAC1C,YAAA,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;YAC1C,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EACxC;AACA,YAAA,MAAM,CAAC,SAAS,GAAG,CAChB,MAAM,CAAC,SAAiC,IAAI,EAAE,EAC/C,MAAM,CAAC;AACP,gBAAA,IAAI,eAAe,CAAC;AAClB,oBAAA,MAAM,EAAE,MAAM,CAAC,YAAY,EAAE,OAAO;oBACpC,SAAS,EAAE,IAAI,CAAC,EAAE;AAClB,oBAAA,aAAa,EAAE;AACb,wBAAA,SAAS,EAAE,MAAM,CAAC,YAAY,EAAE,WAAW,EAAE,SAAS;AACtD,wBAAA,cAAc,EAAE,MAAM,CAAC,YAAY,EAAE,WAAW,EAAE,cAAc;AAChE,wBAAA,eAAe,EAAE,MAAM,CAAC,YAAY,EAAE,WAAW,EAAE,eAAe;AACnE,qBAAA;iBACF,CAAC;AACH,aAAA,CAAC;;AAGJ,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,MAAM,YAAY,GAChB,aAAa,EAAE,YAAY;aAC1B,aAAa,EAAE;kBACZ,MAAM,kBAAkB;kBACxB,SAAS,CAAC;AAChB,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAEZ;QACb,MAAM,UAAU,GAAG;eACd,KAAK,EAAE,MAAM,CAAC,CAAC,GAAG,EAAE,IAAI,KAAI;AAC7B,gB
AAA,IAAI,CAAE,IAA+B,CAAC,MAAM,EAAE;AAC5C,oBAAA,OAAO,GAAG;;gBAGZ,MAAM,UAAU,GAAG,eAAe,CAChC,CAAC,IAAI,EAAE,MAAyB,EAAC,QAAQ,CAAC,IAAI,EAAE,WAAW,IAAI,EAAE,CAAC,EAClE,IAAI,EAAE,IAAI,IAAI,EAAE,CACjB;AACD,gBAAA,QACE,GAAG,GAAG,YAAY,CAAC,IAAI,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC;AAErE,aAAC,EAAE,CAAC,CAAC,IAAI,CAAC;cACR,CAAC;QACL,IAAI,iBAAiB,GAAG,UAAU;QAClC,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,YAAY,EAAE;YAC5C,iBAAiB,IAAI,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC;;AAE7D,QAAA,MAAM,QAAQ,GAAG,aAAa,EAAE,kBAAkB,IAAI,EAAE;QACxD,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,iBAAiB,GAAG,CAAC,EAAE;YACrD,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,uBAAuB,CACrD,QAAQ,EACR,iBAAiB,CAClB;;AACI,aAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;YAChC,QAAQ,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,iBAAiB;AAC7C,YAAA,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,QAAQ;;aACnC;AACL,YAAA,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,QAAQ;;QAG1C,IAAI,CAAC,KAAK,CAAC,gBAAgB,GAAG,aAAa,EAAE,gBAAgB;AAC7D,QAAA,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,YAAY;AAEtC,QAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;AACvB,QAAA,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE;YAC7D,MAAM,EAAE,IAAI,CAAC,EAAE;YACf,QAAQ,EAAE,IAAI,CAAC,QAAQ;AACxB,SAAA,CAAC;QAEF,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE;AAC7D,YAAA,UAAU,EAAE,IAAI;AACjB,SAAA,CAAC;AAEF,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;AAChC,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK;AAE/C,YAAA,IAAI,SAAS,GAAgB,IAAI,CAAC,KAAK;AACvC,YAAA,IACE,QAAQ;AACR,gBAAA,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC;AACvC,gBAAA,SAAS,KAAK,WAAW,CAAC,iBAAiB,EAC3C;;gBAEA;;YAGF,IAAI,SAAS,IAAI,SAAS,KAAK,WAAW,CAAC,eAAe,EAAE;gBAC1D,SAAS,GAAG,IAAI;;YAGlB,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,SAAS,CAAC;YAC1D,IAAI,OAAO,EAAE;AACX,gBAAA,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;;;AAIzD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;;;IAI/B,oBAAoB,CAC1B,eAAgC,EAChC,GAAM,EAAA;AAEN,QAAA,QAAQ,CAAC,GAAG,IAAe,KAAI;AAC7B,YAAA,MAAM,cAAc,GAAG,eAAe,CAAC,GAAG,CAAC;AAC3C,YAAA,IAAI,cAAc,IAAI,IAAI,CAAC,KAAK,EAAE;gBAC/B,cAA+C,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,IAAI,CAAC;;AAEzE,SAAC;;AAGH,IAAA,YAAY,CAAC,eAAgC,EAAA;QAC3C,OAAO;AACL,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAC9C,eAAe,EACf,QAAQ,CAAC,UAAU,CACpB;AACD,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAC9C,eAAe,EACf,QAAQ,CAAC,UAAU,CACpB;AACD,YAAA,CAAC,QAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAC5C,eAAe,EACf,QAAQ,CAAC,QAAQ,CAClB;SACF;;AAGH,IAAA,MAAM,aAAa,CAAC,EAClB,QAAQ,EACR,SAAS,EACT,YAAY,EACZ,WAAW,EACX,aAAa,EACb,YAAY,EACZ,YAAY,EACZ,WAAW,GAAG,kBAAkB,EAChC,WAAW,GAAG,WAAW,CAAC,UAAU,EACpC,mBAAmB,GACD,EAAA;QAClB,MAAM,aAAa,GAAG,cAAc,CAAC,YAAY,CAC/C,mBAAmB,IAAI,6BAA6B,CACrD;QACD,MAAM,QAAQ,GAAG;AACd,aAAA,GAAG,CAAC,CAAC,IAAI,KAAI;AACZ,YAAA,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM;gBAAE,OAAO,IAAI,CAAC,IAAI;AAC3C,YAAA,OAAO,EAAE;AACX,SAAC;aACA,IAAI,CAAC,IAAI,CAAC;QACb,MAAM,KAAK,GAAG,CACZ,MAAM,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,EAClE,KAAK;AACP,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC;YACpC,QAAQ;YACR,WAAW;YACX,aAAa;AACd,SAAA,CAAC;QACF,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;;QAEpC,IACE,YAAY,CAAC,QAAQ,CAAC;aACrB,KAAK,YAAYA,YAAU,IAAI,KAAK,YAAYC,iBAAe,CAAC,EACjE;YACA,KAAK,CAAC,WAAW,GAAI;AACnB,kBAAE,WAAqB;YACzB,KAAK,CAAC,IAAI,GAAI;AACZ,kBAAE,IAAc;AAClB,YAAA,KAAK,CAAC,gBAAgB,GACpB,aACD,EAAE,gBAA0B;AAC7B,YAAA,KAAK,CAAC,eAAe,GACnB,aACD,EAAE,eAAyB;YAC5B,KAAK,CAAC,CAAC,GAAI;AACT,kBAAE,CAAW;;AAEjB,QAAA,MAAM,KAAK,GACT,WAAW,KAAK,WAAW,CAAC;AAC1B,cAAE,MAAM,6BAA6B,CAAC,KAAK,EAAE,WAAW;cACtD,MAAM,mBAAmB,CA
AC,KAAK,EAAE,WAAW,CAAC;AACnD,QAAA,OAAO,MAAM,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,YAAY,CAAC;;AAE9E;;;;"}
|
|
1
|
+
{"version":3,"file":"run.mjs","sources":["../../src/run.ts"],"sourcesContent":["// src/run.ts\nimport './instrumentation';\nimport { zodToJsonSchema } from 'zod-to-json-schema';\nimport { CallbackHandler } from '@langfuse/langchain';\nimport { PromptTemplate } from '@langchain/core/prompts';\nimport { SystemMessage } from '@langchain/core/messages';\nimport { RunnableLambda } from '@langchain/core/runnables';\nimport { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';\nimport type {\n MessageContentComplex,\n BaseMessage,\n} from '@langchain/core/messages';\nimport type { StringPromptValue } from '@langchain/core/prompt_values';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';\nimport type * as t from '@/types';\nimport { GraphEvents, Providers, Callback, TitleMethod } from '@/common';\nimport { manualToolStreamProviders } from '@/llm/providers';\nimport { shiftIndexTokenCountMap } from '@/messages/format';\nimport {\n createTitleRunnable,\n createCompletionTitleRunnable,\n} from '@/utils/title';\nimport { createTokenCounter } from '@/utils/tokens';\nimport { StandardGraph } from '@/graphs/Graph';\nimport { HandlerRegistry } from '@/events';\nimport { isOpenAILike } from '@/utils/llm';\nimport { isPresent } from '@/utils/misc';\n\nexport const defaultOmitOptions = new Set([\n 'stream',\n 'thinking',\n 'streaming',\n 'maxTokens',\n 'clientOptions',\n 'thinkingConfig',\n 'thinkingBudget',\n 'includeThoughts',\n 'maxOutputTokens',\n 'additionalModelRequestFields',\n]);\n\nexport class Run<T extends t.BaseGraphState> {\n graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;\n // private collab!: CollabGraph;\n // private taskManager!: TaskManager;\n private handlerRegistry: HandlerRegistry;\n id: string;\n Graph: StandardGraph | undefined;\n provider: Providers | undefined;\n returnContent: boolean = false;\n\n private constructor(config: Partial<t.RunConfig>) {\n const runId = config.runId ?? '';\n if (!runId) {\n throw new Error('Run ID not provided');\n }\n\n this.id = runId;\n\n const handlerRegistry = new HandlerRegistry();\n\n if (config.customHandlers) {\n for (const [eventType, handler] of Object.entries(\n config.customHandlers\n )) {\n handlerRegistry.register(eventType, handler);\n }\n }\n\n this.handlerRegistry = handlerRegistry;\n\n if (!config.graphConfig) {\n throw new Error('Graph config not provided');\n }\n\n if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {\n this.provider = config.graphConfig.llmConfig.provider;\n this.graphRunnable = this.createStandardGraph(\n config.graphConfig\n ) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;\n if (this.Graph) {\n this.Graph.handlerRegistry = handlerRegistry;\n }\n }\n\n this.returnContent = config.returnContent ?? false;\n }\n\n private createStandardGraph(\n config: t.StandardGraphConfig\n ): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {\n const { llmConfig, tools = [], ...graphInput } = config;\n const { provider, ...clientOptions } = llmConfig;\n\n const standardGraph = new StandardGraph({\n tools,\n provider,\n clientOptions,\n ...graphInput,\n runId: this.id,\n });\n this.Graph = standardGraph;\n return standardGraph.createWorkflow();\n }\n\n static async create<T extends t.BaseGraphState>(\n config: t.RunConfig\n ): Promise<Run<T>> {\n return new Run<T>(config);\n }\n\n getRunMessages(): BaseMessage[] | undefined {\n if (!this.Graph) {\n throw new Error(\n 'Graph not initialized. 
Make sure to use Run.create() to instantiate the Run.'\n );\n }\n return this.Graph.getRunMessages();\n }\n\n async processStream(\n inputs: t.IState,\n config: Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string },\n streamOptions?: t.EventStreamOptions\n ): Promise<MessageContentComplex[] | undefined> {\n if (!this.graphRunnable) {\n throw new Error(\n 'Run not initialized. Make sure to use Run.create() to instantiate the Run.'\n );\n }\n if (!this.Graph) {\n throw new Error(\n 'Graph not initialized. Make sure to use Run.create() to instantiate the Run.'\n );\n }\n\n this.Graph.resetValues(streamOptions?.keepContent);\n const provider = this.Graph.provider;\n const hasTools = this.Graph.tools ? this.Graph.tools.length > 0 : false;\n if (streamOptions?.callbacks) {\n /* TODO: conflicts with callback manager */\n const callbacks = (config.callbacks as t.ProvidedCallbacks) ?? [];\n config.callbacks = callbacks.concat(\n this.getCallbacks(streamOptions.callbacks)\n );\n }\n\n if (\n isPresent(process.env.LANGFUSE_SECRET_KEY) &&\n isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&\n isPresent(process.env.LANGFUSE_BASE_URL)\n ) {\n const userId = config.configurable?.user_id;\n const sessionId = config.configurable?.thread_id;\n const traceMetadata = {\n messageId: this.id,\n parentMessageId: config.configurable?.requestBody?.parentMessageId,\n };\n const handler = new CallbackHandler({\n userId,\n sessionId,\n traceMetadata,\n });\n config.callbacks = (\n (config.callbacks as t.ProvidedCallbacks) ?? []\n ).concat([handler]);\n }\n\n if (!this.id) {\n throw new Error('Run ID not provided');\n }\n\n const tokenCounter =\n streamOptions?.tokenCounter ??\n (streamOptions?.indexTokenCountMap\n ? await createTokenCounter()\n : undefined);\n const tools = this.Graph.tools as\n | Array<t.GenericTool | undefined>\n | undefined;\n const toolTokens = tokenCounter\n ? (tools?.reduce((acc, tool) => {\n if (!(tool as Partial<t.GenericTool>).schema) {\n return acc;\n }\n\n const jsonSchema = zodToJsonSchema(\n (tool?.schema as t.ZodObjectAny).describe(tool?.description ?? ''),\n tool?.name ?? ''\n );\n return (\n acc + tokenCounter(new SystemMessage(JSON.stringify(jsonSchema)))\n );\n }, 0) ?? 0)\n : 0;\n let instructionTokens = toolTokens;\n if (this.Graph.systemMessage && tokenCounter) {\n instructionTokens += tokenCounter(this.Graph.systemMessage);\n }\n const tokenMap = streamOptions?.indexTokenCountMap ?? {};\n if (this.Graph.systemMessage && instructionTokens > 0) {\n this.Graph.indexTokenCountMap = shiftIndexTokenCountMap(\n tokenMap,\n instructionTokens\n );\n } else if (instructionTokens > 0) {\n tokenMap[0] = tokenMap[0] + instructionTokens;\n this.Graph.indexTokenCountMap = tokenMap;\n } else {\n this.Graph.indexTokenCountMap = tokenMap;\n }\n\n this.Graph.maxContextTokens = streamOptions?.maxContextTokens;\n this.Graph.tokenCounter = tokenCounter;\n\n config.run_id = this.id;\n config.configurable = Object.assign(config.configurable ?? 
{}, {\n run_id: this.id,\n provider: this.provider,\n });\n\n const stream = this.graphRunnable.streamEvents(inputs, config, {\n raiseError: true,\n });\n\n for await (const event of stream) {\n const { data, name, metadata, ...info } = event;\n\n let eventName: t.EventName = info.event;\n if (\n hasTools &&\n manualToolStreamProviders.has(provider) &&\n eventName === GraphEvents.CHAT_MODEL_STREAM\n ) {\n /* Skipping CHAT_MODEL_STREAM event due to double-call edge case */\n continue;\n }\n\n if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {\n eventName = name;\n }\n\n const handler = this.handlerRegistry.getHandler(eventName);\n if (handler) {\n handler.handle(eventName, data, metadata, this.Graph);\n }\n }\n\n if (this.returnContent) {\n return this.Graph.getContentParts();\n }\n }\n\n private createSystemCallback<K extends keyof ClientCallbacks>(\n clientCallbacks: ClientCallbacks,\n key: K\n ): SystemCallbacks[K] {\n return ((...args: unknown[]) => {\n const clientCallback = clientCallbacks[key];\n if (clientCallback && this.Graph) {\n (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);\n }\n }) as SystemCallbacks[K];\n }\n\n getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {\n return {\n [Callback.TOOL_ERROR]: this.createSystemCallback(\n clientCallbacks,\n Callback.TOOL_ERROR\n ),\n [Callback.TOOL_START]: this.createSystemCallback(\n clientCallbacks,\n Callback.TOOL_START\n ),\n [Callback.TOOL_END]: this.createSystemCallback(\n clientCallbacks,\n Callback.TOOL_END\n ),\n };\n }\n\n async generateTitle({\n provider,\n inputText,\n contentParts,\n titlePrompt,\n clientOptions,\n chainOptions,\n skipLanguage,\n omitOptions = defaultOmitOptions,\n titleMethod = TitleMethod.COMPLETION,\n titlePromptTemplate,\n }: t.RunTitleOptions): Promise<{ language?: string; title?: string }> {\n if (\n chainOptions != null &&\n isPresent(process.env.LANGFUSE_SECRET_KEY) &&\n isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&\n isPresent(process.env.LANGFUSE_BASE_URL)\n ) {\n const userId = chainOptions.configurable?.user_id;\n const sessionId = chainOptions.configurable?.thread_id;\n const traceMetadata = {\n messageId: 'title-' + this.id,\n };\n const handler = new CallbackHandler({\n userId,\n sessionId,\n traceMetadata,\n });\n chainOptions.callbacks = (\n (chainOptions.callbacks as t.ProvidedCallbacks) ?? []\n ).concat([handler]);\n }\n\n const convoTemplate = PromptTemplate.fromTemplate(\n titlePromptTemplate ?? 
'User: {input}\\nAI: {output}'\n );\n\n const response = contentParts\n .map((part) => {\n if (part?.type === 'text') return part.text;\n return '';\n })\n .join('\\n');\n\n const model = this.Graph?.getNewModel({\n provider,\n omitOptions,\n clientOptions,\n });\n if (!model) {\n return { language: '', title: '' };\n }\n if (\n isOpenAILike(provider) &&\n (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)\n ) {\n model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)\n ?.temperature as number;\n model.topP = (clientOptions as t.OpenAIClientOptions | undefined)\n ?.topP as number;\n model.frequencyPenalty = (\n clientOptions as t.OpenAIClientOptions | undefined\n )?.frequencyPenalty as number;\n model.presencePenalty = (\n clientOptions as t.OpenAIClientOptions | undefined\n )?.presencePenalty as number;\n model.n = (clientOptions as t.OpenAIClientOptions | undefined)\n ?.n as number;\n }\n\n const convoToTitleInput = new RunnableLambda({\n func: (\n promptValue: StringPromptValue\n ): { convo: string; inputText: string; skipLanguage?: boolean } => ({\n convo: promptValue.value,\n inputText,\n skipLanguage,\n }),\n }).withConfig({ runName: 'ConvoTransform' });\n\n const titleChain =\n titleMethod === TitleMethod.COMPLETION\n ? await createCompletionTitleRunnable(model, titlePrompt)\n : await createTitleRunnable(model, titlePrompt);\n\n /** Pipes `convoTemplate` -> `transformer` -> `titleChain` */\n const fullChain = convoTemplate\n .withConfig({ runName: 'ConvoTemplate' })\n .pipe(convoToTitleInput)\n .pipe(titleChain)\n .withConfig({ runName: 'TitleChain' });\n\n return await fullChain.invoke(\n { input: inputText, output: response },\n chainOptions\n );\n }\n}\n"],"names":["ChatOpenAI","AzureChatOpenAI"],"mappings":";;;;;;;;;;;;;;;;;AAAA;AA6Ba,MAAA,kBAAkB,GAAG,IAAI,GAAG,CAAC;IACxC,QAAQ;IACR,UAAU;IACV,WAAW;IACX,WAAW;IACX,eAAe;IACf,gBAAgB;IAChB,gBAAgB;IAChB,iBAAiB;IACjB,iBAAiB;IACjB,8BAA8B;AAC/B,CAAA;MAEY,GAAG,CAAA;AACd,IAAA,aAAa;;;AAGL,IAAA,eAAe;AACvB,IAAA,EAAE;AACF,IAAA,KAAK;AACL,IAAA,QAAQ;IACR,aAAa,GAAY,KAAK;AAE9B,IAAA,WAAA,CAAoB,MAA4B,EAAA;AAC9C,QAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAChC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,IAAI,CAAC,EAAE,GAAG,KAAK;AAEf,QAAA,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE;AAE7C,QAAA,IAAI,MAAM,CAAC,cAAc,EAAE;AACzB,YAAA,KAAK,MAAM,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAC/C,MAAM,CAAC,cAAc,CACtB,EAAE;AACD,gBAAA,eAAe,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC;;;AAIhD,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe;AAEtC,QAAA,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAG9C,QAAA,IAAI,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;YACtE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,QAAQ;YACrD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,mBAAmB,CAC3C,MAAM,CAAC,WAAW,CACqC;AACzD,YAAA,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,IAAI,CAAC,KAAK,CAAC,eAAe,GAAG,eAAe;;;QAIhD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK;;AAG5C,IAAA,mBAAmB,CACzB,MAA6B,EAAA;AAE7B,QAAA,MAAM,EAAE,SAAS,EAAE,KAAK,GAAG,EAAE,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM;QACvD,MAAM,EAAE,QAAQ,EAAE,GAAG,aAAa,EAAE,GAAG,SAAS;AAEhD,QAAA,MAAM,aAAa,GAAG,IAAI,aAAa,CAAC;YACtC,KAAK;YACL,QAAQ;YACR,aAAa;AACb,YAAA,GAAG,UAAU;YACb,KAAK,EAAE,IAAI,CAAC,EAAE;AACf,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,KAAK,GAAG,aAAa;AAC1B,QAAA,OAAO,aAAa,CAAC,cAAc,EAAE;;AAGvC,IAAA,aAAa,MAAM,CACjB,MAAmB,EAAA;AAEnB,QAAA,OAAO,IAAI,GAAG,CAAI,MAAM,CAAC;;IAG3B,cAAc,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E;;AAEH,QAAA,OAAO,IAAI,CAAC,KAAK
,CAAC,cAAc,EAAE;;AAGpC,IAAA,MAAM,aAAa,CACjB,MAAgB,EAChB,MAA2E,EAC3E,aAAoC,EAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CACb,4EAA4E,CAC7E;;AAEH,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E;;QAGH,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG,KAAK;AACvE,QAAA,IAAI,aAAa,EAAE,SAAS,EAAE;;AAE5B,YAAA,MAAM,SAAS,GAAI,MAAM,CAAC,SAAiC,IAAI,EAAE;AACjE,YAAA,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC,MAAM,CACjC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,SAAS,CAAC,CAC3C;;AAGH,QAAA,IACE,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;AAC1C,YAAA,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;YAC1C,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EACxC;AACA,YAAA,MAAM,MAAM,GAAG,MAAM,CAAC,YAAY,EAAE,OAAO;AAC3C,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,YAAY,EAAE,SAAS;AAChD,YAAA,MAAM,aAAa,GAAG;gBACpB,SAAS,EAAE,IAAI,CAAC,EAAE;AAClB,gBAAA,eAAe,EAAE,MAAM,CAAC,YAAY,EAAE,WAAW,EAAE,eAAe;aACnE;AACD,YAAA,MAAM,OAAO,GAAG,IAAI,eAAe,CAAC;gBAClC,MAAM;gBACN,SAAS;gBACT,aAAa;AACd,aAAA,CAAC;AACF,YAAA,MAAM,CAAC,SAAS,GAAG,CAChB,MAAM,CAAC,SAAiC,IAAI,EAAE,EAC/C,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC;;AAGrB,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,MAAM,YAAY,GAChB,aAAa,EAAE,YAAY;aAC1B,aAAa,EAAE;kBACZ,MAAM,kBAAkB;kBACxB,SAAS,CAAC;AAChB,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAEZ;QACb,MAAM,UAAU,GAAG;eACd,KAAK,EAAE,MAAM,CAAC,CAAC,GAAG,EAAE,IAAI,KAAI;AAC7B,gBAAA,IAAI,CAAE,IAA+B,CAAC,MAAM,EAAE;AAC5C,oBAAA,OAAO,GAAG;;gBAGZ,MAAM,UAAU,GAAG,eAAe,CAChC,CAAC,IAAI,EAAE,MAAyB,EAAC,QAAQ,CAAC,IAAI,EAAE,WAAW,IAAI,EAAE,CAAC,EAClE,IAAI,EAAE,IAAI,IAAI,EAAE,CACjB;AACD,gBAAA,QACE,GAAG,GAAG,YAAY,CAAC,IAAI,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC;AAErE,aAAC,EAAE,CAAC,CAAC,IAAI,CAAC;cACR,CAAC;QACL,IAAI,iBAAiB,GAAG,UAAU;QAClC,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,YAAY,EAAE;YAC5C,iBAAiB,IAAI,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC;;AAE7D,QAAA,MAAM,QAAQ,GAAG,aAAa,EAAE,kBAAkB,IAAI,EAAE;QACxD,IAAI,IAAI,CAAC,KAAK,CAAC,aAAa,IAAI,iBAAiB,GAAG,CAAC,EAAE;YACrD,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,uBAAuB,CACrD,QAAQ,EACR,iBAAiB,CAClB;;AACI,aAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;YAChC,QAAQ,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,iBAAiB;AAC7C,YAAA,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,QAAQ;;aACnC;AACL,YAAA,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,QAAQ;;QAG1C,IAAI,CAAC,KAAK,CAAC,gBAAgB,GAAG,aAAa,EAAE,gBAAgB;AAC7D,QAAA,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,YAAY;AAEtC,QAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;AACvB,QAAA,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE;YAC7D,MAAM,EAAE,IAAI,CAAC,EAAE;YACf,QAAQ,EAAE,IAAI,CAAC,QAAQ;AACxB,SAAA,CAAC;QAEF,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE;AAC7D,YAAA,UAAU,EAAE,IAAI;AACjB,SAAA,CAAC;AAEF,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;AAChC,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK;AAE/C,YAAA,IAAI,SAAS,GAAgB,IAAI,CAAC,KAAK;AACvC,YAAA,IACE,QAAQ;AACR,gBAAA,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC;AACvC,gBAAA,SAAS,KAAK,WAAW,CAAC,iBAAiB,EAC3C;;gBAEA;;YAGF,IAAI,SAAS,IAAI,SAAS,KAAK,WAAW,CAAC,eAAe,EAAE;gBAC1D,SAAS,GAAG,IAAI;;YAGlB,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,SAAS,CAAC;YAC1D,IAAI,OAAO,EAAE;AACX,gBAAA,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;;;AAIzD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;;;IAI/B,oBAAoB,CAC1B,eAAgC,EAChC,GAAM,EAAA;AAEN,QAAA,QAAQ,CAAC,GAAG,IAAe,KAAI;AAC7B,YAAA,MAAM,cAAc,GAAG,eAAe,CAAC,GAAG,CAAC;AAC3C,YAAA,IAAI,cAAc,IAAI,IAA
I,CAAC,KAAK,EAAE;gBAC/B,cAA+C,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,IAAI,CAAC;;AAEzE,SAAC;;AAGH,IAAA,YAAY,CAAC,eAAgC,EAAA;QAC3C,OAAO;AACL,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAC9C,eAAe,EACf,QAAQ,CAAC,UAAU,CACpB;AACD,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAC9C,eAAe,EACf,QAAQ,CAAC,UAAU,CACpB;AACD,YAAA,CAAC,QAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAC5C,eAAe,EACf,QAAQ,CAAC,QAAQ,CAClB;SACF;;AAGH,IAAA,MAAM,aAAa,CAAC,EAClB,QAAQ,EACR,SAAS,EACT,YAAY,EACZ,WAAW,EACX,aAAa,EACb,YAAY,EACZ,YAAY,EACZ,WAAW,GAAG,kBAAkB,EAChC,WAAW,GAAG,WAAW,CAAC,UAAU,EACpC,mBAAmB,GACD,EAAA;QAClB,IACE,YAAY,IAAI,IAAI;AACpB,YAAA,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;AAC1C,YAAA,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;YAC1C,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EACxC;AACA,YAAA,MAAM,MAAM,GAAG,YAAY,CAAC,YAAY,EAAE,OAAO;AACjD,YAAA,MAAM,SAAS,GAAG,YAAY,CAAC,YAAY,EAAE,SAAS;AACtD,YAAA,MAAM,aAAa,GAAG;AACpB,gBAAA,SAAS,EAAE,QAAQ,GAAG,IAAI,CAAC,EAAE;aAC9B;AACD,YAAA,MAAM,OAAO,GAAG,IAAI,eAAe,CAAC;gBAClC,MAAM;gBACN,SAAS;gBACT,aAAa;AACd,aAAA,CAAC;AACF,YAAA,YAAY,CAAC,SAAS,GAAG,CACtB,YAAY,CAAC,SAAiC,IAAI,EAAE,EACrD,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC;;QAGrB,MAAM,aAAa,GAAG,cAAc,CAAC,YAAY,CAC/C,mBAAmB,IAAI,6BAA6B,CACrD;QAED,MAAM,QAAQ,GAAG;AACd,aAAA,GAAG,CAAC,CAAC,IAAI,KAAI;AACZ,YAAA,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM;gBAAE,OAAO,IAAI,CAAC,IAAI;AAC3C,YAAA,OAAO,EAAE;AACX,SAAC;aACA,IAAI,CAAC,IAAI,CAAC;AAEb,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC;YACpC,QAAQ;YACR,WAAW;YACX,aAAa;AACd,SAAA,CAAC;QACF,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;;QAEpC,IACE,YAAY,CAAC,QAAQ,CAAC;aACrB,KAAK,YAAYA,YAAU,IAAI,KAAK,YAAYC,iBAAe,CAAC,EACjE;YACA,KAAK,CAAC,WAAW,GAAI;AACnB,kBAAE,WAAqB;YACzB,KAAK,CAAC,IAAI,GAAI;AACZ,kBAAE,IAAc;AAClB,YAAA,KAAK,CAAC,gBAAgB,GACpB,aACD,EAAE,gBAA0B;AAC7B,YAAA,KAAK,CAAC,eAAe,GACnB,aACD,EAAE,eAAyB;YAC5B,KAAK,CAAC,CAAC,GAAI;AACT,kBAAE,CAAW;;AAGjB,QAAA,MAAM,iBAAiB,GAAG,IAAI,cAAc,CAAC;AAC3C,YAAA,IAAI,EAAE,CACJ,WAA8B,MACoC;gBAClE,KAAK,EAAE,WAAW,CAAC,KAAK;gBACxB,SAAS;gBACT,YAAY;aACb,CAAC;SACH,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAE5C,QAAA,MAAM,UAAU,GACd,WAAW,KAAK,WAAW,CAAC;AAC1B,cAAE,MAAM,6BAA6B,CAAC,KAAK,EAAE,WAAW;cACtD,MAAM,mBAAmB,CAAC,KAAK,EAAE,WAAW,CAAC;;QAGnD,MAAM,SAAS,GAAG;AACf,aAAA,UAAU,CAAC,EAAE,OAAO,EAAE,eAAe,EAAE;aACvC,IAAI,CAAC,iBAAiB;aACtB,IAAI,CAAC,UAAU;AACf,aAAA,UAAU,CAAC,EAAE,OAAO,EAAE,YAAY,EAAE,CAAC;AAExC,QAAA,OAAO,MAAM,SAAS,CAAC,MAAM,CAC3B,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,EACtC,YAAY,CACb;;AAEJ;;;;"}
|
package/dist/esm/utils/title.mjs
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
-
import { RunnableLambda } from '@langchain/core/runnables';
|
|
3
2
|
import { ChatPromptTemplate } from '@langchain/core/prompts';
|
|
3
|
+
import { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';
|
|
4
4
|
import { ContentTypes } from '../common/enum.mjs';
|
|
5
5
|
|
|
6
6
|
const defaultTitlePrompt = `Analyze this conversation and provide:
|
|
@@ -27,36 +27,57 @@ const createTitleRunnable = async (model, _titlePrompt) => {
|
|
|
27
27
|
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
28
28
|
/* @ts-ignore */
|
|
29
29
|
const combinedLLM = model.withStructuredOutput(combinedSchema);
|
|
30
|
-
const titlePrompt = ChatPromptTemplate.fromTemplate(_titlePrompt ?? defaultTitlePrompt);
|
|
30
|
+
const titlePrompt = ChatPromptTemplate.fromTemplate(_titlePrompt ?? defaultTitlePrompt).withConfig({ runName: 'TitlePrompt' });
|
|
31
|
+
const titleOnlyInnerChain = RunnableSequence.from([titlePrompt, titleLLM]);
|
|
32
|
+
const combinedInnerChain = RunnableSequence.from([titlePrompt, combinedLLM]);
|
|
33
|
+
/** Wrap titleOnlyChain in RunnableLambda to create parent span */
|
|
34
|
+
const titleOnlyChain = new RunnableLambda({
|
|
35
|
+
func: async (input, config) => {
|
|
36
|
+
return await titleOnlyInnerChain.invoke(input, config);
|
|
37
|
+
},
|
|
38
|
+
}).withConfig({ runName: 'TitleOnlyChain' });
|
|
39
|
+
/** Wrap combinedChain in RunnableLambda to create parent span */
|
|
40
|
+
const combinedChain = new RunnableLambda({
|
|
41
|
+
func: async (input, config) => {
|
|
42
|
+
return await combinedInnerChain.invoke(input, config);
|
|
43
|
+
},
|
|
44
|
+
}).withConfig({ runName: 'TitleLanguageChain' });
|
|
45
|
+
/** Runnable to add default values if needed */
|
|
46
|
+
const addDefaults = new RunnableLambda({
|
|
47
|
+
func: (result) => ({
|
|
48
|
+
language: result?.language ?? 'English',
|
|
49
|
+
title: result?.title ?? '',
|
|
50
|
+
}),
|
|
51
|
+
}).withConfig({ runName: 'AddDefaults' });
|
|
52
|
+
const combinedChainInner = RunnableSequence.from([
|
|
53
|
+
combinedChain,
|
|
54
|
+
addDefaults,
|
|
55
|
+
]);
|
|
56
|
+
/** Wrap combinedChainWithDefaults in RunnableLambda to create parent span */
|
|
57
|
+
const combinedChainWithDefaults = new RunnableLambda({
|
|
58
|
+
func: async (input, config) => {
|
|
59
|
+
return await combinedChainInner.invoke(input, config);
|
|
60
|
+
},
|
|
61
|
+
}).withConfig({ runName: 'CombinedChainWithDefaults' });
|
|
31
62
|
return new RunnableLambda({
|
|
32
63
|
func: async (input, config) => {
|
|
64
|
+
const invokeInput = { convo: input.convo };
|
|
33
65
|
if (input.skipLanguage) {
|
|
34
|
-
return (await
|
|
35
|
-
convo: input.convo,
|
|
36
|
-
}, config));
|
|
66
|
+
return (await titleOnlyChain.invoke(invokeInput, config));
|
|
37
67
|
}
|
|
38
|
-
|
|
39
|
-
convo: input.convo,
|
|
40
|
-
}, config));
|
|
41
|
-
return {
|
|
42
|
-
language: result?.language ?? 'English',
|
|
43
|
-
title: result?.title ?? '',
|
|
44
|
-
};
|
|
68
|
+
return await combinedChainWithDefaults.invoke(invokeInput, config);
|
|
45
69
|
},
|
|
46
|
-
});
|
|
70
|
+
}).withConfig({ runName: 'TitleGenerator' });
|
|
47
71
|
};
|
|
48
72
|
const defaultCompletionPrompt = `Provide a concise, 5-word-or-less title for the conversation, using title case conventions. Only return the title itself.
|
|
49
73
|
|
|
50
74
|
Conversation:
|
|
51
75
|
{convo}`;
|
|
52
76
|
const createCompletionTitleRunnable = async (model, titlePrompt) => {
|
|
53
|
-
const completionPrompt = ChatPromptTemplate.fromTemplate(titlePrompt ?? defaultCompletionPrompt);
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
convo: input.convo,
|
|
58
|
-
});
|
|
59
|
-
const response = await model.invoke(promptOutput, config);
|
|
77
|
+
const completionPrompt = ChatPromptTemplate.fromTemplate(titlePrompt ?? defaultCompletionPrompt).withConfig({ runName: 'CompletionTitlePrompt' });
|
|
78
|
+
/** Runnable to extract content from model response */
|
|
79
|
+
const extractContent = new RunnableLambda({
|
|
80
|
+
func: (response) => {
|
|
60
81
|
let content = '';
|
|
61
82
|
if (typeof response.content === 'string') {
|
|
62
83
|
content = response.content;
|
|
@@ -67,12 +88,20 @@ const createCompletionTitleRunnable = async (model, titlePrompt) => {
|
|
|
67
88
|
.map((part) => part.text)
|
|
68
89
|
.join('');
|
|
69
90
|
}
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
91
|
+
return { title: content.trim() };
|
|
92
|
+
},
|
|
93
|
+
}).withConfig({ runName: 'ExtractTitle' });
|
|
94
|
+
const innerChain = RunnableSequence.from([
|
|
95
|
+
completionPrompt,
|
|
96
|
+
model,
|
|
97
|
+
extractContent,
|
|
98
|
+
]);
|
|
99
|
+
/** Wrap in RunnableLambda to create a parent span for LangFuse */
|
|
100
|
+
return new RunnableLambda({
|
|
101
|
+
func: async (input, config) => {
|
|
102
|
+
return await innerChain.invoke(input, config);
|
|
74
103
|
},
|
|
75
|
-
});
|
|
104
|
+
}).withConfig({ runName: 'CompletionTitleChain' });
|
|
76
105
|
};
|
|
77
106
|
|
|
78
107
|
export { createCompletionTitleRunnable, createTitleRunnable };
|
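createCompletionTitleRunnable above is now a three-step RunnableSequence (prompt, model, content extraction) wrapped in a RunnableLambda so LangFuse sees one parent span per title request. Below is a rough, self-contained approximation: the fake model and sample conversation are invented, the ContentTypes.TEXT enum is replaced by the literal 'text', and the run names and extraction logic otherwise follow the diff.

import { AIMessage } from '@langchain/core/messages';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import {
  RunnableLambda,
  RunnableSequence,
  type RunnableConfig,
} from '@langchain/core/runnables';

// Stand-in for the chat model; the package passes a real chat model instance here.
const fakeModel = new RunnableLambda({
  func: async (_input: unknown) => new AIMessage('Sourdough Baking Basics'),
});

const completionPrompt = ChatPromptTemplate.fromTemplate(
  'Provide a concise, 5-word-or-less title for the conversation. Only return the title itself.\n\nConversation:\n{convo}'
).withConfig({ runName: 'CompletionTitlePrompt' });

// Normalizes string or array message content into a trimmed title.
const extractContent = new RunnableLambda({
  func: (response: AIMessage): { title: string } => {
    const content =
      typeof response.content === 'string'
        ? response.content
        : response.content
            .filter(
              (part): part is { type: 'text'; text: string } => part.type === 'text'
            )
            .map((part) => part.text)
            .join('');
    return { title: content.trim() };
  },
}).withConfig({ runName: 'ExtractTitle' });

const innerChain = RunnableSequence.from([completionPrompt, fakeModel, extractContent]);

// Wrapping the sequence in a RunnableLambda gives it one parent span/run name.
const completionTitleChain = new RunnableLambda({
  func: async (input: { convo: string }, config?: RunnableConfig) =>
    innerChain.invoke(input, config),
}).withConfig({ runName: 'CompletionTitleChain' });

async function main(): Promise<void> {
  const result = await completionTitleChain.invoke({
    convo: 'User: How do I bake sourdough?\nAI: Start with an active starter.',
  });
  console.log(result); // { title: 'Sourdough Baking Basics' }
}

void main();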
package/dist/esm/utils/title.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"title.mjs","sources":["../../../src/utils/title.ts"],"sourcesContent":["import { z } from 'zod';\nimport {
|
|
1
|
+
{"version":3,"file":"title.mjs","sources":["../../../src/utils/title.ts"],"sourcesContent":["import { z } from 'zod';\nimport { ChatPromptTemplate } from '@langchain/core/prompts';\nimport { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';\nimport type { Runnable, RunnableConfig } from '@langchain/core/runnables';\nimport type { AIMessage } from '@langchain/core/messages';\nimport type * as t from '@/types';\nimport { ContentTypes } from '@/common';\n\nconst defaultTitlePrompt = `Analyze this conversation and provide:\n1. The detected language of the conversation\n2. A concise title in the detected language (5 words or less, no punctuation or quotation)\n\n{convo}`;\n\nconst titleSchema = z.object({\n title: z\n .string()\n .describe(\n 'A concise title for the conversation in 5 words or less, without punctuation or quotation'\n ),\n});\n\nconst combinedSchema = z.object({\n language: z.string().describe('The detected language of the conversation'),\n title: z\n .string()\n .describe(\n 'A concise title for the conversation in 5 words or less, without punctuation or quotation'\n ),\n});\n\nexport const createTitleRunnable = async (\n model: t.ChatModelInstance,\n _titlePrompt?: string\n): Promise<Runnable> => {\n // Disabled since this works fine\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n /* @ts-ignore */\n const titleLLM = model.withStructuredOutput(titleSchema);\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n /* @ts-ignore */\n const combinedLLM = model.withStructuredOutput(combinedSchema);\n\n const titlePrompt = ChatPromptTemplate.fromTemplate(\n _titlePrompt ?? defaultTitlePrompt\n ).withConfig({ runName: 'TitlePrompt' });\n\n const titleOnlyInnerChain = RunnableSequence.from([titlePrompt, titleLLM]);\n const combinedInnerChain = RunnableSequence.from([titlePrompt, combinedLLM]);\n\n /** Wrap titleOnlyChain in RunnableLambda to create parent span */\n const titleOnlyChain = new RunnableLambda({\n func: async (\n input: { convo: string },\n config?: Partial<RunnableConfig>\n ): Promise<{ title: string }> => {\n return await titleOnlyInnerChain.invoke(input, config);\n },\n }).withConfig({ runName: 'TitleOnlyChain' });\n\n /** Wrap combinedChain in RunnableLambda to create parent span */\n const combinedChain = new RunnableLambda({\n func: async (\n input: { convo: string },\n config?: Partial<RunnableConfig>\n ): Promise<{ language: string; title: string }> => {\n return await combinedInnerChain.invoke(input, config);\n },\n }).withConfig({ runName: 'TitleLanguageChain' });\n\n /** Runnable to add default values if needed */\n const addDefaults = new RunnableLambda({\n func: (\n result: { language: string; title: string } | undefined\n ): { language: string; title: string } => ({\n language: result?.language ?? 'English',\n title: result?.title ?? 
'',\n }),\n }).withConfig({ runName: 'AddDefaults' });\n\n const combinedChainInner = RunnableSequence.from([\n combinedChain,\n addDefaults,\n ]);\n\n /** Wrap combinedChainWithDefaults in RunnableLambda to create parent span */\n const combinedChainWithDefaults = new RunnableLambda({\n func: async (\n input: { convo: string },\n config?: Partial<RunnableConfig>\n ): Promise<{ language: string; title: string }> => {\n return await combinedChainInner.invoke(input, config);\n },\n }).withConfig({ runName: 'CombinedChainWithDefaults' });\n\n return new RunnableLambda({\n func: async (\n input: {\n convo: string;\n inputText: string;\n skipLanguage: boolean;\n },\n config?: Partial<RunnableConfig>\n ): Promise<{ language: string; title: string } | { title: string }> => {\n const invokeInput = { convo: input.convo };\n\n if (input.skipLanguage) {\n return (await titleOnlyChain.invoke(invokeInput, config)) as {\n title: string;\n };\n }\n\n return await combinedChainWithDefaults.invoke(invokeInput, config);\n },\n }).withConfig({ runName: 'TitleGenerator' });\n};\n\nconst defaultCompletionPrompt = `Provide a concise, 5-word-or-less title for the conversation, using title case conventions. Only return the title itself.\n\nConversation:\n{convo}`;\n\nexport const createCompletionTitleRunnable = async (\n model: t.ChatModelInstance,\n titlePrompt?: string\n): Promise<Runnable> => {\n const completionPrompt = ChatPromptTemplate.fromTemplate(\n titlePrompt ?? defaultCompletionPrompt\n ).withConfig({ runName: 'CompletionTitlePrompt' });\n\n /** Runnable to extract content from model response */\n const extractContent = new RunnableLambda({\n func: (response: AIMessage): { title: string } => {\n let content = '';\n if (typeof response.content === 'string') {\n content = response.content;\n } else if (Array.isArray(response.content)) {\n content = response.content\n .filter(\n (part): part is { type: ContentTypes.TEXT; text: string } =>\n part.type === ContentTypes.TEXT\n )\n .map((part) => part.text)\n .join('');\n }\n return { title: content.trim() };\n },\n }).withConfig({ runName: 'ExtractTitle' });\n\n const innerChain = RunnableSequence.from([\n completionPrompt,\n model,\n extractContent,\n ]);\n\n /** Wrap in RunnableLambda to create a parent span for LangFuse */\n return new RunnableLambda({\n func: async (\n input: { convo: string },\n config?: Partial<RunnableConfig>\n ): Promise<{ title: string }> => {\n return await innerChain.invoke(input, config);\n },\n }).withConfig({ runName: 'CompletionTitleChain' 
});\n};\n"],"names":[],"mappings":";;;;;AAQA,MAAM,kBAAkB,GAAG,CAAA;;;;QAInB;AAER,MAAM,WAAW,GAAG,CAAC,CAAC,MAAM,CAAC;AAC3B,IAAA,KAAK,EAAE;AACJ,SAAA,MAAM;SACN,QAAQ,CACP,2FAA2F,CAC5F;AACJ,CAAA,CAAC;AAEF,MAAM,cAAc,GAAG,CAAC,CAAC,MAAM,CAAC;IAC9B,QAAQ,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,2CAA2C,CAAC;AAC1E,IAAA,KAAK,EAAE;AACJ,SAAA,MAAM;SACN,QAAQ,CACP,2FAA2F,CAC5F;AACJ,CAAA,CAAC;AAEW,MAAA,mBAAmB,GAAG,OACjC,KAA0B,EAC1B,YAAqB,KACA;;;;IAIrB,MAAM,QAAQ,GAAG,KAAK,CAAC,oBAAoB,CAAC,WAAW,CAAC;;;IAGxD,MAAM,WAAW,GAAG,KAAK,CAAC,oBAAoB,CAAC,cAAc,CAAC;AAE9D,IAAA,MAAM,WAAW,GAAG,kBAAkB,CAAC,YAAY,CACjD,YAAY,IAAI,kBAAkB,CACnC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,aAAa,EAAE,CAAC;AAExC,IAAA,MAAM,mBAAmB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AAC1E,IAAA,MAAM,kBAAkB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC;;AAG5E,IAAA,MAAM,cAAc,GAAG,IAAI,cAAc,CAAC;AACxC,QAAA,IAAI,EAAE,OACJ,KAAwB,EACxB,MAAgC,KACF;YAC9B,OAAO,MAAM,mBAAmB,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC;SACvD;KACF,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;;AAG5C,IAAA,MAAM,aAAa,GAAG,IAAI,cAAc,CAAC;AACvC,QAAA,IAAI,EAAE,OACJ,KAAwB,EACxB,MAAgC,KACgB;YAChD,OAAO,MAAM,kBAAkB,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC;SACtD;KACF,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,oBAAoB,EAAE,CAAC;;AAGhD,IAAA,MAAM,WAAW,GAAG,IAAI,cAAc,CAAC;AACrC,QAAA,IAAI,EAAE,CACJ,MAAuD,MACd;AACzC,YAAA,QAAQ,EAAE,MAAM,EAAE,QAAQ,IAAI,SAAS;AACvC,YAAA,KAAK,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE;SAC3B,CAAC;KACH,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,aAAa,EAAE,CAAC;AAEzC,IAAA,MAAM,kBAAkB,GAAG,gBAAgB,CAAC,IAAI,CAAC;QAC/C,aAAa;QACb,WAAW;AACZ,KAAA,CAAC;;AAGF,IAAA,MAAM,yBAAyB,GAAG,IAAI,cAAc,CAAC;AACnD,QAAA,IAAI,EAAE,OACJ,KAAwB,EACxB,MAAgC,KACgB;YAChD,OAAO,MAAM,kBAAkB,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC;SACtD;KACF,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,2BAA2B,EAAE,CAAC;IAEvD,OAAO,IAAI,cAAc,CAAC;AACxB,QAAA,IAAI,EAAE,OACJ,KAIC,EACD,MAAgC,KACoC;YACpE,MAAM,WAAW,GAAG,EAAE,KAAK,EAAE,KAAK,CAAC,KAAK,EAAE;AAE1C,YAAA,IAAI,KAAK,CAAC,YAAY,EAAE;gBACtB,QAAQ,MAAM,cAAc,CAAC,MAAM,CAAC,WAAW,EAAE,MAAM,CAAC;;YAK1D,OAAO,MAAM,yBAAyB,CAAC,MAAM,CAAC,WAAW,EAAE,MAAM,CAAC;SACnE;KACF,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAC9C;AAEA,MAAM,uBAAuB,GAAG,CAAA;;;QAGxB;AAEK,MAAA,6BAA6B,GAAG,OAC3C,KAA0B,EAC1B,WAAoB,KACC;AACrB,IAAA,MAAM,gBAAgB,GAAG,kBAAkB,CAAC,YAAY,CACtD,WAAW,IAAI,uBAAuB,CACvC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,uBAAuB,EAAE,CAAC;;AAGlD,IAAA,MAAM,cAAc,GAAG,IAAI,cAAc,CAAC;AACxC,QAAA,IAAI,EAAE,CAAC,QAAmB,KAAuB;YAC/C,IAAI,OAAO,GAAG,EAAE;AAChB,YAAA,IAAI,OAAO,QAAQ,CAAC,OAAO,KAAK,QAAQ,EAAE;AACxC,gBAAA,OAAO,GAAG,QAAQ,CAAC,OAAO;;iBACrB,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1C,OAAO,GAAG,QAAQ,CAAC;AAChB,qBAAA,MAAM,CACL,CAAC,IAAI,KACH,IAAI,CAAC,IAAI,KAAK,YAAY,CAAC,IAAI;qBAElC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI;qBACvB,IAAI,CAAC,EAAE,CAAC;;YAEb,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,IAAI,EAAE,EAAE;SACjC;KACF,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,cAAc,EAAE,CAAC;AAE1C,IAAA,MAAM,UAAU,GAAG,gBAAgB,CAAC,IAAI,CAAC;QACvC,gBAAgB;QAChB,KAAK;QACL,cAAc;AACf,KAAA,CAAC;;IAGF,OAAO,IAAI,cAAc,CAAC;AACxB,QAAA,IAAI,EAAE,OACJ,KAAwB,EACxB,MAAgC,KACF;YAC9B,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC;SAC9C;KACF,CAAC,CAAC,UAAU,CAAC,EAAE,OAAO,EAAE,sBAAsB,EAAE,CAAC;AACpD;;;;"}
|
package/dist/types/run.d.ts
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import './instrumentation';
|
|
2
|
-
import type {
|
|
3
|
-
import type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';
|
|
2
|
+
import type { MessageContentComplex, BaseMessage } from '@langchain/core/messages';
|
|
4
3
|
import type { RunnableConfig } from '@langchain/core/runnables';
|
|
4
|
+
import type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';
|
|
5
5
|
import type * as t from '@/types';
|
|
6
6
|
import { Providers } from '@/common';
|
|
7
7
|
import { StandardGraph } from '@/graphs/Graph';
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@librechat/agents",
|
|
3
|
-
"version": "2.4.
|
|
3
|
+
"version": "2.4.89",
|
|
4
4
|
"main": "./dist/cjs/main.cjs",
|
|
5
5
|
"module": "./dist/esm/main.mjs",
|
|
6
6
|
"types": "./dist/types/index.d.ts",
|
|
@@ -47,7 +47,7 @@
|
|
|
47
47
|
"image": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/image.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
|
|
48
48
|
"code_exec_files": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_files.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
|
|
49
49
|
"code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
|
|
50
|
-
"simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider '
|
|
50
|
+
"simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
|
|
51
51
|
"caching": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/caching.ts --name 'Jo' --location 'New York, NY'",
|
|
52
52
|
"thinking": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/thinking.ts --name 'Jo' --location 'New York, NY'",
|
|
53
53
|
"memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
|
package/src/instrumentation.ts
CHANGED
|
@@ -7,8 +7,15 @@ if (
|
|
|
7
7
|
isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
|
|
8
8
|
isPresent(process.env.LANGFUSE_BASE_URL)
|
|
9
9
|
) {
|
|
10
|
+
const langfuseSpanProcessor = new LangfuseSpanProcessor({
|
|
11
|
+
publicKey: process.env.LANGFUSE_PUBLIC_KEY,
|
|
12
|
+
secretKey: process.env.LANGFUSE_SECRET_KEY,
|
|
13
|
+
baseUrl: process.env.LANGFUSE_BASE_URL,
|
|
14
|
+
environment: process.env.NODE_ENV ?? 'development',
|
|
15
|
+
});
|
|
16
|
+
|
|
10
17
|
const sdk = new NodeSDK({
|
|
11
|
-
spanProcessors: [
|
|
18
|
+
spanProcessors: [langfuseSpanProcessor],
|
|
12
19
|
});
|
|
13
20
|
|
|
14
21
|
sdk.start();
|
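Note on the src/instrumentation.ts change: the Langfuse span processor is now constructed with explicit `publicKey`, `secretKey`, `baseUrl`, and `environment` options before being handed to `NodeSDK`. A minimal standalone sketch of the same pattern (the `beforeExit` flush hook is an illustrative assumption, not part of this diff):

```ts
import { NodeSDK } from '@opentelemetry/sdk-node';
import { LangfuseSpanProcessor } from '@langfuse/otel';

const { LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY, LANGFUSE_BASE_URL, NODE_ENV } = process.env;

// Only enable tracing when all three Langfuse credentials are present.
if (LANGFUSE_PUBLIC_KEY && LANGFUSE_SECRET_KEY && LANGFUSE_BASE_URL) {
  const sdk = new NodeSDK({
    spanProcessors: [
      new LangfuseSpanProcessor({
        publicKey: LANGFUSE_PUBLIC_KEY,
        secretKey: LANGFUSE_SECRET_KEY,
        baseUrl: LANGFUSE_BASE_URL,
        // Tag traces with the runtime environment, defaulting to 'development'.
        environment: NODE_ENV ?? 'development',
      }),
    ],
  });

  sdk.start();

  // Illustrative assumption only: flush pending spans before the process exits.
  process.on('beforeExit', () => {
    void sdk.shutdown();
  });
}
```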
package/src/llm/anthropic/llm.spec.ts
CHANGED
|
@@ -58,7 +58,7 @@ async function invoke(
|
|
|
58
58
|
const extendedThinkingModelName = 'claude-3-7-sonnet-20250219';
|
|
59
59
|
|
|
60
60
|
// use this for tests involving citations
|
|
61
|
-
const citationsModelName = 'claude-
|
|
61
|
+
const citationsModelName = 'claude-sonnet-4-5-20250929';
|
|
62
62
|
|
|
63
63
|
// use this for tests involving PDF documents
|
|
64
64
|
const pdfModelName = 'claude-3-5-haiku-20241022';
|
package/src/run.ts
CHANGED
|
@@ -4,13 +4,15 @@ import { zodToJsonSchema } from 'zod-to-json-schema';
|
|
|
4
4
|
import { CallbackHandler } from '@langfuse/langchain';
|
|
5
5
|
import { PromptTemplate } from '@langchain/core/prompts';
|
|
6
6
|
import { SystemMessage } from '@langchain/core/messages';
|
|
7
|
+
import { RunnableLambda } from '@langchain/core/runnables';
|
|
7
8
|
import { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';
|
|
8
9
|
import type {
|
|
9
|
-
BaseMessage,
|
|
10
10
|
MessageContentComplex,
|
|
11
|
+
BaseMessage,
|
|
11
12
|
} from '@langchain/core/messages';
|
|
12
|
-
import type {
|
|
13
|
+
import type { StringPromptValue } from '@langchain/core/prompt_values';
|
|
13
14
|
import type { RunnableConfig } from '@langchain/core/runnables';
|
|
15
|
+
import type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';
|
|
14
16
|
import type * as t from '@/types';
|
|
15
17
|
import { GraphEvents, Providers, Callback, TitleMethod } from '@/common';
|
|
16
18
|
import { manualToolStreamProviders } from '@/llm/providers';
|
|
@@ -149,19 +151,20 @@ export class Run<T extends t.BaseGraphState> {
|
|
|
149
151
|
isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
|
|
150
152
|
isPresent(process.env.LANGFUSE_BASE_URL)
|
|
151
153
|
) {
|
|
154
|
+
const userId = config.configurable?.user_id;
|
|
155
|
+
const sessionId = config.configurable?.thread_id;
|
|
156
|
+
const traceMetadata = {
|
|
157
|
+
messageId: this.id,
|
|
158
|
+
parentMessageId: config.configurable?.requestBody?.parentMessageId,
|
|
159
|
+
};
|
|
160
|
+
const handler = new CallbackHandler({
|
|
161
|
+
userId,
|
|
162
|
+
sessionId,
|
|
163
|
+
traceMetadata,
|
|
164
|
+
});
|
|
152
165
|
config.callbacks = (
|
|
153
166
|
(config.callbacks as t.ProvidedCallbacks) ?? []
|
|
154
|
-
).concat([
|
|
155
|
-
new CallbackHandler({
|
|
156
|
-
userId: config.configurable?.user_id,
|
|
157
|
-
sessionId: this.id,
|
|
158
|
-
traceMetadata: {
|
|
159
|
-
messageId: config.configurable?.requestBody?.messageId,
|
|
160
|
-
conversationId: config.configurable?.requestBody?.conversationId,
|
|
161
|
-
parentMessageId: config.configurable?.requestBody?.parentMessageId,
|
|
162
|
-
},
|
|
163
|
-
}),
|
|
164
|
-
]);
|
|
167
|
+
).concat([handler]);
|
|
165
168
|
}
|
|
166
169
|
|
|
167
170
|
if (!this.id) {
|
|
@@ -290,18 +293,38 @@ export class Run<T extends t.BaseGraphState> {
|
|
|
290
293
|
titleMethod = TitleMethod.COMPLETION,
|
|
291
294
|
titlePromptTemplate,
|
|
292
295
|
}: t.RunTitleOptions): Promise<{ language?: string; title?: string }> {
|
|
296
|
+
if (
|
|
297
|
+
chainOptions != null &&
|
|
298
|
+
isPresent(process.env.LANGFUSE_SECRET_KEY) &&
|
|
299
|
+
isPresent(process.env.LANGFUSE_PUBLIC_KEY) &&
|
|
300
|
+
isPresent(process.env.LANGFUSE_BASE_URL)
|
|
301
|
+
) {
|
|
302
|
+
const userId = chainOptions.configurable?.user_id;
|
|
303
|
+
const sessionId = chainOptions.configurable?.thread_id;
|
|
304
|
+
const traceMetadata = {
|
|
305
|
+
messageId: 'title-' + this.id,
|
|
306
|
+
};
|
|
307
|
+
const handler = new CallbackHandler({
|
|
308
|
+
userId,
|
|
309
|
+
sessionId,
|
|
310
|
+
traceMetadata,
|
|
311
|
+
});
|
|
312
|
+
chainOptions.callbacks = (
|
|
313
|
+
(chainOptions.callbacks as t.ProvidedCallbacks) ?? []
|
|
314
|
+
).concat([handler]);
|
|
315
|
+
}
|
|
316
|
+
|
|
293
317
|
const convoTemplate = PromptTemplate.fromTemplate(
|
|
294
318
|
titlePromptTemplate ?? 'User: {input}\nAI: {output}'
|
|
295
319
|
);
|
|
320
|
+
|
|
296
321
|
const response = contentParts
|
|
297
322
|
.map((part) => {
|
|
298
323
|
if (part?.type === 'text') return part.text;
|
|
299
324
|
return '';
|
|
300
325
|
})
|
|
301
326
|
.join('\n');
|
|
302
|
-
|
|
303
|
-
await convoTemplate.invoke({ input: inputText, output: response })
|
|
304
|
-
).value;
|
|
327
|
+
|
|
305
328
|
const model = this.Graph?.getNewModel({
|
|
306
329
|
provider,
|
|
307
330
|
omitOptions,
|
|
@@ -327,10 +350,32 @@ export class Run<T extends t.BaseGraphState> {
|
|
|
327
350
|
model.n = (clientOptions as t.OpenAIClientOptions | undefined)
|
|
328
351
|
?.n as number;
|
|
329
352
|
}
|
|
330
|
-
|
|
353
|
+
|
|
354
|
+
const convoToTitleInput = new RunnableLambda({
|
|
355
|
+
func: (
|
|
356
|
+
promptValue: StringPromptValue
|
|
357
|
+
): { convo: string; inputText: string; skipLanguage?: boolean } => ({
|
|
358
|
+
convo: promptValue.value,
|
|
359
|
+
inputText,
|
|
360
|
+
skipLanguage,
|
|
361
|
+
}),
|
|
362
|
+
}).withConfig({ runName: 'ConvoTransform' });
|
|
363
|
+
|
|
364
|
+
const titleChain =
|
|
331
365
|
titleMethod === TitleMethod.COMPLETION
|
|
332
366
|
? await createCompletionTitleRunnable(model, titlePrompt)
|
|
333
367
|
: await createTitleRunnable(model, titlePrompt);
|
|
334
|
-
|
|
368
|
+
|
|
369
|
+
/** Pipes `convoTemplate` -> `transformer` -> `titleChain` */
|
|
370
|
+
const fullChain = convoTemplate
|
|
371
|
+
.withConfig({ runName: 'ConvoTemplate' })
|
|
372
|
+
.pipe(convoToTitleInput)
|
|
373
|
+
.pipe(titleChain)
|
|
374
|
+
.withConfig({ runName: 'TitleChain' });
|
|
375
|
+
|
|
376
|
+
return await fullChain.invoke(
|
|
377
|
+
{ input: inputText, output: response },
|
|
378
|
+
chainOptions
|
|
379
|
+
);
|
|
335
380
|
}
|
|
336
381
|
}
|
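Note on the src/run.ts changes: the Langfuse `CallbackHandler` is now built from `configurable.user_id` / `configurable.thread_id` plus trace metadata (for both the main run and title generation), and the former `convoTemplate.invoke(...)` call is replaced by a piped chain with named steps so title generation appears as one traced run. A minimal sketch of that pipe pattern, with a placeholder `titleChain` standing in for the runnable produced by `createCompletionTitleRunnable` / `createTitleRunnable` and placeholder identifier values:

```ts
import { PromptTemplate } from '@langchain/core/prompts';
import { RunnableLambda } from '@langchain/core/runnables';
import { CallbackHandler } from '@langfuse/langchain';
import type { StringPromptValue } from '@langchain/core/prompt_values';

// Placeholder for the real title runnable built elsewhere in run.ts.
const titleChain = new RunnableLambda({
  func: async (input: { convo: string }): Promise<{ title: string }> => ({
    title: input.convo.slice(0, 40),
  }),
});

const convoTemplate = PromptTemplate.fromTemplate('User: {input}\nAI: {output}');

// Adapts the formatted prompt string into the input shape the title chain expects.
const convoToTitleInput = new RunnableLambda({
  func: (promptValue: StringPromptValue): { convo: string } => ({
    convo: promptValue.value,
  }),
}).withConfig({ runName: 'ConvoTransform' });

// Pipes convoTemplate -> transformer -> titleChain under one named parent run.
const fullChain = convoTemplate
  .withConfig({ runName: 'ConvoTemplate' })
  .pipe(convoToTitleInput)
  .pipe(titleChain)
  .withConfig({ runName: 'TitleChain' });

async function main(): Promise<void> {
  // Handler options mirror the diff; the identifier values here are placeholders.
  const handler = new CallbackHandler({
    userId: 'user-123',
    sessionId: 'conversation-num-1',
    traceMetadata: { messageId: 'title-example' },
  });

  const result = await fullChain.invoke(
    { input: 'What is LangGraph?', output: 'A framework for building agent graphs.' },
    { callbacks: [handler] }
  );
  console.log(result.title);
}

void main();
```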
package/src/scripts/simple.ts
CHANGED
|
@@ -17,6 +17,7 @@ import {
|
|
|
17
17
|
import { GraphEvents, Providers, TitleMethod } from '@/common';
|
|
18
18
|
import { getLLMConfig } from '@/utils/llmConfig';
|
|
19
19
|
import { getArgs } from '@/scripts/args';
|
|
20
|
+
import { sleep } from '@/utils/run';
|
|
20
21
|
import { Run } from '@/run';
|
|
21
22
|
|
|
22
23
|
const conversationHistory: BaseMessage[] = [];
|
|
@@ -129,7 +130,7 @@ async function testStandardStreaming(): Promise<void> {
|
|
|
129
130
|
}
|
|
130
131
|
|
|
131
132
|
const run = await Run.create<t.IState>({
|
|
132
|
-
runId: 'test-
|
|
133
|
+
runId: 'test-simple-script',
|
|
133
134
|
graphConfig: {
|
|
134
135
|
type: 'standard',
|
|
135
136
|
llmConfig,
|
|
@@ -144,7 +145,9 @@ async function testStandardStreaming(): Promise<void> {
|
|
|
144
145
|
});
|
|
145
146
|
|
|
146
147
|
const config = {
|
|
148
|
+
runId: 'test-simple-script',
|
|
147
149
|
configurable: {
|
|
150
|
+
user_id: 'user-123',
|
|
148
151
|
thread_id: 'conversation-num-1',
|
|
149
152
|
},
|
|
150
153
|
streamMode: 'values',
|
|
@@ -176,6 +179,10 @@ async function testStandardStreaming(): Promise<void> {
|
|
|
176
179
|
contentParts,
|
|
177
180
|
// titleMethod: TitleMethod.STRUCTURED,
|
|
178
181
|
chainOptions: {
|
|
182
|
+
configurable: {
|
|
183
|
+
user_id: 'user-123',
|
|
184
|
+
thread_id: 'conversation-num-1',
|
|
185
|
+
},
|
|
179
186
|
callbacks: [
|
|
180
187
|
{
|
|
181
188
|
handleLLMEnd,
|
|
@@ -192,6 +199,7 @@ async function testStandardStreaming(): Promise<void> {
|
|
|
192
199
|
console.log('Collected usage metadata:', collectedUsage);
|
|
193
200
|
console.log('Generated Title:', titleResult);
|
|
194
201
|
console.log('Collected title usage metadata:', collected);
|
|
202
|
+
await sleep(5000);
|
|
195
203
|
}
|
|
196
204
|
|
|
197
205
|
process.on('unhandledRejection', (reason, promise) => {
|
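Note on the src/scripts/simple.ts changes: the script now passes the same `user_id` / `thread_id` pair in both the run `config.configurable` and the title `chainOptions.configurable`, so the main run and the title run are grouped under one user/session in tracing, and it sleeps briefly before exiting, presumably to give the tracing callbacks time to flush. A sketch of that shared config shape (identifier values are the placeholders used by the script):

```ts
// Shared identifiers so the main run and the title run land in the same trace session.
const userId = 'user-123';
const threadId = 'conversation-num-1';

const config = {
  runId: 'test-simple-script',
  configurable: {
    user_id: userId,
    thread_id: threadId,
  },
  streamMode: 'values' as const,
};

const titleChainOptions = {
  configurable: {
    user_id: userId,
    thread_id: threadId,
  },
};
```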
package/src/utils/title.ts
CHANGED
|
@@ -1,7 +1,8 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
-
import { RunnableLambda } from '@langchain/core/runnables';
|
|
3
2
|
import { ChatPromptTemplate } from '@langchain/core/prompts';
|
|
3
|
+
import { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';
|
|
4
4
|
import type { Runnable, RunnableConfig } from '@langchain/core/runnables';
|
|
5
|
+
import type { AIMessage } from '@langchain/core/messages';
|
|
5
6
|
import type * as t from '@/types';
|
|
6
7
|
import { ContentTypes } from '@/common';
|
|
7
8
|
|
|
@@ -42,7 +43,55 @@ export const createTitleRunnable = async (
|
|
|
42
43
|
|
|
43
44
|
const titlePrompt = ChatPromptTemplate.fromTemplate(
|
|
44
45
|
_titlePrompt ?? defaultTitlePrompt
|
|
45
|
-
);
|
|
46
|
+
).withConfig({ runName: 'TitlePrompt' });
|
|
47
|
+
|
|
48
|
+
const titleOnlyInnerChain = RunnableSequence.from([titlePrompt, titleLLM]);
|
|
49
|
+
const combinedInnerChain = RunnableSequence.from([titlePrompt, combinedLLM]);
|
|
50
|
+
|
|
51
|
+
/** Wrap titleOnlyChain in RunnableLambda to create parent span */
|
|
52
|
+
const titleOnlyChain = new RunnableLambda({
|
|
53
|
+
func: async (
|
|
54
|
+
input: { convo: string },
|
|
55
|
+
config?: Partial<RunnableConfig>
|
|
56
|
+
): Promise<{ title: string }> => {
|
|
57
|
+
return await titleOnlyInnerChain.invoke(input, config);
|
|
58
|
+
},
|
|
59
|
+
}).withConfig({ runName: 'TitleOnlyChain' });
|
|
60
|
+
|
|
61
|
+
/** Wrap combinedChain in RunnableLambda to create parent span */
|
|
62
|
+
const combinedChain = new RunnableLambda({
|
|
63
|
+
func: async (
|
|
64
|
+
input: { convo: string },
|
|
65
|
+
config?: Partial<RunnableConfig>
|
|
66
|
+
): Promise<{ language: string; title: string }> => {
|
|
67
|
+
return await combinedInnerChain.invoke(input, config);
|
|
68
|
+
},
|
|
69
|
+
}).withConfig({ runName: 'TitleLanguageChain' });
|
|
70
|
+
|
|
71
|
+
/** Runnable to add default values if needed */
|
|
72
|
+
const addDefaults = new RunnableLambda({
|
|
73
|
+
func: (
|
|
74
|
+
result: { language: string; title: string } | undefined
|
|
75
|
+
): { language: string; title: string } => ({
|
|
76
|
+
language: result?.language ?? 'English',
|
|
77
|
+
title: result?.title ?? '',
|
|
78
|
+
}),
|
|
79
|
+
}).withConfig({ runName: 'AddDefaults' });
|
|
80
|
+
|
|
81
|
+
const combinedChainInner = RunnableSequence.from([
|
|
82
|
+
combinedChain,
|
|
83
|
+
addDefaults,
|
|
84
|
+
]);
|
|
85
|
+
|
|
86
|
+
/** Wrap combinedChainWithDefaults in RunnableLambda to create parent span */
|
|
87
|
+
const combinedChainWithDefaults = new RunnableLambda({
|
|
88
|
+
func: async (
|
|
89
|
+
input: { convo: string },
|
|
90
|
+
config?: Partial<RunnableConfig>
|
|
91
|
+
): Promise<{ language: string; title: string }> => {
|
|
92
|
+
return await combinedChainInner.invoke(input, config);
|
|
93
|
+
},
|
|
94
|
+
}).withConfig({ runName: 'CombinedChainWithDefaults' });
|
|
46
95
|
|
|
47
96
|
return new RunnableLambda({
|
|
48
97
|
func: async (
|
|
@@ -53,28 +102,17 @@ export const createTitleRunnable = async (
|
|
|
53
102
|
},
|
|
54
103
|
config?: Partial<RunnableConfig>
|
|
55
104
|
): Promise<{ language: string; title: string } | { title: string }> => {
|
|
105
|
+
const invokeInput = { convo: input.convo };
|
|
106
|
+
|
|
56
107
|
if (input.skipLanguage) {
|
|
57
|
-
return (await
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
},
|
|
61
|
-
config
|
|
62
|
-
)) as { title: string };
|
|
108
|
+
return (await titleOnlyChain.invoke(invokeInput, config)) as {
|
|
109
|
+
title: string;
|
|
110
|
+
};
|
|
63
111
|
}
|
|
64
112
|
|
|
65
|
-
|
|
66
|
-
{
|
|
67
|
-
convo: input.convo,
|
|
68
|
-
},
|
|
69
|
-
config
|
|
70
|
-
)) as { language: string; title: string } | undefined;
|
|
71
|
-
|
|
72
|
-
return {
|
|
73
|
-
language: result?.language ?? 'English',
|
|
74
|
-
title: result?.title ?? '',
|
|
75
|
-
};
|
|
113
|
+
return await combinedChainWithDefaults.invoke(invokeInput, config);
|
|
76
114
|
},
|
|
77
|
-
});
|
|
115
|
+
}).withConfig({ runName: 'TitleGenerator' });
|
|
78
116
|
};
|
|
79
117
|
|
|
80
118
|
const defaultCompletionPrompt = `Provide a concise, 5-word-or-less title for the conversation, using title case conventions. Only return the title itself.
|
|
@@ -88,22 +126,11 @@ export const createCompletionTitleRunnable = async (
|
|
|
88
126
|
): Promise<Runnable> => {
|
|
89
127
|
const completionPrompt = ChatPromptTemplate.fromTemplate(
|
|
90
128
|
titlePrompt ?? defaultCompletionPrompt
|
|
91
|
-
);
|
|
129
|
+
).withConfig({ runName: 'CompletionTitlePrompt' });
|
|
92
130
|
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
convo: string;
|
|
97
|
-
inputText: string;
|
|
98
|
-
skipLanguage: boolean;
|
|
99
|
-
},
|
|
100
|
-
config?: Partial<RunnableConfig>
|
|
101
|
-
): Promise<{ title: string }> => {
|
|
102
|
-
const promptOutput = await completionPrompt.invoke({
|
|
103
|
-
convo: input.convo,
|
|
104
|
-
});
|
|
105
|
-
|
|
106
|
-
const response = await model.invoke(promptOutput, config);
|
|
131
|
+
/** Runnable to extract content from model response */
|
|
132
|
+
const extractContent = new RunnableLambda({
|
|
133
|
+
func: (response: AIMessage): { title: string } => {
|
|
107
134
|
let content = '';
|
|
108
135
|
if (typeof response.content === 'string') {
|
|
109
136
|
content = response.content;
|
|
@@ -116,10 +143,23 @@ export const createCompletionTitleRunnable = async (
|
|
|
116
143
|
.map((part) => part.text)
|
|
117
144
|
.join('');
|
|
118
145
|
}
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
146
|
+
return { title: content.trim() };
|
|
147
|
+
},
|
|
148
|
+
}).withConfig({ runName: 'ExtractTitle' });
|
|
149
|
+
|
|
150
|
+
const innerChain = RunnableSequence.from([
|
|
151
|
+
completionPrompt,
|
|
152
|
+
model,
|
|
153
|
+
extractContent,
|
|
154
|
+
]);
|
|
155
|
+
|
|
156
|
+
/** Wrap in RunnableLambda to create a parent span for LangFuse */
|
|
157
|
+
return new RunnableLambda({
|
|
158
|
+
func: async (
|
|
159
|
+
input: { convo: string },
|
|
160
|
+
config?: Partial<RunnableConfig>
|
|
161
|
+
): Promise<{ title: string }> => {
|
|
162
|
+
return await innerChain.invoke(input, config);
|
|
123
163
|
},
|
|
124
|
-
});
|
|
164
|
+
}).withConfig({ runName: 'CompletionTitleChain' });
|
|
125
165
|
};
|
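Note on the src/utils/title.ts rework: each title path now follows one pattern: compose the steps with `RunnableSequence.from`, then wrap the sequence in a `RunnableLambda` named via `withConfig({ runName })`, so tracing backends (e.g. Langfuse) see a single named parent span with the prompt/model steps as children. A minimal sketch of that wrapping pattern, with placeholder steps in place of the real prompt and model:

```ts
import { RunnableLambda, RunnableSequence } from '@langchain/core/runnables';
import type { RunnableConfig } from '@langchain/core/runnables';

// Placeholder steps standing in for the real prompt template and model call.
const formatConvo = new RunnableLambda({
  func: (input: { convo: string }): string => `Conversation:\n${input.convo}`,
});

const extractTitle = new RunnableLambda({
  func: (text: string): { title: string } => ({ title: text.slice(0, 40) }),
});

const innerChain = RunnableSequence.from([formatConvo, extractTitle]);

// Wrapping the sequence creates one named parent span; forwarding `config`
// propagates callbacks and tracing into the child runs.
const titleOnlyChain = new RunnableLambda({
  func: async (
    input: { convo: string },
    config?: Partial<RunnableConfig>
  ): Promise<{ title: string }> => {
    return await innerChain.invoke(input, config);
  },
}).withConfig({ runName: 'TitleOnlyChain' });

// Usage: callbacks passed at invoke time show up under the 'TitleOnlyChain' span.
void titleOnlyChain.invoke({ convo: 'User: Hi\nAI: Hello!' });
```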