langwatch 0.2.0 → 0.3.0-prerelease.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +7 -0
- package/README.md +268 -1
- package/dist/chunk-4BZATFKJ.mjs +181 -0
- package/dist/chunk-4BZATFKJ.mjs.map +1 -0
- package/dist/chunk-CSC3CMIT.mjs +118 -0
- package/dist/chunk-CSC3CMIT.mjs.map +1 -0
- package/dist/chunk-F63YKTXA.mjs +47 -0
- package/dist/chunk-F63YKTXA.mjs.map +1 -0
- package/dist/chunk-G3AUABT7.js +4 -0
- package/dist/chunk-G3AUABT7.js.map +1 -0
- package/dist/chunk-HPC6Z7J4.js +118 -0
- package/dist/chunk-HPC6Z7J4.js.map +1 -0
- package/dist/chunk-KGDAENGD.js +50 -0
- package/dist/chunk-KGDAENGD.js.map +1 -0
- package/dist/chunk-LD74LVRU.js +47 -0
- package/dist/chunk-LD74LVRU.js.map +1 -0
- package/dist/chunk-OM7VY3XT.mjs +4 -0
- package/dist/chunk-PCQVQ7SB.js +45 -0
- package/dist/chunk-PCQVQ7SB.js.map +1 -0
- package/dist/chunk-PMBEK6YE.mjs +424 -0
- package/dist/chunk-PMBEK6YE.mjs.map +1 -0
- package/dist/chunk-PR3JDWC3.mjs +50 -0
- package/dist/chunk-PR3JDWC3.mjs.map +1 -0
- package/dist/chunk-PTJ6AAI7.js +360 -0
- package/dist/chunk-PTJ6AAI7.js.map +1 -0
- package/dist/chunk-QEWDG5QE.mjs +45 -0
- package/dist/chunk-QEWDG5QE.mjs.map +1 -0
- package/dist/chunk-REUCVT7A.mjs +39 -0
- package/dist/chunk-REUCVT7A.mjs.map +1 -0
- package/dist/chunk-SVJ7SCGB.js +424 -0
- package/dist/chunk-SVJ7SCGB.js.map +1 -0
- package/dist/chunk-VJSOCNPA.js +181 -0
- package/dist/chunk-VJSOCNPA.js.map +1 -0
- package/dist/chunk-WM2GRSRW.js +39 -0
- package/dist/chunk-WM2GRSRW.js.map +1 -0
- package/dist/chunk-Z5J5UI5E.mjs +360 -0
- package/dist/chunk-Z5J5UI5E.mjs.map +1 -0
- package/dist/client-B2HqIKg6.d.ts +51 -0
- package/dist/client-XyCqclCi.d.mts +51 -0
- package/dist/client-browser.d.mts +8 -0
- package/dist/client-browser.d.ts +8 -0
- package/dist/client-browser.js +83 -0
- package/dist/client-browser.js.map +1 -0
- package/dist/client-browser.mjs +83 -0
- package/dist/client-browser.mjs.map +1 -0
- package/dist/client-node.d.mts +8 -0
- package/dist/client-node.d.ts +8 -0
- package/dist/client-node.js +90 -0
- package/dist/client-node.js.map +1 -0
- package/dist/client-node.mjs +90 -0
- package/dist/client-node.mjs.map +1 -0
- package/dist/evaluation/index.d.mts +897 -0
- package/dist/evaluation/index.d.ts +897 -0
- package/dist/evaluation/index.js +13 -0
- package/dist/evaluation/index.js.map +1 -0
- package/dist/evaluation/index.mjs +13 -0
- package/dist/evaluation/index.mjs.map +1 -0
- package/dist/filterable-batch-span-processor-zO5kcjBY.d.mts +64 -0
- package/dist/filterable-batch-span-processor-zO5kcjBY.d.ts +64 -0
- package/dist/index.d.mts +45 -1027
- package/dist/index.d.ts +45 -1027
- package/dist/index.js +11 -27291
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +23 -956
- package/dist/index.mjs.map +1 -1
- package/dist/observability/index.d.mts +260 -0
- package/dist/observability/index.d.ts +260 -0
- package/dist/observability/index.js +20 -0
- package/dist/observability/index.js.map +1 -0
- package/dist/observability/index.mjs +20 -0
- package/dist/observability/index.mjs.map +1 -0
- package/dist/observability/instrumentation/langchain/index.d.mts +40 -0
- package/dist/observability/instrumentation/langchain/index.d.ts +40 -0
- package/dist/observability/instrumentation/langchain/index.js +666 -0
- package/dist/observability/instrumentation/langchain/index.js.map +1 -0
- package/dist/observability/instrumentation/langchain/index.mjs +666 -0
- package/dist/observability/instrumentation/langchain/index.mjs.map +1 -0
- package/dist/prompt/index.d.mts +10 -0
- package/dist/prompt/index.d.ts +10 -0
- package/dist/prompt/index.js +18 -0
- package/dist/prompt/index.js.map +1 -0
- package/dist/prompt/index.mjs +18 -0
- package/dist/prompt/index.mjs.map +1 -0
- package/dist/prompt-BXJWdbQp.d.mts +1967 -0
- package/dist/prompt-BXJWdbQp.d.ts +1967 -0
- package/dist/record-evaluation-CmxMXa-3.d.mts +25 -0
- package/dist/record-evaluation-CmxMXa-3.d.ts +25 -0
- package/dist/trace-D-bZOuqb.d.mts +622 -0
- package/dist/trace-G2312klE.d.ts +622 -0
- package/package.json +86 -37
- package/.eslintrc.cjs +0 -37
- package/copy-types.sh +0 -17
- package/dist/chunk-LKD2K67J.mjs +0 -717
- package/dist/chunk-LKD2K67J.mjs.map +0 -1
- package/dist/utils-Cv-rUjJ1.d.mts +0 -313
- package/dist/utils-Cv-rUjJ1.d.ts +0 -313
- package/dist/utils.d.mts +0 -2
- package/dist/utils.d.ts +0 -2
- package/dist/utils.js +0 -709
- package/dist/utils.js.map +0 -1
- package/dist/utils.mjs +0 -11
- package/example/.env.example +0 -12
- package/example/.eslintrc.json +0 -26
- package/example/LICENSE +0 -13
- package/example/README.md +0 -12
- package/example/app/(chat)/chat/[id]/page.tsx +0 -60
- package/example/app/(chat)/layout.tsx +0 -14
- package/example/app/(chat)/page.tsx +0 -27
- package/example/app/actions.ts +0 -156
- package/example/app/globals.css +0 -76
- package/example/app/guardrails/page.tsx +0 -26
- package/example/app/langchain/page.tsx +0 -27
- package/example/app/langchain-rag/page.tsx +0 -28
- package/example/app/late-update/page.tsx +0 -27
- package/example/app/layout.tsx +0 -64
- package/example/app/login/actions.ts +0 -71
- package/example/app/login/page.tsx +0 -18
- package/example/app/manual/page.tsx +0 -27
- package/example/app/new/page.tsx +0 -5
- package/example/app/opengraph-image.png +0 -0
- package/example/app/share/[id]/page.tsx +0 -58
- package/example/app/signup/actions.ts +0 -111
- package/example/app/signup/page.tsx +0 -18
- package/example/app/twitter-image.png +0 -0
- package/example/auth.config.ts +0 -42
- package/example/auth.ts +0 -45
- package/example/components/button-scroll-to-bottom.tsx +0 -36
- package/example/components/chat-history.tsx +0 -49
- package/example/components/chat-list.tsx +0 -52
- package/example/components/chat-message-actions.tsx +0 -40
- package/example/components/chat-message.tsx +0 -80
- package/example/components/chat-panel.tsx +0 -139
- package/example/components/chat-share-dialog.tsx +0 -95
- package/example/components/chat.tsx +0 -84
- package/example/components/clear-history.tsx +0 -75
- package/example/components/empty-screen.tsx +0 -38
- package/example/components/external-link.tsx +0 -29
- package/example/components/footer.tsx +0 -19
- package/example/components/header.tsx +0 -114
- package/example/components/login-button.tsx +0 -42
- package/example/components/login-form.tsx +0 -97
- package/example/components/markdown.tsx +0 -9
- package/example/components/prompt-form.tsx +0 -115
- package/example/components/providers.tsx +0 -17
- package/example/components/sidebar-actions.tsx +0 -125
- package/example/components/sidebar-desktop.tsx +0 -19
- package/example/components/sidebar-footer.tsx +0 -16
- package/example/components/sidebar-item.tsx +0 -124
- package/example/components/sidebar-items.tsx +0 -42
- package/example/components/sidebar-list.tsx +0 -38
- package/example/components/sidebar-mobile.tsx +0 -31
- package/example/components/sidebar-toggle.tsx +0 -24
- package/example/components/sidebar.tsx +0 -21
- package/example/components/signup-form.tsx +0 -95
- package/example/components/stocks/events-skeleton.tsx +0 -31
- package/example/components/stocks/events.tsx +0 -30
- package/example/components/stocks/index.tsx +0 -36
- package/example/components/stocks/message.tsx +0 -134
- package/example/components/stocks/spinner.tsx +0 -16
- package/example/components/stocks/stock-purchase.tsx +0 -146
- package/example/components/stocks/stock-skeleton.tsx +0 -22
- package/example/components/stocks/stock.tsx +0 -210
- package/example/components/stocks/stocks-skeleton.tsx +0 -9
- package/example/components/stocks/stocks.tsx +0 -67
- package/example/components/tailwind-indicator.tsx +0 -14
- package/example/components/theme-toggle.tsx +0 -31
- package/example/components/ui/alert-dialog.tsx +0 -141
- package/example/components/ui/badge.tsx +0 -36
- package/example/components/ui/button.tsx +0 -57
- package/example/components/ui/codeblock.tsx +0 -148
- package/example/components/ui/dialog.tsx +0 -122
- package/example/components/ui/dropdown-menu.tsx +0 -205
- package/example/components/ui/icons.tsx +0 -507
- package/example/components/ui/input.tsx +0 -25
- package/example/components/ui/label.tsx +0 -26
- package/example/components/ui/select.tsx +0 -164
- package/example/components/ui/separator.tsx +0 -31
- package/example/components/ui/sheet.tsx +0 -140
- package/example/components/ui/sonner.tsx +0 -31
- package/example/components/ui/switch.tsx +0 -29
- package/example/components/ui/textarea.tsx +0 -24
- package/example/components/ui/tooltip.tsx +0 -30
- package/example/components/user-menu.tsx +0 -53
- package/example/components.json +0 -17
- package/example/instrumentation.ts +0 -11
- package/example/lib/chat/guardrails.tsx +0 -181
- package/example/lib/chat/langchain-rag.tsx +0 -191
- package/example/lib/chat/langchain.tsx +0 -112
- package/example/lib/chat/late-update.tsx +0 -208
- package/example/lib/chat/manual.tsx +0 -605
- package/example/lib/chat/vercel-ai.tsx +0 -576
- package/example/lib/hooks/use-copy-to-clipboard.tsx +0 -33
- package/example/lib/hooks/use-enter-submit.tsx +0 -23
- package/example/lib/hooks/use-local-storage.ts +0 -24
- package/example/lib/hooks/use-scroll-anchor.tsx +0 -86
- package/example/lib/hooks/use-sidebar.tsx +0 -60
- package/example/lib/hooks/use-streamable-text.ts +0 -25
- package/example/lib/types.ts +0 -41
- package/example/lib/utils.ts +0 -89
- package/example/middleware.ts +0 -8
- package/example/next-env.d.ts +0 -5
- package/example/next.config.js +0 -16
- package/example/package-lock.json +0 -10917
- package/example/package.json +0 -84
- package/example/pnpm-lock.yaml +0 -5712
- package/example/postcss.config.js +0 -6
- package/example/prettier.config.cjs +0 -34
- package/example/public/apple-touch-icon.png +0 -0
- package/example/public/favicon-16x16.png +0 -0
- package/example/public/favicon.ico +0 -0
- package/example/public/next.svg +0 -1
- package/example/public/thirteen.svg +0 -1
- package/example/public/vercel.svg +0 -1
- package/example/tailwind.config.ts +0 -81
- package/example/tsconfig.json +0 -35
- package/src/LangWatchExporter.ts +0 -96
- package/src/evaluations.ts +0 -219
- package/src/index.test.ts +0 -402
- package/src/index.ts +0 -596
- package/src/langchain.ts +0 -557
- package/src/server/types/.gitkeep +0 -0
- package/src/typeUtils.ts +0 -89
- package/src/types.ts +0 -82
- package/src/utils.ts +0 -205
- package/ts-to-zod.config.js +0 -24
- package/tsconfig.json +0 -32
- package/tsup.config.ts +0 -10
- package/vitest.config.ts +0 -8
- /package/dist/{utils.mjs.map → chunk-OM7VY3XT.mjs.map} +0 -0

package/example/lib/chat/guardrails.tsx
@@ -1,181 +0,0 @@
-import 'server-only'
-
-import {
-  createAI,
-  createStreamableValue,
-  getMutableAIState,
-  type MutableAIState
-} from 'ai/rsc'
-
-import { BotMessage } from '@/components/stocks'
-
-import { Message } from '@/lib/types'
-import { nanoid } from '@/lib/utils'
-import { StringOutputParser } from '@langchain/core/output_parsers'
-import { ChatPromptTemplate } from '@langchain/core/prompts'
-import { ChatOpenAI } from '@langchain/openai'
-import { LangWatch, type LangWatchTrace } from 'langwatch'
-
-async function submitUserMessage(message: string) {
-  'use server'
-
-  const langwatch = new LangWatch()
-  langwatch.on('error', e => {
-    console.log('Error from LangWatch:', e)
-  })
-
-  const trace = langwatch.getTrace()
-
-  const aiState = getMutableAIState<typeof Guardrails>()
-  const textStream = createStreamableValue('')
-  const textNode = <BotMessage content={textStream.value} />
-
-  void llmStep({ message, trace, aiState, textStream })
-
-  return {
-    id: nanoid(),
-    display: textNode
-  }
-}
-
-async function llmStep({
-  message,
-  trace,
-  aiState,
-  textStream
-}: {
-  message: string
-  trace: LangWatchTrace
-  aiState: MutableAIState<AIState>
-  textStream: ReturnType<typeof createStreamableValue>
-}) {
-  'use server'
-
-  textStream.update('Running Jailbreak guardrail...\n\n')
-
-  const jailbreakPromise = trace.evaluate({
-    evaluator: 'azure/jailbreak',
-    name: 'Jailbreak Detection',
-    input: message,
-    settings: {},
-    asGuardrail: true
-  })
-
-  const prompt = ChatPromptTemplate.fromMessages([
-    ['system', 'Translate the following from English into Italian'],
-    ['human', '{input}']
-  ])
-  const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
-  const outputParser = new StringOutputParser()
-
-  const chain = prompt.pipe(model).pipe(outputParser)
-
-  const chainPromise = chain.invoke(
-    { input: message },
-    { callbacks: [trace.getLangChainCallback()] }
-  )
-
-  const [jailbreakResult, result] = await Promise.all([
-    jailbreakPromise,
-    chainPromise
-  ])
-
-  if (!jailbreakResult.passed) {
-    textStream.update('Jailbreak detected, stopping execution.')
-    textStream.done()
-    aiState.done()
-    return
-  }
-
-  aiState.update({
-    ...aiState.get(),
-    messages: [
-      {
-        id: nanoid(),
-        role: 'system',
-        content: 'Translate the following from English into Italian'
-      },
-      {
-        id: nanoid(),
-        role: 'user',
-        content: message
-      }
-    ]
-  })
-
-  textStream.update('Running Moderation guardrail...\n\n')
-
-  const moderationGuardrail = await trace.evaluate({
-    evaluator: 'openai/moderation',
-    asGuardrail: true,
-    name: 'Moderation',
-    input: message,
-    output: result, // optional
-    settings: {
-      model: 'text-moderation-stable',
-      categories: {
-        harassment: true,
-        harassment_threatening: true,
-        hate: true,
-        hate_threatening: true,
-        self_harm: true,
-        self_harm_instructions: true,
-        self_harm_intent: true,
-        sexual: true,
-        sexual_minors: true,
-        violence: true,
-        violence_graphic: true
-      }
-    }
-  })
-
-  if (!moderationGuardrail.passed) {
-    textStream.update('Moderation failed, stopping execution.')
-    textStream.done()
-    aiState.done()
-    return
-  }
-
-  textStream.update(result)
-  textStream.done()
-
-  aiState.done({
-    ...aiState.get(),
-    messages: [
-      ...aiState.get().messages,
-      {
-        id: nanoid(),
-        role: 'assistant',
-        content: result
-      }
-    ]
-  })
-}
-
-export type AIState = {
-  chatId: string
-  messages: Message[]
-}
-
-export type UIState = {
-  id: string
-  display: React.ReactNode
-}[]
-
-export const Guardrails = createAI<AIState, UIState>({
-  actions: {
-    submitUserMessage
-  },
-  initialUIState: [],
-  initialAIState: { chatId: nanoid(), messages: [] },
-  onGetUIState: async () => {
-    'use server'
-
-    return undefined
-  },
-  onSetAIState: async ({ state }) => {
-    'use server'
-
-    return
-  }
-})

package/example/lib/chat/langchain-rag.tsx
@@ -1,191 +0,0 @@
-import 'server-only'
-
-import { openai } from '@ai-sdk/openai'
-import {
-  createAI,
-  createStreamableUI,
-  createStreamableValue,
-  getMutableAIState,
-  streamUI
-} from 'ai/rsc'
-
-import { BotCard, BotMessage, Purchase, Stock } from '@/components/stocks'
-
-import { Events } from '@/components/stocks/events'
-import { SpinnerMessage, UserMessage } from '@/components/stocks/message'
-import { Stocks } from '@/components/stocks/stocks'
-import { Chat, Message } from '@/lib/types'
-import { nanoid } from '@/lib/utils'
-import { LangWatch, convertFromVercelAIMessages } from 'langwatch'
-import { ChatOpenAI } from '@langchain/openai'
-import {
-  ChatPromptTemplate,
-  PromptTemplateInput
-} from '@langchain/core/prompts'
-import {
-  HumanMessage,
-  SystemMessage,
-  AIMessage,
-  ToolMessage,
-  BaseMessageLike
-} from '@langchain/core/messages'
-import { StringOutputParser } from '@langchain/core/output_parsers'
-import { CallbackManagerForRetrieverRun } from '@langchain/core/callbacks/manager'
-import {
-  BaseRetriever,
-  type BaseRetrieverInput
-} from '@langchain/core/retrievers'
-import { Document } from '@langchain/core/documents'
-import {
-  RunnableLambda,
-  RunnableMap,
-  RunnablePassthrough
-} from '@langchain/core/runnables'
-
-async function submitUserMessage(message: string) {
-  'use server'
-
-  const langwatch = new LangWatch()
-  langwatch.on('error', e => {
-    console.log('Error from LangWatch:', e)
-  })
-
-  const trace = langwatch.getTrace()
-
-  const aiState = getMutableAIState<typeof LangChainRAGAI>()
-
-  const messages: BaseMessageLike[] = [
-    ['system', 'Answer based on the retrieved context'],
-    ...(aiState.get().messages.map(message => {
-      if (message.role === 'system') {
-        return ['system', message.content.toString()]
-      }
-      if (message.role === 'user') {
-        return ['human', message.content.toString()]
-      }
-      if (message.role === 'tool') {
-        return ['tool', message.content.toString()]
-      }
-      return ['ai', message.content.toString()]
-    }) as BaseMessageLike[]),
-    ['ai', 'Retrieved the following context: {context}'],
-    ['human', '{question}']
-  ]
-
-  aiState.update({
-    ...aiState.get(),
-    messages: [
-      ...aiState.get().messages,
-      {
-        id: nanoid(),
-        role: 'user',
-        content: message
-      }
-    ]
-  })
-
-  const prompt = ChatPromptTemplate.fromMessages(messages)
-  const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
-  const retriever = new CustomRetriever()
-  const outputParser = new StringOutputParser()
-
-  const setupAndRetrieval = RunnableMap.from({
-    context: new RunnableLambda({
-      func: (input: string) =>
-        retriever
-          .invoke(input, {
-            callbacks: [trace.getLangChainCallback()]
-          })
-          .then(response => response[0].pageContent)
-    }).withConfig({ runName: 'contextRetriever' }),
-    question: new RunnablePassthrough()
-  })
-
-  const chain = setupAndRetrieval.pipe(prompt).pipe(model).pipe(outputParser)
-
-  const stream = await chain.stream(message, {
-    callbacks: [trace.getLangChainCallback()]
-  })
-
-  let textStream = createStreamableValue('')
-  let textNode = <BotMessage content={textStream.value} />
-  let content = ''
-
-  setTimeout(async () => {
-    for await (const chunk of stream) {
-      textStream.update(chunk)
-      content += chunk
-    }
-
-    textStream?.done()
-    aiState.done({
-      ...aiState.get(),
-      messages: [
-        ...aiState.get().messages,
-        {
-          id: nanoid(),
-          role: 'assistant',
-          content
-        }
-      ]
-    })
-  }, 0)
-
-  return {
-    id: nanoid(),
-    display: textNode
-  }
-}
-
-export type AIState = {
-  chatId: string
-  messages: Message[]
-}
-
-export type UIState = {
-  id: string
-  display: React.ReactNode
-}[]
-
-export const LangChainRAGAI = createAI<AIState, UIState>({
-  actions: {
-    submitUserMessage
-  },
-  initialUIState: [],
-  initialAIState: { chatId: nanoid(), messages: [] },
-  onGetUIState: async () => {
-    'use server'
-
-    return undefined
-  },
-  onSetAIState: async ({ state }) => {
-    'use server'
-
-    return
-  }
-})
-
-export class CustomRetriever extends BaseRetriever {
-  lc_namespace = ['langchain', 'retrievers']
-
-  constructor(fields?: BaseRetrieverInput) {
-    super(fields)
-  }
-
-  async _getRelevantDocuments(
-    query: string,
-    _runManager?: CallbackManagerForRetrieverRun
-  ): Promise<Document[]> {
-    console.log('query', query)
-    return [
-      new Document({
-        pageContent: `Some document pertaining to ${query}`,
-        metadata: {}
-      }),
-      new Document({
-        pageContent: `Some other document pertaining to ${query}`,
-        metadata: {}
-      })
-    ]
-  }
-}

package/example/lib/chat/langchain.tsx
@@ -1,112 +0,0 @@
-import 'server-only'
-
-import { createAI, createStreamableValue, getMutableAIState } from 'ai/rsc'
-
-import { BotMessage } from '@/components/stocks'
-
-import { Message } from '@/lib/types'
-import { nanoid } from '@/lib/utils'
-import { StringOutputParser } from '@langchain/core/output_parsers'
-import { ChatPromptTemplate } from '@langchain/core/prompts'
-import { ChatOpenAI } from '@langchain/openai'
-import { LangWatch } from 'langwatch'
-
-async function submitUserMessage(message: string) {
-  'use server'
-
-  const langwatch = new LangWatch()
-  langwatch.on('error', e => {
-    console.log('Error from LangWatch:', e)
-  })
-
-  const trace = langwatch.getTrace()
-
-  const aiState = getMutableAIState<typeof LangChainAI>()
-
-  aiState.update({
-    ...aiState.get(),
-    messages: [
-      {
-        id: nanoid(),
-        role: 'system',
-        content: 'Translate the following from English into Italian'
-      },
-      {
-        id: nanoid(),
-        role: 'user',
-        content: message
-      }
-    ]
-  })
-
-  const prompt = ChatPromptTemplate.fromMessages([
-    ['system', 'Translate the following from English into Italian'],
-    ['human', '{input}']
-  ])
-  const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
-  const outputParser = new StringOutputParser()
-
-  const chain = prompt.pipe(model).pipe(outputParser)
-
-  const stream = await chain.stream(
-    { input: message },
-    { callbacks: [trace.getLangChainCallback()] }
-  )
-
-  let textStream = createStreamableValue('')
-  let textNode = <BotMessage content={textStream.value} />
-  let content = ''
-
-  setTimeout(async () => {
-    for await (const chunk of stream) {
-      textStream.update(chunk)
-      content += chunk
-    }
-
-    textStream?.done()
-    aiState.done({
-      ...aiState.get(),
-      messages: [
-        ...aiState.get().messages,
-        {
-          id: nanoid(),
-          role: 'assistant',
-          content
-        }
-      ]
-    })
-  }, 0)
-
-  return {
-    id: nanoid(),
-    display: textNode
-  }
-}
-
-export type AIState = {
-  chatId: string
-  messages: Message[]
-}
-
-export type UIState = {
-  id: string
-  display: React.ReactNode
-}[]
-
-export const LangChainAI = createAI<AIState, UIState>({
-  actions: {
-    submitUserMessage
-  },
-  initialUIState: [],
-  initialAIState: { chatId: nanoid(), messages: [] },
-  onGetUIState: async () => {
-    'use server'
-
-    return undefined
-  },
-  onSetAIState: async ({ state }) => {
-    'use server'
-
-    return
-  }
-})

package/example/lib/chat/late-update.tsx
@@ -1,208 +0,0 @@
-import 'server-only'
-
-import { openai } from '@ai-sdk/openai'
-import {
-  createAI,
-  createStreamableValue,
-  getAIState,
-  getMutableAIState,
-  streamUI
-} from 'ai/rsc'
-
-import { BotMessage } from '@/components/stocks'
-
-import { saveChat } from '@/app/actions'
-import { auth } from '@/auth'
-import { SpinnerMessage, UserMessage } from '@/components/stocks/message'
-import { Chat, Message } from '@/lib/types'
-import { nanoid } from '@/lib/utils'
-import { LangWatch, convertFromVercelAIMessages } from 'langwatch'
-
-async function submitUserMessage(content: string) {
-  'use server'
-
-  const langwatch = new LangWatch()
-  langwatch.on('error', e => {
-    console.log('Error from LangWatch:', e)
-  })
-
-  const trace = langwatch.getTrace()
-
-  const aiState = getMutableAIState<typeof LateUpdateTracing>()
-
-  aiState.update({
-    ...aiState.get(),
-    messages: [
-      ...aiState.get().messages,
-      {
-        id: nanoid(),
-        role: 'user',
-        content
-      }
-    ]
-  })
-
-  const system = "You are a helpful assistant."
-
-  const span = trace.startLLMSpan({
-    model: 'gpt-4o-mini',
-    input: {
-      type: 'chat_messages',
-      value: [
-        {
-          role: 'system',
-          content: system
-        },
-        ...convertFromVercelAIMessages(aiState.get().messages)
-      ]
-    }
-  })
-
-  const onFinish = (output: Message[]) => {
-    aiState.done({
-      ...aiState.get(),
-      messages: [...aiState.get().messages, ...output]
-    })
-
-    span.end({
-      output: {
-        type: 'chat_messages',
-        value: convertFromVercelAIMessages(output)
-      }
-    })
-
-    setTimeout(() => {
-      span.end({
-        params: {
-          late_update_at: (new Date()).toISOString()
-        }
-      })
-    }, 5000);
-  }
-
-  let textStream: undefined | ReturnType<typeof createStreamableValue<string>>
-  let textNode: undefined | React.ReactNode
-
-  const result = await streamUI({
-    model: openai('gpt-4o-mini'),
-    initial: <SpinnerMessage />,
-    system,
-    messages: [
-      ...aiState.get().messages.map((message: any) => ({
-        role: message.role,
-        content: message.content,
-        name: message.name
-      }))
-    ],
-    text: ({ content, done, delta }) => {
-      if (!textStream) {
-        textStream = createStreamableValue('')
-        textNode = <BotMessage content={textStream.value} />
-      }
-
-      if (done) {
-        textStream.done()
-
-        onFinish([
-          {
-            id: nanoid(),
-            role: 'assistant',
-            content
-          }
-        ])
-      } else {
-        textStream.update(delta)
-      }
-
-      return textNode
-    }
-  })
-
-  return {
-    id: nanoid(),
-    display: result.value
-  }
-}
-
-export type AIState = {
-  chatId: string
-  messages: Message[]
-}
-
-export type UIState = {
-  id: string
-  display: React.ReactNode
-}[]
-
-export const LateUpdateTracing = createAI<AIState, UIState>({
-  actions: {
-    submitUserMessage
-  },
-  initialUIState: [],
-  initialAIState: { chatId: nanoid(), messages: [] },
-  onGetUIState: async () => {
-    'use server'
-
-    const session = await auth()
-
-    if (session && session.user) {
-      const aiState = getAIState()
-
-      if (aiState) {
-        // @ts-ignore
-        const uiState = getUIStateFromAIState(aiState)
-        return uiState
-      }
-    } else {
-      return
-    }
-  },
-  onSetAIState: async ({ state }) => {
-    'use server'
-
-    const session = await auth()
-
-    if (session && session.user) {
-      const { chatId, messages } = state
-
-      const createdAt = new Date()
-      const userId = session.user.id as string
-      const path = `/chat/${chatId}`
-
-      const firstMessageContent = messages[0].content as string
-      const title = firstMessageContent.substring(0, 100)
-
-      const chat: Chat = {
-        id: chatId,
-        title,
-        userId,
-        createdAt,
-        messages,
-        path
-      }
-
-      await saveChat(chat)
-    } else {
-      return
-    }
-  }
-})
-
-export const getUIStateFromAIState = (aiState: Chat) => {
-  return aiState.messages
-    .filter(message => message.role !== 'system')
-    .map((message, index) => ({
-      id: `${aiState.chatId}-${index}`,
-      display:
-        message.role === 'tool' ? (
-          message.content.map(tool => {
-            return `Tool used: ${tool.toolName}`
-          })
-        ) : message.role === 'user' ? (
-          <UserMessage>{message.content as string}</UserMessage>
-        ) : message.role === 'assistant' &&
-          typeof message.content === 'string' ? (
-          <BotMessage content={message.content} />
-        ) : null
-    }))
-}