langwatch 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/dist/{chunk-AP23NJ57.mjs → chunk-OVS4NSDE.mjs} +373 -2
  2. package/dist/chunk-OVS4NSDE.mjs.map +1 -0
  3. package/dist/index.d.mts +44 -2
  4. package/dist/index.d.ts +44 -2
  5. package/dist/index.js +6256 -472
  6. package/dist/index.js.map +1 -1
  7. package/dist/index.mjs +325 -351
  8. package/dist/index.mjs.map +1 -1
  9. package/dist/{utils-DDcm0z9v.d.mts → utils-K-jSEpnZ.d.mts} +4 -3
  10. package/dist/{utils-DDcm0z9v.d.ts → utils-K-jSEpnZ.d.ts} +4 -3
  11. package/dist/utils.d.mts +1 -1
  12. package/dist/utils.d.ts +1 -1
  13. package/dist/utils.js +370 -0
  14. package/dist/utils.js.map +1 -1
  15. package/dist/utils.mjs +3 -1
  16. package/example/README.md +3 -1
  17. package/example/app/(chat)/chat/[id]/page.tsx +1 -1
  18. package/example/app/(chat)/page.tsx +10 -5
  19. package/example/app/langchain/page.tsx +27 -0
  20. package/example/app/langchain-rag/page.tsx +28 -0
  21. package/example/app/share/[id]/page.tsx +1 -1
  22. package/example/components/chat-list.tsx +1 -1
  23. package/example/components/chat-panel.tsx +1 -1
  24. package/example/components/header.tsx +35 -13
  25. package/example/components/prompt-form.tsx +1 -1
  26. package/example/components/stocks/stock-purchase.tsx +1 -1
  27. package/example/components/stocks/stocks.tsx +1 -1
  28. package/example/lib/chat/langchain-rag.tsx +191 -0
  29. package/example/lib/chat/langchain.tsx +112 -0
  30. package/example/package-lock.json +285 -3
  31. package/example/package.json +1 -0
  32. package/package.json +11 -2
  33. package/src/index.ts +12 -3
  34. package/src/langchain.ts +557 -0
  35. package/src/types.ts +1 -1
  36. package/src/utils.ts +28 -1
  37. package/dist/chunk-AP23NJ57.mjs.map +0 -1
  38. /package/example/lib/chat/{actions.tsx → vercel-ai.tsx} +0 -0
  39. /package/src/{helpers.ts → typeUtils.ts} +0 -0
@@ -0,0 +1,191 @@
1
+ import 'server-only'
2
+
3
+ import { openai } from '@ai-sdk/openai'
4
+ import {
5
+ createAI,
6
+ createStreamableUI,
7
+ createStreamableValue,
8
+ getMutableAIState,
9
+ streamUI
10
+ } from 'ai/rsc'
11
+
12
+ import { BotCard, BotMessage, Purchase, Stock } from '@/components/stocks'
13
+
14
+ import { Events } from '@/components/stocks/events'
15
+ import { SpinnerMessage, UserMessage } from '@/components/stocks/message'
16
+ import { Stocks } from '@/components/stocks/stocks'
17
+ import { Chat, Message } from '@/lib/types'
18
+ import { nanoid } from '@/lib/utils'
19
+ import { LangWatch, convertFromVercelAIMessages } from 'langwatch'
20
+ import { ChatOpenAI } from '@langchain/openai'
21
+ import {
22
+ ChatPromptTemplate,
23
+ PromptTemplateInput
24
+ } from '@langchain/core/prompts'
25
+ import {
26
+ HumanMessage,
27
+ SystemMessage,
28
+ AIMessage,
29
+ ToolMessage,
30
+ BaseMessageLike
31
+ } from '@langchain/core/messages'
32
+ import { StringOutputParser } from '@langchain/core/output_parsers'
33
+ import { CallbackManagerForRetrieverRun } from '@langchain/core/callbacks/manager'
34
+ import {
35
+ BaseRetriever,
36
+ type BaseRetrieverInput
37
+ } from '@langchain/core/retrievers'
38
+ import { Document } from '@langchain/core/documents'
39
+ import {
40
+ RunnableLambda,
41
+ RunnableMap,
42
+ RunnablePassthrough
43
+ } from '@langchain/core/runnables'
44
+
45
/**
 * Server action: answers `message` with a RAG chain (retriever → prompt →
 * model → string parser), streaming the generated text to the client while
 * tracing both the retriever call and the chain run with LangWatch.
 *
 * Returns `{ id, display }` where `display` is a streamable React node that
 * fills in as the model responds.
 */
async function submitUserMessage(message: string) {
  'use server'

  const langwatch = new LangWatch()
  langwatch.on('error', e => {
    console.log('Error from LangWatch:', e)
  })

  const trace = langwatch.getTrace()

  const aiState = getMutableAIState<typeof LangChainRAGAI>()

  // Rebuild the stored history as LangChain message tuples. The trailing
  // 'ai'/'human' entries are templates so ChatPromptTemplate can fill in
  // {context} and {question} at invocation time.
  // The callback parameter is named `historyMessage` so it does not shadow
  // the outer `message` argument (the original shadowed it).
  const messages: BaseMessageLike[] = [
    ['system', 'Answer based on the retrieved context'],
    ...(aiState.get().messages.map(historyMessage => {
      if (historyMessage.role === 'system') {
        return ['system', historyMessage.content.toString()]
      }
      if (historyMessage.role === 'user') {
        return ['human', historyMessage.content.toString()]
      }
      if (historyMessage.role === 'tool') {
        return ['tool', historyMessage.content.toString()]
      }
      return ['ai', historyMessage.content.toString()]
    }) as BaseMessageLike[]),
    ['ai', 'Retrieved the following context: {context}'],
    ['human', '{question}']
  ]

  // Record the incoming user message before kicking off generation.
  aiState.update({
    ...aiState.get(),
    messages: [
      ...aiState.get().messages,
      {
        id: nanoid(),
        role: 'user',
        content: message
      }
    ]
  })

  const prompt = ChatPromptTemplate.fromMessages(messages)
  const model = new ChatOpenAI({ model: 'gpt-3.5-turbo' })
  const retriever = new CustomRetriever()
  const outputParser = new StringOutputParser()

  // Map step: fetch context via the retriever (traced through LangWatch)
  // and pass the raw question through unchanged.
  const setupAndRetrieval = RunnableMap.from({
    context: new RunnableLambda({
      func: (input: string) =>
        retriever
          .invoke(input, {
            callbacks: [trace.getLangChainCallback()]
          })
          // Guard against an empty retrieval result instead of crashing on
          // `response[0]` being undefined.
          .then(response => response[0]?.pageContent ?? '')
    }).withConfig({ runName: 'contextRetriever' }),
    question: new RunnablePassthrough()
  })

  const chain = setupAndRetrieval.pipe(prompt).pipe(model).pipe(outputParser)

  const stream = await chain.stream(message, {
    callbacks: [trace.getLangChainCallback()]
  })

  const textStream = createStreamableValue('')
  const textNode = <BotMessage content={textStream.value} />
  let content = ''

  // Drain the LLM stream off the request path so the action can return the
  // streamable node immediately.
  setTimeout(async () => {
    try {
      for await (const chunk of stream) {
        textStream.update(chunk)
        content += chunk
      }

      textStream.done()
      aiState.done({
        ...aiState.get(),
        messages: [
          ...aiState.get().messages,
          {
            id: nanoid(),
            role: 'assistant',
            content
          }
        ]
      })
    } catch (e) {
      // Surface streaming failures to the client; otherwise the rejection is
      // unhandled and the streamable value never completes.
      textStream.error(e)
    }
  }, 0)

  return {
    id: nanoid(),
    display: textNode
  }
}
139
+
140
/**
 * Server-side chat state shared across actions: the chat id plus the full
 * ordered message history.
 */
export type AIState = {
  chatId: string
  messages: Message[]
}
144
+
145
/**
 * Client-facing UI state: one entry per rendered chat message, each pairing
 * an id with its React node.
 */
export type UIState = {
  id: string
  display: React.ReactNode
}[]
149
+
150
/**
 * RSC AI provider for the LangChain RAG demo chat: exposes the
 * `submitUserMessage` server action and starts with an empty message list
 * under a fresh chat id.
 */
export const LangChainRAGAI = createAI<AIState, UIState>({
  actions: {
    submitUserMessage
  },
  initialUIState: [],
  initialAIState: { chatId: nanoid(), messages: [] },
  onGetUIState: async () => {
    'use server'

    // This demo does not rehydrate UI state from storage.
    return undefined
  },
  onSetAIState: async ({ state }) => {
    'use server'

    // Persistence is intentionally omitted in this example.
    return
  }
})
167
+
168
/**
 * Demo retriever that fabricates two documents echoing the query; stands in
 * for a real vector-store retriever in this example so the RAG chain has
 * something to retrieve.
 */
export class CustomRetriever extends BaseRetriever {
  lc_namespace = ['langchain', 'retrievers']

  // The explicit `constructor(fields?: BaseRetrieverInput)` that only called
  // `super(fields)` was redundant boilerplate; omitting it inherits the same
  // base-class constructor signature.

  /**
   * Returns two synthetic documents mentioning `query`.
   * `_runManager` is accepted per the BaseRetriever contract but unused here.
   */
  async _getRelevantDocuments(
    query: string,
    _runManager?: CallbackManagerForRetrieverRun
  ): Promise<Document[]> {
    console.log('query', query)
    return [
      new Document({
        pageContent: `Some document pertaining to ${query}`,
        metadata: {}
      }),
      new Document({
        pageContent: `Some other document pertaining to ${query}`,
        metadata: {}
      })
    ]
  }
}
@@ -0,0 +1,112 @@
1
+ import 'server-only'
2
+
3
+ import { createAI, createStreamableValue, getMutableAIState } from 'ai/rsc'
4
+
5
+ import { BotMessage } from '@/components/stocks'
6
+
7
+ import { Message } from '@/lib/types'
8
+ import { nanoid } from '@/lib/utils'
9
+ import { StringOutputParser } from '@langchain/core/output_parsers'
10
+ import { ChatPromptTemplate } from '@langchain/core/prompts'
11
+ import { ChatOpenAI } from '@langchain/openai'
12
+ import { LangWatch } from 'langwatch'
13
+
14
/**
 * Server action: translates `message` from English into Italian via a
 * LangChain prompt → ChatOpenAI → string-parser chain, streaming the text
 * back to the client and tracing the run with LangWatch.
 *
 * Returns `{ id, display }` where `display` is a streamable React node that
 * fills in as the model responds.
 */
async function submitUserMessage(message: string) {
  'use server'

  const langwatch = new LangWatch()
  langwatch.on('error', e => {
    console.log('Error from LangWatch:', e)
  })

  const trace = langwatch.getTrace()

  const aiState = getMutableAIState<typeof LangChainAI>()

  // Reset the visible history to the fixed system prompt plus the new user
  // message (this demo is single-turn, so prior messages are replaced).
  aiState.update({
    ...aiState.get(),
    messages: [
      {
        id: nanoid(),
        role: 'system',
        content: 'Translate the following from English into Italian'
      },
      {
        id: nanoid(),
        role: 'user',
        content: message
      }
    ]
  })

  const prompt = ChatPromptTemplate.fromMessages([
    ['system', 'Translate the following from English into Italian'],
    ['human', '{input}']
  ])
  const model = new ChatOpenAI({ model: 'gpt-3.5-turbo' })
  const outputParser = new StringOutputParser()

  const chain = prompt.pipe(model).pipe(outputParser)

  const stream = await chain.stream(
    { input: message },
    { callbacks: [trace.getLangChainCallback()] }
  )

  const textStream = createStreamableValue('')
  const textNode = <BotMessage content={textStream.value} />
  let content = ''

  // Drain the stream off the request path so the streamable node can be
  // returned immediately.
  setTimeout(async () => {
    try {
      for await (const chunk of stream) {
        textStream.update(chunk)
        content += chunk
      }

      textStream.done()
      aiState.done({
        ...aiState.get(),
        messages: [
          ...aiState.get().messages,
          {
            id: nanoid(),
            role: 'assistant',
            content
          }
        ]
      })
    } catch (e) {
      // Surface streaming failures to the client; otherwise the rejection is
      // unhandled and the streamable value never completes.
      textStream.error(e)
    }
  }, 0)

  return {
    id: nanoid(),
    display: textNode
  }
}
85
+
86
/**
 * Server-side chat state shared across actions: the chat id plus the full
 * ordered message history.
 */
export type AIState = {
  chatId: string
  messages: Message[]
}
90
+
91
/**
 * Client-facing UI state: one entry per rendered chat message, each pairing
 * an id with its React node.
 */
export type UIState = {
  id: string
  display: React.ReactNode
}[]
95
+
96
/**
 * RSC AI provider for the plain LangChain translation demo: exposes the
 * `submitUserMessage` server action and starts with an empty message list
 * under a fresh chat id.
 */
export const LangChainAI = createAI<AIState, UIState>({
  actions: {
    submitUserMessage
  },
  initialUIState: [],
  initialAIState: { chatId: nanoid(), messages: [] },
  onGetUIState: async () => {
    'use server'

    // This demo does not rehydrate UI state from storage.
    return undefined
  },
  onSetAIState: async ({ state }) => {
    'use server'

    // Persistence is intentionally omitted in this example.
    return
  }
})