langwatch 0.1.7 → 0.3.0-prerelease.1

This diff shows the contents of publicly available package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as published in that registry.
Files changed (235)
  1. package/.editorconfig +16 -0
  2. package/LICENSE +7 -0
  3. package/README.md +268 -1
  4. package/copy-types.sh +19 -8
  5. package/examples/langchain/.env.example +2 -0
  6. package/examples/langchain/README.md +42 -0
  7. package/examples/langchain/package-lock.json +2930 -0
  8. package/examples/langchain/package.json +27 -0
  9. package/examples/langchain/src/cli-markdown.d.ts +137 -0
  10. package/examples/langchain/src/index.ts +109 -0
  11. package/examples/langchain/tsconfig.json +25 -0
  12. package/examples/langgraph/.env.example +2 -0
  13. package/examples/langgraph/README.md +42 -0
  14. package/examples/langgraph/package-lock.json +3031 -0
  15. package/examples/langgraph/package.json +28 -0
  16. package/examples/langgraph/src/cli-markdown.d.ts +137 -0
  17. package/examples/langgraph/src/index.ts +196 -0
  18. package/examples/langgraph/tsconfig.json +25 -0
  19. package/examples/mastra/.env.example +2 -0
  20. package/examples/mastra/README.md +57 -0
  21. package/examples/mastra/package-lock.json +5296 -0
  22. package/examples/mastra/package.json +32 -0
  23. package/examples/mastra/src/cli-markdown.d.ts +137 -0
  24. package/examples/mastra/src/index.ts +120 -0
  25. package/examples/mastra/src/mastra/agents/weather-agent.ts +30 -0
  26. package/examples/mastra/src/mastra/index.ts +21 -0
  27. package/examples/mastra/src/mastra/tools/weather-tool.ts +102 -0
  28. package/examples/mastra/tsconfig.json +25 -0
  29. package/examples/vercel-ai/.env.example +2 -0
  30. package/examples/vercel-ai/README.md +38 -0
  31. package/examples/vercel-ai/package-lock.json +2571 -0
  32. package/examples/vercel-ai/package.json +27 -0
  33. package/examples/vercel-ai/src/cli-markdown.d.ts +137 -0
  34. package/examples/vercel-ai/src/index.ts +110 -0
  35. package/examples/vercel-ai/src/instrumentation.ts +9 -0
  36. package/examples/vercel-ai/tsconfig.json +25 -0
  37. package/package.json +80 -33
  38. package/src/__tests__/client-browser.test.ts +92 -0
  39. package/src/__tests__/client-node.test.ts +76 -0
  40. package/src/__tests__/client.test.ts +71 -0
  41. package/src/__tests__/integration/client-browser.test.ts +46 -0
  42. package/src/__tests__/integration/client-node.test.ts +46 -0
  43. package/src/client-browser.ts +70 -0
  44. package/src/client-node.ts +82 -0
  45. package/src/client-shared.ts +72 -0
  46. package/src/client.ts +119 -0
  47. package/src/evaluation/__tests__/record-evaluation.test.ts +112 -0
  48. package/src/evaluation/__tests__/run-evaluation.test.ts +171 -0
  49. package/src/evaluation/index.ts +2 -0
  50. package/src/evaluation/record-evaluation.ts +101 -0
  51. package/src/evaluation/run-evaluation.ts +133 -0
  52. package/src/evaluation/tracer.ts +3 -0
  53. package/src/evaluation/types.ts +23 -0
  54. package/src/index.ts +10 -591
  55. package/src/internal/api/__tests__/errors.test.ts +98 -0
  56. package/src/internal/api/client.ts +30 -0
  57. package/src/internal/api/errors.ts +32 -0
  58. package/src/internal/generated/types/.gitkeep +0 -0
  59. package/src/observability/__tests__/integration/base.test.ts +74 -0
  60. package/src/observability/__tests__/integration/browser-setup-ordering.test.ts +60 -0
  61. package/src/observability/__tests__/integration/complex-nested-spans.test.ts +29 -0
  62. package/src/observability/__tests__/integration/error-handling.test.ts +24 -0
  63. package/src/observability/__tests__/integration/langwatch-disabled-otel.test.ts +24 -0
  64. package/src/observability/__tests__/integration/langwatch-first-then-vercel.test.ts +24 -0
  65. package/src/observability/__tests__/integration/multiple-setup-attempts.test.ts +27 -0
  66. package/src/observability/__tests__/integration/otel-ordering.test.ts +27 -0
  67. package/src/observability/__tests__/integration/vercel-configurations.test.ts +20 -0
  68. package/src/observability/__tests__/integration/vercel-first-then-langwatch.test.ts +27 -0
  69. package/src/observability/__tests__/span.test.ts +214 -0
  70. package/src/observability/__tests__/trace.test.ts +180 -0
  71. package/src/observability/exporters/index.ts +1 -0
  72. package/src/observability/exporters/langwatch-exporter.ts +53 -0
  73. package/src/observability/index.ts +4 -0
  74. package/src/observability/instrumentation/langchain/__tests__/integration/langchain-chatbot.test.ts +112 -0
  75. package/src/observability/instrumentation/langchain/__tests__/langchain.test.ts +284 -0
  76. package/src/observability/instrumentation/langchain/index.ts +624 -0
  77. package/src/observability/processors/__tests__/filterable-batch-span-exporter.test.ts +98 -0
  78. package/src/observability/processors/filterable-batch-span-processor.ts +99 -0
  79. package/src/observability/processors/index.ts +1 -0
  80. package/src/observability/semconv/attributes.ts +185 -0
  81. package/src/observability/semconv/events.ts +42 -0
  82. package/src/observability/semconv/index.ts +16 -0
  83. package/src/observability/semconv/values.ts +159 -0
  84. package/src/observability/span.ts +728 -0
  85. package/src/observability/trace.ts +301 -0
  86. package/src/prompt/__tests__/prompt.test.ts +139 -0
  87. package/src/prompt/get-prompt-version.ts +49 -0
  88. package/src/prompt/get-prompt.ts +44 -0
  89. package/src/prompt/index.ts +3 -0
  90. package/src/prompt/prompt.ts +133 -0
  91. package/src/prompt/service.ts +221 -0
  92. package/src/prompt/tracer.ts +3 -0
  93. package/src/prompt/types.ts +0 -0
  94. package/ts-to-zod.config.js +11 -0
  95. package/tsconfig.json +3 -9
  96. package/tsup.config.ts +11 -1
  97. package/vitest.config.ts +1 -0
  98. package/dist/chunk-FWBCQQYZ.mjs +0 -711
  99. package/dist/chunk-FWBCQQYZ.mjs.map +0 -1
  100. package/dist/index.d.mts +0 -1010
  101. package/dist/index.d.ts +0 -1010
  102. package/dist/index.js +0 -27294
  103. package/dist/index.js.map +0 -1
  104. package/dist/index.mjs +0 -959
  105. package/dist/index.mjs.map +0 -1
  106. package/dist/utils-B0pgWcps.d.mts +0 -303
  107. package/dist/utils-B0pgWcps.d.ts +0 -303
  108. package/dist/utils.d.mts +0 -2
  109. package/dist/utils.d.ts +0 -2
  110. package/dist/utils.js +0 -703
  111. package/dist/utils.js.map +0 -1
  112. package/dist/utils.mjs +0 -11
  113. package/dist/utils.mjs.map +0 -1
  114. package/example/.env.example +0 -12
  115. package/example/.eslintrc.json +0 -26
  116. package/example/LICENSE +0 -13
  117. package/example/README.md +0 -12
  118. package/example/app/(chat)/chat/[id]/page.tsx +0 -60
  119. package/example/app/(chat)/layout.tsx +0 -14
  120. package/example/app/(chat)/page.tsx +0 -27
  121. package/example/app/actions.ts +0 -156
  122. package/example/app/globals.css +0 -76
  123. package/example/app/guardrails/page.tsx +0 -26
  124. package/example/app/langchain/page.tsx +0 -27
  125. package/example/app/langchain-rag/page.tsx +0 -28
  126. package/example/app/late-update/page.tsx +0 -27
  127. package/example/app/layout.tsx +0 -64
  128. package/example/app/login/actions.ts +0 -71
  129. package/example/app/login/page.tsx +0 -18
  130. package/example/app/manual/page.tsx +0 -27
  131. package/example/app/new/page.tsx +0 -5
  132. package/example/app/opengraph-image.png +0 -0
  133. package/example/app/share/[id]/page.tsx +0 -58
  134. package/example/app/signup/actions.ts +0 -111
  135. package/example/app/signup/page.tsx +0 -18
  136. package/example/app/twitter-image.png +0 -0
  137. package/example/auth.config.ts +0 -42
  138. package/example/auth.ts +0 -45
  139. package/example/components/button-scroll-to-bottom.tsx +0 -36
  140. package/example/components/chat-history.tsx +0 -49
  141. package/example/components/chat-list.tsx +0 -52
  142. package/example/components/chat-message-actions.tsx +0 -40
  143. package/example/components/chat-message.tsx +0 -80
  144. package/example/components/chat-panel.tsx +0 -139
  145. package/example/components/chat-share-dialog.tsx +0 -95
  146. package/example/components/chat.tsx +0 -84
  147. package/example/components/clear-history.tsx +0 -75
  148. package/example/components/empty-screen.tsx +0 -38
  149. package/example/components/external-link.tsx +0 -29
  150. package/example/components/footer.tsx +0 -19
  151. package/example/components/header.tsx +0 -114
  152. package/example/components/login-button.tsx +0 -42
  153. package/example/components/login-form.tsx +0 -97
  154. package/example/components/markdown.tsx +0 -9
  155. package/example/components/prompt-form.tsx +0 -115
  156. package/example/components/providers.tsx +0 -17
  157. package/example/components/sidebar-actions.tsx +0 -125
  158. package/example/components/sidebar-desktop.tsx +0 -19
  159. package/example/components/sidebar-footer.tsx +0 -16
  160. package/example/components/sidebar-item.tsx +0 -124
  161. package/example/components/sidebar-items.tsx +0 -42
  162. package/example/components/sidebar-list.tsx +0 -38
  163. package/example/components/sidebar-mobile.tsx +0 -31
  164. package/example/components/sidebar-toggle.tsx +0 -24
  165. package/example/components/sidebar.tsx +0 -21
  166. package/example/components/signup-form.tsx +0 -95
  167. package/example/components/stocks/events-skeleton.tsx +0 -31
  168. package/example/components/stocks/events.tsx +0 -30
  169. package/example/components/stocks/index.tsx +0 -36
  170. package/example/components/stocks/message.tsx +0 -134
  171. package/example/components/stocks/spinner.tsx +0 -16
  172. package/example/components/stocks/stock-purchase.tsx +0 -146
  173. package/example/components/stocks/stock-skeleton.tsx +0 -22
  174. package/example/components/stocks/stock.tsx +0 -210
  175. package/example/components/stocks/stocks-skeleton.tsx +0 -9
  176. package/example/components/stocks/stocks.tsx +0 -67
  177. package/example/components/tailwind-indicator.tsx +0 -14
  178. package/example/components/theme-toggle.tsx +0 -31
  179. package/example/components/ui/alert-dialog.tsx +0 -141
  180. package/example/components/ui/badge.tsx +0 -36
  181. package/example/components/ui/button.tsx +0 -57
  182. package/example/components/ui/codeblock.tsx +0 -148
  183. package/example/components/ui/dialog.tsx +0 -122
  184. package/example/components/ui/dropdown-menu.tsx +0 -205
  185. package/example/components/ui/icons.tsx +0 -507
  186. package/example/components/ui/input.tsx +0 -25
  187. package/example/components/ui/label.tsx +0 -26
  188. package/example/components/ui/select.tsx +0 -164
  189. package/example/components/ui/separator.tsx +0 -31
  190. package/example/components/ui/sheet.tsx +0 -140
  191. package/example/components/ui/sonner.tsx +0 -31
  192. package/example/components/ui/switch.tsx +0 -29
  193. package/example/components/ui/textarea.tsx +0 -24
  194. package/example/components/ui/tooltip.tsx +0 -30
  195. package/example/components/user-menu.tsx +0 -53
  196. package/example/components.json +0 -17
  197. package/example/instrumentation.ts +0 -11
  198. package/example/lib/chat/guardrails.tsx +0 -181
  199. package/example/lib/chat/langchain-rag.tsx +0 -191
  200. package/example/lib/chat/langchain.tsx +0 -112
  201. package/example/lib/chat/late-update.tsx +0 -208
  202. package/example/lib/chat/manual.tsx +0 -605
  203. package/example/lib/chat/vercel-ai.tsx +0 -576
  204. package/example/lib/hooks/use-copy-to-clipboard.tsx +0 -33
  205. package/example/lib/hooks/use-enter-submit.tsx +0 -23
  206. package/example/lib/hooks/use-local-storage.ts +0 -24
  207. package/example/lib/hooks/use-scroll-anchor.tsx +0 -86
  208. package/example/lib/hooks/use-sidebar.tsx +0 -60
  209. package/example/lib/hooks/use-streamable-text.ts +0 -25
  210. package/example/lib/types.ts +0 -41
  211. package/example/lib/utils.ts +0 -89
  212. package/example/middleware.ts +0 -8
  213. package/example/next-env.d.ts +0 -5
  214. package/example/next.config.js +0 -16
  215. package/example/package-lock.json +0 -9990
  216. package/example/package.json +0 -84
  217. package/example/pnpm-lock.yaml +0 -5712
  218. package/example/postcss.config.js +0 -6
  219. package/example/prettier.config.cjs +0 -34
  220. package/example/public/apple-touch-icon.png +0 -0
  221. package/example/public/favicon-16x16.png +0 -0
  222. package/example/public/favicon.ico +0 -0
  223. package/example/public/next.svg +0 -1
  224. package/example/public/thirteen.svg +0 -1
  225. package/example/public/vercel.svg +0 -1
  226. package/example/tailwind.config.ts +0 -81
  227. package/example/tsconfig.json +0 -35
  228. package/src/LangWatchExporter.ts +0 -91
  229. package/src/evaluations.ts +0 -219
  230. package/src/index.test.ts +0 -402
  231. package/src/langchain.ts +0 -557
  232. package/src/typeUtils.ts +0 -89
  233. package/src/types.ts +0 -79
  234. package/src/utils.ts +0 -205
  235. /package/src/{server/types → internal/generated/openapi}/.gitkeep +0 -0
package/example/lib/chat/guardrails.tsx
@@ -1,181 +0,0 @@
- import 'server-only'
-
- import {
-   createAI,
-   createStreamableValue,
-   getMutableAIState,
-   type MutableAIState
- } from 'ai/rsc'
-
- import { BotMessage } from '@/components/stocks'
-
- import { Message } from '@/lib/types'
- import { nanoid } from '@/lib/utils'
- import { StringOutputParser } from '@langchain/core/output_parsers'
- import { ChatPromptTemplate } from '@langchain/core/prompts'
- import { ChatOpenAI } from '@langchain/openai'
- import { LangWatch, type LangWatchTrace } from 'langwatch'
-
- async function submitUserMessage(message: string) {
-   'use server'
-
-   const langwatch = new LangWatch()
-   langwatch.on('error', e => {
-     console.log('Error from LangWatch:', e)
-   })
-
-   const trace = langwatch.getTrace()
-
-   const aiState = getMutableAIState<typeof Guardrails>()
-   const textStream = createStreamableValue('')
-   const textNode = <BotMessage content={textStream.value} />
-
-   void llmStep({ message, trace, aiState, textStream })
-
-   return {
-     id: nanoid(),
-     display: textNode
-   }
- }
-
- async function llmStep({
-   message,
-   trace,
-   aiState,
-   textStream
- }: {
-   message: string
-   trace: LangWatchTrace
-   aiState: MutableAIState<AIState>
-   textStream: ReturnType<typeof createStreamableValue>
- }) {
-   'use server'
-
-   textStream.update('Running Jailbreak guardrail...\n\n')
-
-   const jailbreakPromise = trace.evaluate({
-     evaluator: 'azure/jailbreak',
-     name: 'Jailbreak Detection',
-     input: message,
-     settings: {},
-     asGuardrail: true
-   })
-
-   const prompt = ChatPromptTemplate.fromMessages([
-     ['system', 'Translate the following from English into Italian'],
-     ['human', '{input}']
-   ])
-   const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
-   const outputParser = new StringOutputParser()
-
-   const chain = prompt.pipe(model).pipe(outputParser)
-
-   const chainPromise = chain.invoke(
-     { input: message },
-     { callbacks: [trace.getLangChainCallback()] }
-   )
-
-   const [jailbreakResult, result] = await Promise.all([
-     jailbreakPromise,
-     chainPromise
-   ])
-
-   if (!jailbreakResult.passed) {
-     textStream.update('Jailbreak detected, stopping execution.')
-     textStream.done()
-     aiState.done()
-     return
-   }
-
-   aiState.update({
-     ...aiState.get(),
-     messages: [
-       {
-         id: nanoid(),
-         role: 'system',
-         content: 'Translate the following from English into Italian'
-       },
-       {
-         id: nanoid(),
-         role: 'user',
-         content: message
-       }
-     ]
-   })
-
-   textStream.update('Running Moderation guardrail...\n\n')
-
-   const moderationGuardrail = await trace.evaluate({
-     evaluator: 'openai/moderation',
-     asGuardrail: true,
-     name: 'Moderation',
-     input: message,
-     output: result, // optional
-     settings: {
-       model: 'text-moderation-stable',
-       categories: {
-         harassment: true,
-         harassment_threatening: true,
-         hate: true,
-         hate_threatening: true,
-         self_harm: true,
-         self_harm_instructions: true,
-         self_harm_intent: true,
-         sexual: true,
-         sexual_minors: true,
-         violence: true,
-         violence_graphic: true
-       }
-     }
-   })
-
-   if (!moderationGuardrail.passed) {
-     textStream.update('Moderation failed, stopping execution.')
-     textStream.done()
-     aiState.done()
-     return
-   }
-
-   textStream.update(result)
-   textStream.done()
-
-   aiState.done({
-     ...aiState.get(),
-     messages: [
-       ...aiState.get().messages,
-       {
-         id: nanoid(),
-         role: 'assistant',
-         content: result
-       }
-     ]
-   })
- }
-
- export type AIState = {
-   chatId: string
-   messages: Message[]
- }
-
- export type UIState = {
-   id: string
-   display: React.ReactNode
- }[]
-
- export const Guardrails = createAI<AIState, UIState>({
-   actions: {
-     submitUserMessage
-   },
-   initialUIState: [],
-   initialAIState: { chatId: nanoid(), messages: [] },
-   onGetUIState: async () => {
-     'use server'
-
-     return undefined
-   },
-   onSetAIState: async ({ state }) => {
-     'use server'
-
-     return
-   }
- })
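Note: the guardrail flow removed in the hunk above can be reduced to a short standalone sketch using only the 0.1.7 calls visible in this diff (new LangWatch(), trace.evaluate() with asGuardrail: true, and trace.getLangChainCallback()); the runGuardedTranslation wrapper name is illustrative, not part of the SDK, and the Next.js/RSC streaming around it is omitted.

import { StringOutputParser } from '@langchain/core/output_parsers'
import { ChatPromptTemplate } from '@langchain/core/prompts'
import { ChatOpenAI } from '@langchain/openai'
import { LangWatch } from 'langwatch'

// Illustrative wrapper (not part of the SDK): run a jailbreak guardrail in
// parallel with a traced LangChain call, and drop the output if it fails.
async function runGuardedTranslation(message: string): Promise<string | undefined> {
  const langwatch = new LangWatch()
  langwatch.on('error', e => {
    console.log('Error from LangWatch:', e)
  })

  const trace = langwatch.getTrace()

  // Kick off the guardrail evaluation without awaiting it yet.
  const jailbreakPromise = trace.evaluate({
    evaluator: 'azure/jailbreak',
    name: 'Jailbreak Detection',
    input: message,
    settings: {},
    asGuardrail: true
  })

  const prompt = ChatPromptTemplate.fromMessages([
    ['system', 'Translate the following from English into Italian'],
    ['human', '{input}']
  ])
  const chain = prompt
    .pipe(new ChatOpenAI({ model: 'gpt-4o-mini' }))
    .pipe(new StringOutputParser())

  // The callback returned by the trace links the chain run to the same trace.
  const chainPromise = chain.invoke(
    { input: message },
    { callbacks: [trace.getLangChainCallback()] }
  )

  const [jailbreakResult, result] = await Promise.all([jailbreakPromise, chainPromise])

  if (!jailbreakResult.passed) return undefined // guardrail tripped; discard output
  return result
}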
package/example/lib/chat/langchain-rag.tsx
@@ -1,191 +0,0 @@
- import 'server-only'
-
- import { openai } from '@ai-sdk/openai'
- import {
-   createAI,
-   createStreamableUI,
-   createStreamableValue,
-   getMutableAIState,
-   streamUI
- } from 'ai/rsc'
-
- import { BotCard, BotMessage, Purchase, Stock } from '@/components/stocks'
-
- import { Events } from '@/components/stocks/events'
- import { SpinnerMessage, UserMessage } from '@/components/stocks/message'
- import { Stocks } from '@/components/stocks/stocks'
- import { Chat, Message } from '@/lib/types'
- import { nanoid } from '@/lib/utils'
- import { LangWatch, convertFromVercelAIMessages } from 'langwatch'
- import { ChatOpenAI } from '@langchain/openai'
- import {
-   ChatPromptTemplate,
-   PromptTemplateInput
- } from '@langchain/core/prompts'
- import {
-   HumanMessage,
-   SystemMessage,
-   AIMessage,
-   ToolMessage,
-   BaseMessageLike
- } from '@langchain/core/messages'
- import { StringOutputParser } from '@langchain/core/output_parsers'
- import { CallbackManagerForRetrieverRun } from '@langchain/core/callbacks/manager'
- import {
-   BaseRetriever,
-   type BaseRetrieverInput
- } from '@langchain/core/retrievers'
- import { Document } from '@langchain/core/documents'
- import {
-   RunnableLambda,
-   RunnableMap,
-   RunnablePassthrough
- } from '@langchain/core/runnables'
-
- async function submitUserMessage(message: string) {
-   'use server'
-
-   const langwatch = new LangWatch()
-   langwatch.on('error', e => {
-     console.log('Error from LangWatch:', e)
-   })
-
-   const trace = langwatch.getTrace()
-
-   const aiState = getMutableAIState<typeof LangChainRAGAI>()
-
-   const messages: BaseMessageLike[] = [
-     ['system', 'Answer based on the retrieved context'],
-     ...(aiState.get().messages.map(message => {
-       if (message.role === 'system') {
-         return ['system', message.content.toString()]
-       }
-       if (message.role === 'user') {
-         return ['human', message.content.toString()]
-       }
-       if (message.role === 'tool') {
-         return ['tool', message.content.toString()]
-       }
-       return ['ai', message.content.toString()]
-     }) as BaseMessageLike[]),
-     ['ai', 'Retrieved the following context: {context}'],
-     ['human', '{question}']
-   ]
-
-   aiState.update({
-     ...aiState.get(),
-     messages: [
-       ...aiState.get().messages,
-       {
-         id: nanoid(),
-         role: 'user',
-         content: message
-       }
-     ]
-   })
-
-   const prompt = ChatPromptTemplate.fromMessages(messages)
-   const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
-   const retriever = new CustomRetriever()
-   const outputParser = new StringOutputParser()
-
-   const setupAndRetrieval = RunnableMap.from({
-     context: new RunnableLambda({
-       func: (input: string) =>
-         retriever
-           .invoke(input, {
-             callbacks: [trace.getLangChainCallback()]
-           })
-           .then(response => response[0].pageContent)
-     }).withConfig({ runName: 'contextRetriever' }),
-     question: new RunnablePassthrough()
-   })
-
-   const chain = setupAndRetrieval.pipe(prompt).pipe(model).pipe(outputParser)
-
-   const stream = await chain.stream(message, {
-     callbacks: [trace.getLangChainCallback()]
-   })
-
-   let textStream = createStreamableValue('')
-   let textNode = <BotMessage content={textStream.value} />
-   let content = ''
-
-   setTimeout(async () => {
-     for await (const chunk of stream) {
-       textStream.update(chunk)
-       content += chunk
-     }
-
-     textStream?.done()
-     aiState.done({
-       ...aiState.get(),
-       messages: [
-         ...aiState.get().messages,
-         {
-           id: nanoid(),
-           role: 'assistant',
-           content
-         }
-       ]
-     })
-   }, 0)
-
-   return {
-     id: nanoid(),
-     display: textNode
-   }
- }
-
- export type AIState = {
-   chatId: string
-   messages: Message[]
- }
-
- export type UIState = {
-   id: string
-   display: React.ReactNode
- }[]
-
- export const LangChainRAGAI = createAI<AIState, UIState>({
-   actions: {
-     submitUserMessage
-   },
-   initialUIState: [],
-   initialAIState: { chatId: nanoid(), messages: [] },
-   onGetUIState: async () => {
-     'use server'
-
-     return undefined
-   },
-   onSetAIState: async ({ state }) => {
-     'use server'
-
-     return
-   }
- })
-
- export class CustomRetriever extends BaseRetriever {
-   lc_namespace = ['langchain', 'retrievers']
-
-   constructor(fields?: BaseRetrieverInput) {
-     super(fields)
-   }
-
-   async _getRelevantDocuments(
-     query: string,
-     _runManager?: CallbackManagerForRetrieverRun
-   ): Promise<Document[]> {
-     console.log('query', query)
-     return [
-       new Document({
-         pageContent: `Some document pertaining to ${query}`,
-         metadata: {}
-       }),
-       new Document({
-         pageContent: `Some other document pertaining to ${query}`,
-         metadata: {}
-       })
-     ]
-   }
- }
package/example/lib/chat/langchain.tsx
@@ -1,112 +0,0 @@
- import 'server-only'
-
- import { createAI, createStreamableValue, getMutableAIState } from 'ai/rsc'
-
- import { BotMessage } from '@/components/stocks'
-
- import { Message } from '@/lib/types'
- import { nanoid } from '@/lib/utils'
- import { StringOutputParser } from '@langchain/core/output_parsers'
- import { ChatPromptTemplate } from '@langchain/core/prompts'
- import { ChatOpenAI } from '@langchain/openai'
- import { LangWatch } from 'langwatch'
-
- async function submitUserMessage(message: string) {
-   'use server'
-
-   const langwatch = new LangWatch()
-   langwatch.on('error', e => {
-     console.log('Error from LangWatch:', e)
-   })
-
-   const trace = langwatch.getTrace()
-
-   const aiState = getMutableAIState<typeof LangChainAI>()
-
-   aiState.update({
-     ...aiState.get(),
-     messages: [
-       {
-         id: nanoid(),
-         role: 'system',
-         content: 'Translate the following from English into Italian'
-       },
-       {
-         id: nanoid(),
-         role: 'user',
-         content: message
-       }
-     ]
-   })
-
-   const prompt = ChatPromptTemplate.fromMessages([
-     ['system', 'Translate the following from English into Italian'],
-     ['human', '{input}']
-   ])
-   const model = new ChatOpenAI({ model: 'gpt-4o-mini' })
-   const outputParser = new StringOutputParser()
-
-   const chain = prompt.pipe(model).pipe(outputParser)
-
-   const stream = await chain.stream(
-     { input: message },
-     { callbacks: [trace.getLangChainCallback()] }
-   )
-
-   let textStream = createStreamableValue('')
-   let textNode = <BotMessage content={textStream.value} />
-   let content = ''
-
-   setTimeout(async () => {
-     for await (const chunk of stream) {
-       textStream.update(chunk)
-       content += chunk
-     }
-
-     textStream?.done()
-     aiState.done({
-       ...aiState.get(),
-       messages: [
-         ...aiState.get().messages,
-         {
-           id: nanoid(),
-           role: 'assistant',
-           content
-         }
-       ]
-     })
-   }, 0)
-
-   return {
-     id: nanoid(),
-     display: textNode
-   }
- }
-
- export type AIState = {
-   chatId: string
-   messages: Message[]
- }
-
- export type UIState = {
-   id: string
-   display: React.ReactNode
- }[]
-
- export const LangChainAI = createAI<AIState, UIState>({
-   actions: {
-     submitUserMessage
-   },
-   initialUIState: [],
-   initialAIState: { chatId: nanoid(), messages: [] },
-   onGetUIState: async () => {
-     'use server'
-
-     return undefined
-   },
-   onSetAIState: async ({ state }) => {
-     'use server'
-
-     return
-   }
- })
package/example/lib/chat/late-update.tsx
@@ -1,208 +0,0 @@
- import 'server-only'
-
- import { openai } from '@ai-sdk/openai'
- import {
-   createAI,
-   createStreamableValue,
-   getAIState,
-   getMutableAIState,
-   streamUI
- } from 'ai/rsc'
-
- import { BotMessage } from '@/components/stocks'
-
- import { saveChat } from '@/app/actions'
- import { auth } from '@/auth'
- import { SpinnerMessage, UserMessage } from '@/components/stocks/message'
- import { Chat, Message } from '@/lib/types'
- import { nanoid } from '@/lib/utils'
- import { LangWatch, convertFromVercelAIMessages } from 'langwatch'
-
- async function submitUserMessage(content: string) {
-   'use server'
-
-   const langwatch = new LangWatch()
-   langwatch.on('error', e => {
-     console.log('Error from LangWatch:', e)
-   })
-
-   const trace = langwatch.getTrace()
-
-   const aiState = getMutableAIState<typeof LateUpdateTracing>()
-
-   aiState.update({
-     ...aiState.get(),
-     messages: [
-       ...aiState.get().messages,
-       {
-         id: nanoid(),
-         role: 'user',
-         content
-       }
-     ]
-   })
-
-   const system = "You are a helpful assistant."
-
-   const span = trace.startLLMSpan({
-     model: 'gpt-4o-mini',
-     input: {
-       type: 'chat_messages',
-       value: [
-         {
-           role: 'system',
-           content: system
-         },
-         ...convertFromVercelAIMessages(aiState.get().messages)
-       ]
-     }
-   })
-
-   const onFinish = (output: Message[]) => {
-     aiState.done({
-       ...aiState.get(),
-       messages: [...aiState.get().messages, ...output]
-     })
-
-     span.end({
-       output: {
-         type: 'chat_messages',
-         value: convertFromVercelAIMessages(output)
-       }
-     })
-
-     setTimeout(() => {
-       span.end({
-         params: {
-           late_update_at: (new Date()).toISOString()
-         }
-       })
-     }, 5000);
-   }
-
-   let textStream: undefined | ReturnType<typeof createStreamableValue<string>>
-   let textNode: undefined | React.ReactNode
-
-   const result = await streamUI({
-     model: openai('gpt-4o-mini'),
-     initial: <SpinnerMessage />,
-     system,
-     messages: [
-       ...aiState.get().messages.map((message: any) => ({
-         role: message.role,
-         content: message.content,
-         name: message.name
-       }))
-     ],
-     text: ({ content, done, delta }) => {
-       if (!textStream) {
-         textStream = createStreamableValue('')
-         textNode = <BotMessage content={textStream.value} />
-       }
-
-       if (done) {
-         textStream.done()
-
-         onFinish([
-           {
-             id: nanoid(),
-             role: 'assistant',
-             content
-           }
-         ])
-       } else {
-         textStream.update(delta)
-       }
-
-       return textNode
-     }
-   })
-
-   return {
-     id: nanoid(),
-     display: result.value
-   }
- }
-
- export type AIState = {
-   chatId: string
-   messages: Message[]
- }
-
- export type UIState = {
-   id: string
-   display: React.ReactNode
- }[]
-
- export const LateUpdateTracing = createAI<AIState, UIState>({
-   actions: {
-     submitUserMessage
-   },
-   initialUIState: [],
-   initialAIState: { chatId: nanoid(), messages: [] },
-   onGetUIState: async () => {
-     'use server'
-
-     const session = await auth()
-
-     if (session && session.user) {
-       const aiState = getAIState()
-
-       if (aiState) {
-         // @ts-ignore
-         const uiState = getUIStateFromAIState(aiState)
-         return uiState
-       }
-     } else {
-       return
-     }
-   },
-   onSetAIState: async ({ state }) => {
-     'use server'
-
-     const session = await auth()
-
-     if (session && session.user) {
-       const { chatId, messages } = state
-
-       const createdAt = new Date()
-       const userId = session.user.id as string
-       const path = `/chat/${chatId}`
-
-       const firstMessageContent = messages[0].content as string
-       const title = firstMessageContent.substring(0, 100)
-
-       const chat: Chat = {
-         id: chatId,
-         title,
-         userId,
-         createdAt,
-         messages,
-         path
-       }
-
-       await saveChat(chat)
-     } else {
-       return
-     }
-   }
- })
-
- export const getUIStateFromAIState = (aiState: Chat) => {
-   return aiState.messages
-     .filter(message => message.role !== 'system')
-     .map((message, index) => ({
-       id: `${aiState.chatId}-${index}`,
-       display:
-         message.role === 'tool' ? (
-           message.content.map(tool => {
-             return `Tool used: ${tool.toolName}`
-           })
-         ) : message.role === 'user' ? (
-           <UserMessage>{message.content as string}</UserMessage>
-         ) : message.role === 'assistant' &&
-           typeof message.content === 'string' ? (
-           <BotMessage content={message.content} />
-         ) : null
-     }))
- }
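Note: the manual span pattern removed in the hunk above (late-update.tsx) boils down to startLLMSpan()/end() on a trace. A minimal standalone sketch follows, using only the 0.1.7 calls shown in this diff; the recordChatCompletion wrapper name and the plain { role, content } message shape (used here instead of convertFromVercelAIMessages) are assumptions for illustration.

import { LangWatch } from 'langwatch'

// Illustrative wrapper (not part of the SDK): open an LLM span with the chat
// input, then close it with the model output.
async function recordChatCompletion(system: string, userMessage: string, output: string) {
  const langwatch = new LangWatch()
  langwatch.on('error', e => {
    console.log('Error from LangWatch:', e)
  })

  const trace = langwatch.getTrace()

  const span = trace.startLLMSpan({
    model: 'gpt-4o-mini',
    input: {
      type: 'chat_messages',
      value: [
        { role: 'system', content: system },
        { role: 'user', content: userMessage }
      ]
    }
  })

  span.end({
    output: {
      type: 'chat_messages',
      value: [{ role: 'assistant', content: output }]
    }
  })
}

As in the removed example, span.end() can be called again later with extra params (the example set late_update_at five seconds after the first end()) to push a late update to the same span.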