create-start-app 0.4.4 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25) hide show
  1. package/dist/options.js +18 -18
  2. package/package.json +1 -1
  3. package/src/options.ts +18 -18
  4. package/templates/react/add-on/form/info.json +1 -1
  5. package/templates/react/add-on/start/package.json +1 -1
  6. package/templates/react/base/package.json +2 -2
  7. package/templates/react/example/tanchat/assets/src/routes/{example.chat.tsx.ejs → example.chat.tsx} +119 -57
  8. package/templates/react/example/tanchat/assets/src/utils/demo.ai.ts +64 -60
  9. package/templates/react/example/tanchat/package.json +1 -0
  10. package/templates/react/file-router/package.fr.json +1 -1
  11. package/templates/solid/add-on/form/info.json +1 -1
  12. package/templates/solid/example/tanchat/README.md +52 -0
  13. package/templates/solid/example/tanchat/assets/ai-streaming-server/README.md +110 -0
  14. package/templates/solid/example/tanchat/assets/ai-streaming-server/_dot_env.example +1 -0
  15. package/templates/solid/example/tanchat/assets/ai-streaming-server/package.json +26 -0
  16. package/templates/solid/example/tanchat/assets/ai-streaming-server/src/index.ts +102 -0
  17. package/templates/solid/example/tanchat/assets/ai-streaming-server/tsconfig.json +15 -0
  18. package/templates/solid/example/tanchat/assets/src/components/demo.SettingsDialog.tsx +149 -0
  19. package/templates/solid/example/tanchat/assets/src/demo.index.css +227 -0
  20. package/templates/solid/example/tanchat/assets/src/lib/demo-store.ts +13 -0
  21. package/templates/solid/example/tanchat/assets/src/routes/example.chat.tsx +435 -0
  22. package/templates/solid/example/tanchat/assets/src/store/demo.hooks.ts +17 -0
  23. package/templates/solid/example/tanchat/assets/src/store/demo.store.ts +133 -0
  24. package/templates/solid/example/tanchat/info.json +14 -0
  25. package/templates/solid/example/tanchat/package.json +7 -0
package/dist/options.js CHANGED
@@ -211,24 +211,24 @@ export async function promptForOptions(cliOptions) {
211
211
  selectedAddOns = value;
212
212
  }
213
213
  // Select any examples
214
- const selectedExamples = [];
215
- // const examples = allAddOns.filter((addOn) => addOn.type === 'example')
216
- // if (options.typescript && examples.length > 0) {
217
- // const value = await multiselect({
218
- // message: 'Would you like any examples?',
219
- // options: examples.map((addOn) => ({
220
- // value: addOn.id,
221
- // label: addOn.name,
222
- // hint: addOn.description,
223
- // })),
224
- // required: false,
225
- // })
226
- // if (isCancel(value)) {
227
- // cancel('Operation cancelled.')
228
- // process.exit(0)
229
- // }
230
- // selectedExamples = value
231
- // }
214
+ let selectedExamples = [];
215
+ const examples = allAddOns.filter((addOn) => addOn.type === 'example');
216
+ if (options.typescript && examples.length > 0) {
217
+ const value = await multiselect({
218
+ message: 'Would you like any examples?',
219
+ options: examples.map((addOn) => ({
220
+ value: addOn.id,
221
+ label: addOn.name,
222
+ hint: addOn.description,
223
+ })),
224
+ required: false,
225
+ });
226
+ if (isCancel(value)) {
227
+ cancel('Operation cancelled.');
228
+ process.exit(0);
229
+ }
230
+ selectedExamples = value;
231
+ }
232
232
  if (selectedAddOns.length > 0 || selectedExamples.length > 0) {
233
233
  options.chosenAddOns = await finalizeAddOns(options.framework, options.mode, [...selectedAddOns, ...selectedExamples]);
234
234
  options.tailwind = true;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "create-start-app",
3
- "version": "0.4.4",
3
+ "version": "0.5.0",
4
4
  "description": "Tanstack Application Builder",
5
5
  "bin": "./dist/index.js",
6
6
  "type": "module",
package/src/options.ts CHANGED
@@ -251,25 +251,25 @@ export async function promptForOptions(
251
251
  }
252
252
 
253
253
  // Select any examples
254
- const selectedExamples: Array<string> = []
255
- // const examples = allAddOns.filter((addOn) => addOn.type === 'example')
256
- // if (options.typescript && examples.length > 0) {
257
- // const value = await multiselect({
258
- // message: 'Would you like any examples?',
259
- // options: examples.map((addOn) => ({
260
- // value: addOn.id,
261
- // label: addOn.name,
262
- // hint: addOn.description,
263
- // })),
264
- // required: false,
265
- // })
254
+ let selectedExamples: Array<string> = []
255
+ const examples = allAddOns.filter((addOn) => addOn.type === 'example')
256
+ if (options.typescript && examples.length > 0) {
257
+ const value = await multiselect({
258
+ message: 'Would you like any examples?',
259
+ options: examples.map((addOn) => ({
260
+ value: addOn.id,
261
+ label: addOn.name,
262
+ hint: addOn.description,
263
+ })),
264
+ required: false,
265
+ })
266
266
 
267
- // if (isCancel(value)) {
268
- // cancel('Operation cancelled.')
269
- // process.exit(0)
270
- // }
271
- // selectedExamples = value
272
- // }
267
+ if (isCancel(value)) {
268
+ cancel('Operation cancelled.')
269
+ process.exit(0)
270
+ }
271
+ selectedExamples = value
272
+ }
273
273
 
274
274
  if (selectedAddOns.length > 0 || selectedExamples.length > 0) {
275
275
  options.chosenAddOns = await finalizeAddOns(
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "Form",
3
- "description": "TansStack Form",
3
+ "description": "TanStack Form",
4
4
  "phase": "add-on",
5
5
  "templates": ["file-router", "code-router"],
6
6
  "link": "https://tanstack.com/form/latest",
@@ -6,7 +6,7 @@
6
6
  },
7
7
  "dependencies": {
8
8
  "@tailwindcss/postcss": "^4.0.7",
9
- "@tanstack/react-start": "^1.111.12",
9
+ "@tanstack/react-start": "^1.112.1",
10
10
  "postcss": "^8.5.2",
11
11
  "vinxi": "^0.5.3",
12
12
  "vite-tsconfig-paths": "^5.1.4"
@@ -9,8 +9,8 @@
9
9
  "test": "vitest run"
10
10
  },
11
11
  "dependencies": {
12
- "@tanstack/react-router": "^1.104.1",
13
- "@tanstack/router-devtools": "^1.104.3",
12
+ "@tanstack/react-router": "^1.112.0",
13
+ "@tanstack/router-devtools": "^1.112.0",
14
14
  "react": "^19.0.0",
15
15
  "react-dom": "^19.0.0"
16
16
  },
@@ -1,15 +1,26 @@
1
1
  import { createFileRoute } from '@tanstack/react-router'
2
2
  import { useEffect, useState, useRef } from 'react'
3
- import { PlusCircle, MessageCircle, ChevronLeft, ChevronRight, Trash2, X, Menu, Send, Settings, User, LogOut, Edit2 } from 'lucide-react'
3
+ import {
4
+ PlusCircle,
5
+ MessageCircle,
6
+ Trash2,
7
+ Send,
8
+ Settings,
9
+ Edit2,
10
+ } from 'lucide-react'
4
11
  import ReactMarkdown from 'react-markdown'
5
12
  import rehypeRaw from 'rehype-raw'
6
13
  import rehypeSanitize from 'rehype-sanitize'
7
14
  import rehypeHighlight from 'rehype-highlight'
15
+
8
16
  import { SettingsDialog } from '../components/demo.SettingsDialog'
9
17
  import { useAppState } from '../store/demo.hooks'
10
18
  import { store } from '../store/demo.store'
11
- import { genAIResponse, type Message } from '../utils/demo.ai'
12
- import "../demo.index.css"
19
+ import { genAIResponse } from '../utils/demo.ai'
20
+
21
+ import type { Message } from '../utils/demo.ai'
22
+
23
+ import '../demo.index.css'
13
24
 
14
25
  function Home() {
15
26
  const {
@@ -23,22 +34,23 @@ function Home() {
23
34
  addMessage,
24
35
  setLoading,
25
36
  getCurrentConversation,
26
- getActivePrompt
37
+ getActivePrompt,
27
38
  } = useAppState()
28
39
 
29
40
  const currentConversation = getCurrentConversation(store.state)
30
41
  const messages = currentConversation?.messages || []
31
-
42
+
32
43
  // Local state
33
44
  const [input, setInput] = useState('')
34
45
  const [editingChatId, setEditingChatId] = useState<string | null>(null)
35
46
  const [isSettingsOpen, setIsSettingsOpen] = useState(false)
36
- const [isDropdownOpen, setIsDropdownOpen] = useState(false)
37
47
  const messagesContainerRef = useRef<HTMLDivElement>(null)
48
+ const [pendingMessage, setPendingMessage] = useState<Message | null>(null)
38
49
 
39
50
  const scrollToBottom = () => {
40
51
  if (messagesContainerRef.current) {
41
- messagesContainerRef.current.scrollTop = messagesContainerRef.current.scrollHeight
52
+ messagesContainerRef.current.scrollTop =
53
+ messagesContainerRef.current.scrollHeight
42
54
  }
43
55
  }
44
56
 
@@ -50,7 +62,7 @@ function Home() {
50
62
  const handleSubmit = async (e: React.FormEvent) => {
51
63
  e.preventDefault()
52
64
  if (!input.trim() || isLoading) return
53
-
65
+
54
66
  const currentInput = input
55
67
  setInput('') // Clear input early for better UX
56
68
  setLoading(true)
@@ -64,7 +76,7 @@ function Home() {
64
76
  const newConversation = {
65
77
  id: conversationId,
66
78
  title: currentInput.trim().slice(0, 30),
67
- messages: []
79
+ messages: [],
68
80
  }
69
81
  addConversation(newConversation)
70
82
  }
@@ -84,7 +96,7 @@ function Home() {
84
96
  if (activePrompt) {
85
97
  systemPrompt = {
86
98
  value: activePrompt.content,
87
- enabled: true
99
+ enabled: true,
88
100
  }
89
101
  }
90
102
 
@@ -92,21 +104,44 @@ function Home() {
92
104
  const response = await genAIResponse({
93
105
  data: {
94
106
  messages: [...messages, userMessage],
95
- systemPrompt
96
- }
107
+ systemPrompt,
108
+ },
97
109
  })
98
110
 
99
- if (!response.text?.trim()) {
100
- throw new Error('Received empty response from AI')
111
+ const reader = response.body?.getReader()
112
+ if (!reader) {
113
+ throw new Error('No reader found in response')
101
114
  }
102
115
 
103
- const assistantMessage: Message = {
116
+ const decoder = new TextDecoder()
117
+
118
+ let done = false
119
+ let newMessage = {
104
120
  id: (Date.now() + 1).toString(),
105
121
  role: 'assistant' as const,
106
- content: response.text
122
+ content: '',
123
+ }
124
+ while (!done) {
125
+ const out = await reader.read()
126
+ done = out.done
127
+ if (!done) {
128
+ try {
129
+ const json = JSON.parse(decoder.decode(out.value))
130
+ if (json.type === 'content_block_delta') {
131
+ newMessage = {
132
+ ...newMessage,
133
+ content: newMessage.content + json.delta.text,
134
+ }
135
+ setPendingMessage(newMessage)
136
+ }
137
+ } catch (e) {}
138
+ }
107
139
  }
108
140
 
109
- addMessage(conversationId, assistantMessage)
141
+ setPendingMessage(null)
142
+ if (newMessage.content.trim()) {
143
+ addMessage(conversationId, newMessage)
144
+ }
110
145
  } catch (error) {
111
146
  console.error('Error:', error)
112
147
  const errorMessage: Message = {
@@ -126,7 +161,7 @@ function Home() {
126
161
  const newConversation = {
127
162
  id: Date.now().toString(),
128
163
  title: 'New Chat',
129
- messages: []
164
+ messages: [],
130
165
  }
131
166
  addConversation(newConversation)
132
167
  }
@@ -184,7 +219,9 @@ function Home() {
184
219
  <input
185
220
  type="text"
186
221
  value={chat.title}
187
- onChange={(e) => handleUpdateChatTitle(chat.id, e.target.value)}
222
+ onChange={(e) =>
223
+ handleUpdateChatTitle(chat.id, e.target.value)
224
+ }
188
225
  onBlur={() => setEditingChatId(null)}
189
226
  onKeyDown={(e) => {
190
227
  if (e.key === 'Enter') {
@@ -229,37 +266,47 @@ function Home() {
229
266
  {currentConversationId ? (
230
267
  <>
231
268
  {/* Messages */}
232
- <div ref={messagesContainerRef} className="flex-1 overflow-y-auto pb-24">
269
+ <div
270
+ ref={messagesContainerRef}
271
+ className="flex-1 overflow-y-auto pb-24"
272
+ >
233
273
  <div className="max-w-3xl mx-auto w-full px-4">
234
- {messages.map((message) => (
235
- <div
236
- key={message.id}
237
- className={`py-6 ${message.role === 'assistant'
238
- ? 'bg-gradient-to-r from-orange-500/5 to-red-600/5'
239
- : 'bg-transparent'
240
- }`}
241
- >
242
- <div className="flex items-start gap-4 max-w-3xl mx-auto w-full">
243
- {message.role === 'assistant' ? (
244
- <div className="w-8 h-8 rounded-lg bg-gradient-to-r from-orange-500 to-red-600 mt-2 flex items-center justify-center text-sm font-medium text-white flex-shrink-0">
245
- AI
246
- </div>
247
- ) : (
248
- <div className="w-8 h-8 rounded-lg bg-gray-700 flex items-center justify-center text-sm font-medium text-white flex-shrink-0">
249
- Y
274
+ {[...messages, pendingMessage]
275
+ .filter((v) => v)
276
+ .map((message) => (
277
+ <div
278
+ key={message!.id}
279
+ className={`py-6 ${
280
+ message!.role === 'assistant'
281
+ ? 'bg-gradient-to-r from-orange-500/5 to-red-600/5'
282
+ : 'bg-transparent'
283
+ }`}
284
+ >
285
+ <div className="flex items-start gap-4 max-w-3xl mx-auto w-full">
286
+ {message!.role === 'assistant' ? (
287
+ <div className="w-8 h-8 rounded-lg bg-gradient-to-r from-orange-500 to-red-600 mt-2 flex items-center justify-center text-sm font-medium text-white flex-shrink-0">
288
+ AI
289
+ </div>
290
+ ) : (
291
+ <div className="w-8 h-8 rounded-lg bg-gray-700 flex items-center justify-center text-sm font-medium text-white flex-shrink-0">
292
+ Y
293
+ </div>
294
+ )}
295
+ <div className="flex-1 min-w-0">
296
+ <ReactMarkdown
297
+ className="prose dark:prose-invert max-w-none"
298
+ rehypePlugins={[
299
+ rehypeRaw,
300
+ rehypeSanitize,
301
+ rehypeHighlight,
302
+ ]}
303
+ >
304
+ {message!.content}
305
+ </ReactMarkdown>
250
306
  </div>
251
- )}
252
- <div className="flex-1 min-w-0">
253
- <ReactMarkdown
254
- className="prose dark:prose-invert max-w-none"
255
- rehypePlugins={[rehypeRaw, rehypeSanitize, rehypeHighlight]}
256
- >
257
- {message.content}
258
- </ReactMarkdown>
259
307
  </div>
260
308
  </div>
261
- </div>
262
- ))}
309
+ ))}
263
310
  {isLoading && (
264
311
  <div className="py-6 bg-gradient-to-r from-orange-500/5 to-red-600/5">
265
312
  <div className="flex items-start gap-4 max-w-3xl mx-auto w-full">
@@ -268,16 +315,29 @@ function Home() {
268
315
  <div className="absolute inset-[2px] rounded-lg bg-gray-900 flex items-center justify-center">
269
316
  <div className="relative w-full h-full rounded-lg bg-gradient-to-r from-orange-500 to-red-600 flex items-center justify-center">
270
317
  <div className="absolute inset-0 rounded-lg bg-gradient-to-r from-orange-500 to-red-600 animate-pulse"></div>
271
- <span className="relative z-10 text-sm font-medium text-white">AI</span>
318
+ <span className="relative z-10 text-sm font-medium text-white">
319
+ AI
320
+ </span>
272
321
  </div>
273
322
  </div>
274
323
  </div>
275
324
  <div className="flex items-center gap-3">
276
- <div className="text-gray-400 font-medium text-lg">Thinking</div>
325
+ <div className="text-gray-400 font-medium text-lg">
326
+ Thinking
327
+ </div>
277
328
  <div className="flex gap-2">
278
- <div className="w-2 h-2 rounded-full bg-orange-500 animate-[bounce_0.8s_infinite]" style={{ animationDelay: '0ms' }}></div>
279
- <div className="w-2 h-2 rounded-full bg-orange-500 animate-[bounce_0.8s_infinite]" style={{ animationDelay: '200ms' }}></div>
280
- <div className="w-2 h-2 rounded-full bg-orange-500 animate-[bounce_0.8s_infinite]" style={{ animationDelay: '400ms' }}></div>
329
+ <div
330
+ className="w-2 h-2 rounded-full bg-orange-500 animate-[bounce_0.8s_infinite]"
331
+ style={{ animationDelay: '0ms' }}
332
+ ></div>
333
+ <div
334
+ className="w-2 h-2 rounded-full bg-orange-500 animate-[bounce_0.8s_infinite]"
335
+ style={{ animationDelay: '200ms' }}
336
+ ></div>
337
+ <div
338
+ className="w-2 h-2 rounded-full bg-orange-500 animate-[bounce_0.8s_infinite]"
339
+ style={{ animationDelay: '400ms' }}
340
+ ></div>
281
341
  </div>
282
342
  </div>
283
343
  </div>
@@ -305,9 +365,10 @@ function Home() {
305
365
  rows={1}
306
366
  style={{ minHeight: '44px', maxHeight: '200px' }}
307
367
  onInput={(e) => {
308
- const target = e.target as HTMLTextAreaElement;
309
- target.style.height = 'auto';
310
- target.style.height = Math.min(target.scrollHeight, 200) + 'px';
368
+ const target = e.target as HTMLTextAreaElement
369
+ target.style.height = 'auto'
370
+ target.style.height =
371
+ Math.min(target.scrollHeight, 200) + 'px'
311
372
  }}
312
373
  />
313
374
  <button
@@ -329,7 +390,8 @@ function Home() {
329
390
  <span className="text-white">TanStack</span> Chat
330
391
  </h1>
331
392
  <p className="text-gray-400 mb-6 w-2/3 mx-auto text-lg">
332
- You can ask me about anything, I might or might not have a good answer, but you can still ask.
393
+ You can ask me about anything, I might or might not have a good
394
+ answer, but you can still ask.
333
395
  </p>
334
396
  <form onSubmit={handleSubmit}>
335
397
  <div className="relative max-w-xl mx-auto">
@@ -370,6 +432,6 @@ function Home() {
370
432
  )
371
433
  }
372
434
 
373
- export const Route = createFileRoute('/')({
374
- component: Home
435
+ export const Route = createFileRoute('/example/chat')({
436
+ component: Home,
375
437
  })
@@ -2,9 +2,9 @@ import { createServerFn } from '@tanstack/react-start'
2
2
  import { Anthropic } from '@anthropic-ai/sdk'
3
3
 
4
4
  export interface Message {
5
- id: string
6
- role: 'user' | 'assistant'
7
- content: string
5
+ id: string
6
+ role: 'user' | 'assistant'
7
+ content: string
8
8
  }
9
9
 
10
10
  const DEFAULT_SYSTEM_PROMPT = `You are TanStack Chat, an AI assistant using Markdown for clear and structured responses. Format your responses following these guidelines:
@@ -47,62 +47,66 @@ const DEFAULT_SYSTEM_PROMPT = `You are TanStack Chat, an AI assistant using Mark
47
47
  - Use inline \`code\` for technical terms
48
48
  - Include example usage where helpful
49
49
 
50
- Keep responses concise and well-structured. Use appropriate Markdown formatting to enhance readability and understanding.`;
50
+ Keep responses concise and well-structured. Use appropriate Markdown formatting to enhance readability and understanding.`
51
51
 
52
52
  // Non-streaming implementation
53
- export const genAIResponse = createServerFn({ method: 'GET' })
54
-
55
- .validator((d: {
56
- messages: Message[],
57
- systemPrompt?: { value: string, enabled: boolean },
58
- streamEnabled?: boolean
59
- }) => d)
60
- // .middleware([loggingMiddleware])
61
- .handler(async ({ data }) => {
62
- const anthropic = new Anthropic({
63
- apiKey: import.meta.env.VITE_ANTHROPIC_API_KEY || '',
64
- });
65
-
66
- // Filter out error messages and empty messages
67
- const formattedMessages = data.messages
68
- .filter(msg => msg.content.trim() !== '' && !msg.content.startsWith('Sorry, I encountered an error'))
69
- .map(msg => ({
70
- role: msg.role,
71
- content: msg.content.trim()
72
- }));
73
-
74
- if (formattedMessages.length === 0) {
75
- return { error: 'No valid messages to send' };
76
- }
77
-
78
- const systemPrompt = data.systemPrompt?.enabled
79
- ? `${DEFAULT_SYSTEM_PROMPT}\n\n${data.systemPrompt.value}`
80
- : DEFAULT_SYSTEM_PROMPT;
81
-
82
- // Debug log to verify prompt layering
83
- console.log('System Prompt Configuration:', {
84
- hasCustomPrompt: data.systemPrompt?.enabled,
85
- customPromptValue: data.systemPrompt?.value,
86
- finalPrompt: systemPrompt
87
- });
88
-
89
- try {
90
- const response = await anthropic.messages.create({
91
- model: "claude-3-5-sonnet-20241022",
92
- max_tokens: 4096,
93
- system: systemPrompt,
94
- messages: formattedMessages,
95
- });
96
-
97
- if (response.content[0].type === 'text') {
98
- return { text: response.content[0].text };
99
- }
100
- return { error: 'Unexpected response type' };
101
- } catch (error) {
102
- console.error('Error in genAIResponse:', error);
103
- if (error instanceof Error && error.message.includes('rate limit')) {
104
- return { error: 'Rate limit exceeded. Please try again in a moment.' };
105
- }
106
- return { error: error instanceof Error ? error.message : 'Failed to get AI response' };
107
- }
108
- });
53
+ export const genAIResponse = createServerFn({ method: 'GET', response: 'raw' })
54
+ .validator(
55
+ (d: {
56
+ messages: Array<Message>
57
+ systemPrompt?: { value: string; enabled: boolean }
58
+ }) => d,
59
+ )
60
+ // .middleware([loggingMiddleware])
61
+ .handler(async ({ data }) => {
62
+ const anthropic = new Anthropic({
63
+ apiKey: process.env.ANTHROPIC_API_KEY || '',
64
+ })
65
+
66
+ // Filter out error messages and empty messages
67
+ const formattedMessages = data.messages
68
+ .filter(
69
+ (msg) =>
70
+ msg.content.trim() !== '' &&
71
+ !msg.content.startsWith('Sorry, I encountered an error'),
72
+ )
73
+ .map((msg) => ({
74
+ role: msg.role,
75
+ content: msg.content.trim(),
76
+ }))
77
+
78
+ if (formattedMessages.length === 0) {
79
+ return { error: 'No valid messages to send' }
80
+ }
81
+
82
+ const systemPrompt = data.systemPrompt?.enabled
83
+ ? `${DEFAULT_SYSTEM_PROMPT}\n\n${data.systemPrompt.value}`
84
+ : DEFAULT_SYSTEM_PROMPT
85
+
86
+ // Debug log to verify prompt layering
87
+ console.log('System Prompt Configuration:', {
88
+ hasCustomPrompt: data.systemPrompt?.enabled,
89
+ customPromptValue: data.systemPrompt?.value,
90
+ finalPrompt: systemPrompt,
91
+ })
92
+
93
+ try {
94
+ const response = await anthropic.messages.stream({
95
+ model: 'claude-3-5-sonnet-20241022',
96
+ max_tokens: 4096,
97
+ system: systemPrompt,
98
+ messages: formattedMessages,
99
+ })
100
+
101
+ return new Response(response.toReadableStream())
102
+ } catch (error) {
103
+ console.error('Error in genAIResponse:', error)
104
+ if (error instanceof Error && error.message.includes('rate limit')) {
105
+ return { error: 'Rate limit exceeded. Please try again in a moment.' }
106
+ }
107
+ return {
108
+ error:
109
+ error instanceof Error ? error.message : 'Failed to get AI response',
110
+ }
111
+ }
112
+ })
@@ -1,6 +1,7 @@
1
1
  {
2
2
  "dependencies": {
3
3
  "@anthropic-ai/sdk": "^0.14.1",
4
+ "highlight.js": "^11.11.1",
4
5
  "react-markdown": "^9.0.1",
5
6
  "rehype-highlight": "^7.0.0",
6
7
  "rehype-raw": "^7.0.0",
@@ -1,5 +1,5 @@
1
1
  {
2
2
  "dependencies": {
3
- "@tanstack/router-plugin": "^1.105.0"
3
+ "@tanstack/router-plugin": "^1.112.0"
4
4
  }
5
5
  }
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "Form",
3
- "description": "TansStack Form",
3
+ "description": "TanStack Form",
4
4
  "phase": "add-on",
5
5
  "link": "https://tanstack.com/form/latest",
6
6
  "templates": ["file-router", "code-router"],
@@ -0,0 +1,52 @@
1
+ # TanStack Chat Application
2
+
3
 + An example chat application built with TanStack Start, TanStack Store, and Claude AI.
4
+
5
+ ## Sidecar service
6
+
7
 + This application requires a sidecar microservice to be running. The server is located in the `ai-streaming-server` directory.
8
+
9
+ In that directory you should edit the `.env.local` file to add your Anthropic API key:
10
+
11
+ ```env
12
+ ANTHROPIC_API_KEY=your_anthropic_api_key
13
+ ```
14
+
15
+ Then run the server:
16
+
17
+ ```bash
18
 + cd ai-streaming-server
19
+ npm install
20
+ npm run dev
21
+ ```
22
+
23
+ ## ✨ Features
24
+
25
+ ### AI Capabilities
26
+
27
+ - 🤖 Powered by Claude 3.5 Sonnet
28
+ - 📝 Rich markdown formatting with syntax highlighting
29
+ - 🎯 Customizable system prompts for tailored AI behavior
30
+ - 🔄 Real-time message updates and streaming responses (coming soon)
31
+
32
+ ### User Experience
33
+
34
+ - 🎨 Modern UI with Tailwind CSS and Lucide icons
35
+ - 🔍 Conversation management and history
36
+ - 🔐 Secure API key management
37
+ - 📋 Markdown rendering with code highlighting
38
+
39
+ ### Technical Features
40
+
41
+ - 📦 Centralized state management with TanStack Store
42
+ - 🔌 Extensible architecture for multiple AI providers
43
+ - 🛠️ TypeScript for type safety
44
+
45
+ ## Architecture
46
+
47
+ ### Tech Stack
48
+
49
+ - **Routing**: TanStack Router
50
+ - **State Management**: TanStack Store
51
+ - **Styling**: Tailwind CSS
52
+ - **AI Integration**: Anthropic's Claude API