@blank-utils/llm 0.2.3 → 0.2.7
- package/README.md +134 -4
- package/dist/index.js +755 -293
- package/dist/react/chat-input.d.ts +23 -2
- package/dist/react/chat-input.d.ts.map +1 -1
- package/dist/react/components.d.ts +24 -12
- package/dist/react/components.d.ts.map +1 -1
- package/dist/react/index.d.ts +2 -2
- package/dist/react/index.d.ts.map +1 -1
- package/dist/react/index.js +778 -293
- package/package.json +6 -3
package/README.md  (CHANGED)
@@ -39,20 +39,61 @@ bun add @blank-utils/llm
 
 ## Quick Start
 
-###
+### Quick Chat (Fully Featured App)
+
+The fastest way to get started. `<ChatApp>` includes the provider, model management, and UI in a single component:
+
+```tsx
+import { ChatApp } from "@blank-utils/llm/react";
+
+export default function App() {
+  return (
+    <ChatApp
+      defaultModel="qwen-2.5-0.5b"
+      theme="dark" // 'dark' | 'light'
+      systemPrompt="You are a helpful assistant."
+    />
+  );
+}
+```
+
+### Components (Custom Setup)
+
+If you already have a provider or want more control, use the `<Chat>` component:
+
+```tsx
+import { LLMProvider, Chat } from "@blank-utils/llm/react";
+
+export default function App() {
+  return (
+    <LLMProvider model="qwen-2.5-0.5b">
+      <Chat
+        theme="dark"
+        systemPrompt="You are a helpful assistant."
+        placeholder="Ask me anything..."
+      />
+    </LLMProvider>
+  );
+}
+```
+
+### Custom UI with Hooks
+
+Build your own interface from scratch using our hooks:
 
 ```tsx
 import { LLMProvider, useChat, useLLM } from "@blank-utils/llm/react";
+// ... (rest of the hooks example)
 
 function App() {
   return (
     <LLMProvider model="qwen-2.5-0.5b">
-      <
+      <ChatUI />
     </LLMProvider>
   );
 }
 
-function
+function ChatUI() {
   const { isLoading, loadProgress } = useLLM();
   const {
     messages,
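The hooks example in this hunk is cut off (both the `// ... (rest of the hooks example)` line and the `const { messages,` destructure). Purely for orientation, here is a minimal sketch of what a `ChatUI` built on these hooks could look like; everything beyond `messages`, `isLoading`, and `loadProgress` (the `sendMessage` call, the `isGenerating` flag, and the `{ role, content }` message shape) is an assumption, not the package's confirmed API.

```tsx
// Sketch only. `useLLM` returning { isLoading, loadProgress } and `useChat` exposing
// `messages` come from the diff above; `sendMessage`, `isGenerating`, the message
// shape { role, content }, and the loadProgress format are assumptions.
import { useState } from "react";
import { useChat, useLLM } from "@blank-utils/llm/react";

function ChatUI() {
  const { isLoading, loadProgress } = useLLM();
  const { messages, sendMessage, isGenerating } = useChat(); // fields beyond `messages` are assumed
  const [input, setInput] = useState("");

  if (isLoading) {
    // Exact format of loadProgress (fraction vs. percent) is not shown in the diff.
    return <p>Loading model… {String(loadProgress)}</p>;
  }

  return (
    <div>
      {messages.map((m, i) => (
        <p key={i}>
          <strong>{m.role}:</strong> {m.content} {/* assumed message shape */}
        </p>
      ))}
      <input value={input} onChange={(e) => setInput(e.target.value)} />
      <button
        disabled={isGenerating}
        onClick={() => {
          sendMessage(input); // assumed hook method
          setInput("");
        }}
      >
        Send
      </button>
    </div>
  );
}
```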
@@ -143,7 +184,9 @@ const cleanup = llm.attachToInput("#prompt-input", "#response-output", {
 │   │   ├── webllm.ts        # WebLLM backend (WebGPU)
 │   │   └── transformers.ts  # Transformers.js backend (WASM / WebGPU)
 │   └── react/
-│
+│       ├── index.tsx        # React context, provider, hooks
+│       ├── components.tsx   # <Chat> — ready-made chat interface
+│       └── chat-input.tsx   # <ChatInput> — auto-resizing input widget
 └── dist/                    # Built output (ESM)
 ```
 
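The hunk context above also shows a fragment of the vanilla API: `const cleanup = llm.attachToInput("#prompt-input", "#response-output", {`. As a rough sketch of that call shape only: how the `llm` instance is constructed, which options the third argument accepts, and whether the return value really is a cleanup function are all assumptions not confirmed by this diff.

```ts
// Shape sketch of the attachToInput call visible in the hunk header; the `llm`
// declaration, the options object, and the cleanup-function return type are assumptions.
declare const llm: {
  attachToInput(inputSelector: string, outputSelector: string, options?: object): () => void;
};

const cleanup = llm.attachToInput("#prompt-input", "#response-output", {
  // options elided in the diff
});

// Presumably called when the widget should stop listening:
cleanup();
```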
@@ -279,6 +322,93 @@ Conditional rendering components:
 </LLMReady>
 ```
 
+### Default Chat Interface explained
+
+The `<Chat>` and `<ChatApp>` components provide a production-ready interface with "Terminal Luxury" aesthetics.
+
+**Key Features & Usage Points:**
+
+- **✨ Zero Config**: Just drop it in. No CSS files to import, no state to manage.
+- **🎨 Rich Text Rendering**:
+  - **Markdown**: Bold, italics, lists, tables.
+  - **Code Blocks**: Syntax highlighting for 20+ languages.
+  - **Diagrams**: Renders `mermaid` diagrams automatically.
+  - **Math**: Supports LaTeX expressions.
+- **⚡ Eager Interaction**: Users can type and send messages _while_ the model is still initializing. The chat queues them until the model is ready.
+- **🌗 Theming**: Built-in 'dark' (cherry red accents) and 'light' modes.
+- **🔄 Model Switching**:
+  - If using `<ChatApp />`, a model selector dropdown is included automatically.
+  - If using `<Chat />`, pass `onModelChange` to enable the dropdown.
+- **🛠️ Extensible Toolbar**: Use the `inputActions` prop to add your own buttons (e.g., upload, clear) to the input area.
+
+### `<Chat>` Component API
+
+```tsx
+<Chat
+  // Appearance
+  theme="dark"                // 'dark' | 'light'
+  maxHeight="600px"           // CSS max-height
+  className="my-chat"         // Extra classes
+  // Content
+  systemPrompt="..."          // Default: "You are a helpful AI assistant..."
+  welcomeMessage="..."        // Text shown when chat is empty
+  placeholder="..."           // Input placeholder
+  // Features
+  showHeader={true}           // Toggle header/model info
+  showProgress={true}         // Toggle loading progress bar
+  // Callbacks
+  onSend={(msg) => {}}        // Listen to user messages
+  onResponse={(res) => {}}    // Listen to AI responses
+  onModelChange={(id) => {}}  // Enable model switching dropdown
+  inputActions={
+    <>
+      <button>Clear</button>
+    </>
+  } // Add custom buttons
+/>
+```
+
+### `<ChatApp>` Component API
+
+Wrapper that combines `LLMProvider` and `Chat`.
+
+```tsx
+<ChatApp
+  defaultModel="qwen-2.5-0.5b"
+  defaultBackend="auto"       // 'webllm' | 'transformers'
+  autoLoad={true}             // Start downloading immediately
+  {...chatProps}              // All <Chat> props are supported
+/>
+```
+
+### `<ChatInput>`
+
+Standalone auto-resizing input component. Use it to build custom chat layouts:
+
+```tsx
+import { ChatInput } from "@blank-utils/llm/react";
+
+<ChatInput
+  value={input}               // Controlled value
+  onChange={setInput}         // Value change handler
+  onSend={handleSend}         // Submit handler (Enter or button)
+  onStop={handleStop}         // Stop generation
+  disabled={false}            // Disable input
+  isGenerating={false}        // Show stop button instead of send
+  placeholder="Type..."       // Placeholder text
+  maxRows={5}                 // Max rows before scroll
+  theme="dark"                // 'dark' | 'light'
+  actions={<MyButtons />}     // Custom toolbar actions
+/>;
+```
+
+**Features:**
+
+- 📝 Auto-resizes up to `maxRows`, then scrolls
+- ⌨️ Enter to send, Shift+Enter for newline
+- ⏹️ Stop button while generating
+- 🎨 Dark/light theme support
+
 ---
 
 ## Vanilla JS API