@blank-utils/llm 0.2.3 → 0.2.6
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/README.md +134 -4
- package/dist/index.js +755 -293
- package/dist/react/index.js +778 -293
- package/package.json +6 -3
- package/dist/backends/transformers.d.ts +0 -45
- package/dist/backends/transformers.d.ts.map +0 -1
- package/dist/backends/webllm.d.ts +0 -35
- package/dist/backends/webllm.d.ts.map +0 -1
- package/dist/core.d.ts +0 -71
- package/dist/core.d.ts.map +0 -1
- package/dist/detect.d.ts +0 -21
- package/dist/detect.d.ts.map +0 -1
- package/dist/helpers.d.ts +0 -35
- package/dist/helpers.d.ts.map +0 -1
- package/dist/index.d.ts +0 -27
- package/dist/index.d.ts.map +0 -1
- package/dist/models.d.ts +0 -74
- package/dist/models.d.ts.map +0 -1
- package/dist/react/chat-input.d.ts +0 -43
- package/dist/react/chat-input.d.ts.map +0 -1
- package/dist/react/components.d.ts +0 -52
- package/dist/react/components.d.ts.map +0 -1
- package/dist/react/index.d.ts +0 -311
- package/dist/react/index.d.ts.map +0 -1
- package/dist/types.d.ts +0 -180
- package/dist/types.d.ts.map +0 -1
package/dist/react/index.d.ts
DELETED
@@ -1,311 +0,0 @@
-/**
- * React Integration for Local LLM
- *
- * Provides React context, hooks, and components for easy LLM integration.
- *
- * @example
- * ```tsx
- * import { LLMProvider, useChat } from 'local-llm/react';
- *
- * function App() {
- *   return (
- *     <LLMProvider model="qwen-2.5-0.5b">
- *       <ChatComponent />
- *     </LLMProvider>
- *   );
- * }
- *
- * function ChatComponent() {
- *   const { messages, send, isGenerating } = useChat();
- *
- *   return (
- *     <div>
- *       {messages.map((m, i) => <p key={i}>{m.content}</p>)}
- *       <button onClick={() => send('Hello!')}>Send</button>
- *     </div>
- *   );
- * }
- * ```
- */
-import * as React from 'react';
-import type { LLMConfig, ChatMessage, GenerateOptions, LoadProgress, Backend } from '../types';
-import { type LocalLLM } from '../core';
-export interface LLMContextValue {
-    /** The LLM instance (null while loading) */
-    llm: LocalLLM | null;
-    /** Whether the model is currently loading */
-    isLoading: boolean;
-    /** Whether the model is ready for inference */
-    isReady: boolean;
-    /** Current loading progress */
-    loadProgress: LoadProgress | null;
-    /** Error if loading failed */
-    error: Error | null;
-    /** Current model ID */
-    modelId: string | null;
-    /** Backend being used */
-    backend: Backend | null;
-    /** Manually reload the model */
-    reload: () => Promise<void>;
-    /** Unload the model */
-    unload: () => Promise<void>;
-}
-export interface LLMProviderProps extends Omit<LLMConfig, 'onLoadProgress'> {
-    children: React.ReactNode;
-    /**
-     * Auto-load the model on mount
-     * @default true
-     */
-    autoLoad?: boolean;
-    /**
-     * Callback when model finishes loading
-     */
-    onLoad?: (llm: LocalLLM) => void;
-    /**
-     * Callback on loading progress
-     */
-    onProgress?: (progress: LoadProgress) => void;
-    /**
-     * Callback on error
-     */
-    onError?: (error: Error) => void;
-}
-/**
- * Provider component that manages LLM lifecycle
- *
- * @example
- * ```tsx
- * <LLMProvider
- *   model="qwen-2.5-0.5b"
- *   backend="auto"
- *   onProgress={(p) => console.log(p.progress)}
- * >
- *   <App />
- * </LLMProvider>
- * ```
- */
-declare function LLMProvider({ children, autoLoad, onLoad, onProgress, onError, ...config }: LLMProviderProps): import("react/jsx-runtime").JSX.Element;
-/**
- * Access the LLM context
- *
- * @throws If used outside of LLMProvider
- *
- * @example
- * ```tsx
- * const { llm, isReady, loadProgress } = useLLM();
- * ```
- */
-declare function useLLM(): LLMContextValue;
-export interface UseChatOptions {
-    /** Initial messages */
-    initialMessages?: ChatMessage[];
-    /** System prompt */
-    systemPrompt?: string;
-    /** Generation options */
-    generateOptions?: GenerateOptions;
-    /**
-     * Queue messages while model is loading
-     * When true, users can send messages before model loads - they'll be processed once ready
-     * @default true
-     */
-    queueWhileLoading?: boolean;
-    /** Called when generation starts */
-    onStart?: () => void;
-    /** Called on each token (streaming) */
-    onToken?: (token: string, fullText: string) => void;
-    /** Called when generation completes */
-    onFinish?: (response: string) => void;
-    /** Called on error */
-    onError?: (error: Error) => void;
-}
-export interface UseChatReturn {
-    /** All messages in the conversation */
-    messages: ChatMessage[];
-    /** Current input value (for controlled input) */
-    input: string;
-    /** Set the input value */
-    setInput: (input: string) => void;
-    /** Whether currently generating a response */
-    isGenerating: boolean;
-    /** Whether a message is queued waiting for model to load */
-    isPending: boolean;
-    /** Current streaming text (while generating) */
-    streamingText: string;
-    /** Send a message and get a response */
-    send: (content?: string) => Promise<string>;
-    /** Stop the current generation */
-    stop: () => void;
-    /** Clear all messages */
-    clear: () => void;
-    /** Add a message without generating a response */
-    append: (message: ChatMessage) => void;
-    /** Reload/regenerate the last assistant message */
-    reload: () => Promise<string>;
-}
-/**
- * Hook for managing a chat conversation with the LLM
- *
- * Supports **eager loading** - users can send messages while the model loads.
- * Messages are queued and processed automatically once the model is ready.
- *
- * @example
- * ```tsx
- * function ChatComponent() {
- *   const { isLoading, loadProgress } = useLLM();
- *   const {
- *     messages,
- *     input,
- *     setInput,
- *     send,
- *     isGenerating,
- *     isPending, // true if message is queued waiting for model
- *     streamingText,
- *   } = useChat({
- *     systemPrompt: 'You are a helpful assistant.',
- *     queueWhileLoading: true, // default: true
- *   });
- *
- *   return (
- *     <div>
- *       {isLoading && <p>Loading model... {loadProgress?.progress}%</p>}
- *
- *       {messages.map((m, i) => (
- *         <div key={i} className={m.role}>
- *           {m.content}
- *         </div>
- *       ))}
- *
- *       {isPending && <p className="pending">Waiting for model to load...</p>}
- *       {isGenerating && <div className="assistant">{streamingText}</div>}
- *
- *       {/* Users can type immediately, even before model loads *\/}
- *       <input
- *         value={input}
- *         onChange={(e) => setInput(e.target.value)}
- *         onKeyDown={(e) => e.key === 'Enter' && send()}
- *         placeholder={isLoading ? 'Type now, send when ready...' : 'Type a message...'}
- *       />
- *       <button onClick={() => send()} disabled={isGenerating}>
- *         {isPending ? 'Queued...' : 'Send'}
- *       </button>
- *     </div>
- *   );
- * }
- * ```
- */
-declare function useChat(options?: UseChatOptions): UseChatReturn;
-export interface UseStreamOptions {
-    /** Generation options */
-    generateOptions?: GenerateOptions;
-    /** Called on each token */
-    onToken?: (token: string, fullText: string) => void;
-    /** Called when complete */
-    onFinish?: (response: string) => void;
-    /** Called on error */
-    onError?: (error: Error) => void;
-}
-export interface UseStreamReturn {
-    /** Current streamed text */
-    text: string;
-    /** Whether currently streaming */
-    isStreaming: boolean;
-    /** Start streaming a response */
-    stream: (messages: ChatMessage[] | string) => Promise<string>;
-    /** Stop streaming */
-    stop: () => void;
-    /** Clear the text */
-    clear: () => void;
-}
-/**
- * Hook for simple streaming generation
- *
- * @example
- * ```tsx
- * function StreamComponent() {
- *   const { text, isStreaming, stream, clear } = useStream();
- *
- *   return (
- *     <div>
- *       <pre>{text}</pre>
- *       <button onClick={() => stream('Tell me a story')} disabled={isStreaming}>
- *         Generate
- *       </button>
- *       <button onClick={clear}>Clear</button>
- *     </div>
- *   );
- * }
- * ```
- */
-declare function useStream(options?: UseStreamOptions): UseStreamReturn;
-export interface UseCompletionOptions {
-    /** Generation options */
-    generateOptions?: GenerateOptions;
-}
-export interface UseCompletionReturn {
-    /** Current completion text */
-    completion: string;
-    /** Whether currently generating */
-    isLoading: boolean;
-    /** Generate a completion (non-streaming) */
-    complete: (prompt: string) => Promise<string>;
-    /** Clear the completion */
-    clear: () => void;
-}
-/**
- * Hook for simple non-streaming completion
- *
- * @example
- * ```tsx
- * function CompletionComponent() {
- *   const { completion, isLoading, complete } = useCompletion();
- *
- *   return (
- *     <div>
- *       <p>{completion}</p>
- *       <button onClick={() => complete('Summarize this')} disabled={isLoading}>
- *         Complete
- *       </button>
- *     </div>
- *   );
- * }
- * ```
- */
-declare function useCompletion(options?: UseCompletionOptions): UseCompletionReturn;
-export interface LLMLoadingProps {
-    /** Custom loading UI */
-    children?: React.ReactNode;
-    /** Class name for the wrapper */
-    className?: string;
-}
-/**
- * Component that shows loading state while LLM is loading
- *
- * @example
- * ```tsx
- * <LLMLoading>
- *   <p>Loading model...</p>
- * </LLMLoading>
- * ```
- */
-declare function LLMLoading({ children, className }: LLMLoadingProps): import("react/jsx-runtime").JSX.Element | null;
-export interface LLMReadyProps {
-    /** Content to show when ready */
-    children: React.ReactNode;
-    /** Content to show while loading */
-    fallback?: React.ReactNode;
-}
-/**
- * Component that only renders children when LLM is ready
- *
- * @example
- * ```tsx
- * <LLMReady fallback={<Loading />}>
- *   <ChatInterface />
- * </LLMReady>
- * ```
- */
-declare function LLMReady({ children, fallback }: LLMReadyProps): import("react/jsx-runtime").JSX.Element;
-import { Chat, type ChatProps } from './components';
-import { ChatInput, type ChatInputProps } from './chat-input';
-export { LLMProvider, useLLM, useChat, useStream, useCompletion, LLMLoading, LLMReady, Chat, ChatInput, type ChatProps, type ChatInputProps, };
-//# sourceMappingURL=index.d.ts.map
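
The removed hook API above includes conversation controls that its own JSDoc examples never exercise: `stop`, `clear`, `append`, and `reload` on `UseChatReturn`, plus `unload` on the context value. A minimal sketch of how they compose, written against these 0.2.3 typings; the component name and import path are assumptions (the JSDoc above imports from 'local-llm/react', while the package is published as @blank-utils/llm):

```tsx
// Import path assumed from the dist/ layout shown in the file summary.
import { useLLM, useChat } from '@blank-utils/llm/react';

// Illustrative toolbar exercising the UseChatReturn controls the
// JSDoc examples leave out, plus LLMContextValue.unload.
function ChatToolbar() {
  const { isReady, unload } = useLLM();
  const { isGenerating, stop, reload, clear, append } = useChat();

  return (
    <div>
      {/* stop(): abort the in-flight generation */}
      <button onClick={stop} disabled={!isGenerating}>Stop</button>
      {/* reload(): regenerate the last assistant message */}
      <button onClick={() => reload()} disabled={isGenerating}>Regenerate</button>
      {/* clear(): drop all messages in the conversation */}
      <button onClick={clear}>Clear</button>
      {/* append(): add a message without generating a response */}
      <button onClick={() => append({ role: 'assistant', content: 'Noted.' })}>
        Append note
      </button>
      {/* unload(): free the model's resources when done */}
      <button onClick={() => unload()} disabled={!isReady}>Unload model</button>
    </div>
  );
}
```

Since `useChat` owns the `messages` state it returns, controls like these belong in the same component (under one `LLMProvider`) as the transcript they act on.
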
package/dist/react/index.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/react/index.tsx"],"names":[],"mappings":"…"}
package/dist/types.d.ts
DELETED
@@ -1,180 +0,0 @@
-/**
- * Local LLM - Browser-based LLM inference library
- * Shared TypeScript types and interfaces
- */
-/**
- * Supported backend engines
- */
-export type Backend = 'webllm' | 'transformers' | 'auto';
-/**
- * Device to run inference on
- */
-export type Device = 'webgpu' | 'wasm' | 'auto';
-/**
- * Quantization options for model loading
- */
-export type Quantization = 'q4' | 'q8' | 'fp16' | 'fp32';
-import type { SupportedModel } from './models';
-export type { SupportedModel };
-/**
- * Configuration for creating an LLM instance
- */
-export interface LLMConfig {
-    /**
-     * Model identifier. For WebLLM, use MLC model IDs.
-     * For Transformers.js, use HuggingFace model IDs.
-     * @default 'Phi-3-mini-4k-instruct-q4f16_1-MLC' for WebLLM
-     */
-    model?: SupportedModel;
-    /**
-     * Which backend to use
-     * @default 'auto' - Will prefer WebLLM if WebGPU available
-     */
-    backend?: Backend;
-    /**
-     * Device to run on
-     * @default 'auto' - Prefers WebGPU, falls back to WASM
-     */
-    device?: Device;
-    /**
-     * Quantization level (transformers.js only)
-     * @default 'q4'
-     */
-    quantization?: Quantization;
-    /**
-     * System prompt to use for all conversations
-     */
-    systemPrompt?: string;
-    /**
-     * Callback for model loading progress
-     */
-    onLoadProgress?: LoadProgressCallback;
-}
-/**
- * Role in a chat conversation
- */
-export type MessageRole = 'system' | 'user' | 'assistant';
-/**
- * A single message in a chat conversation
- */
-export interface ChatMessage {
-    role: MessageRole;
-    content: string;
-}
-/**
- * Callback for streaming token output
- */
-export type StreamCallback = (token: string, fullText: string) => void;
-/**
- * Loading progress information
- */
-export interface LoadProgress {
-    /** Progress percentage (0-100) */
-    progress: number;
-    /** Current status text */
-    status: string;
-    /** Bytes loaded (if available) */
-    loaded?: number;
-    /** Total bytes (if available) */
-    total?: number;
-}
-/**
- * Callback for model loading progress
- */
-export type LoadProgressCallback = (progress: LoadProgress) => void;
-/**
- * Options for text generation
- */
-export interface GenerateOptions {
-    /**
-     * Temperature for sampling (0-2)
-     * @default 0.7
-     */
-    temperature?: number;
-    /**
-     * Maximum tokens to generate
-     * @default 512
-     */
-    maxTokens?: number;
-    /**
-     * Top-p sampling
-     * @default 0.95
-     */
-    topP?: number;
-    /**
-     * Stop sequences
-     */
-    stopSequences?: string[];
-}
-/**
- * Unified interface for LLM backends
- */
-export interface LLMProvider {
-    /**
-     * Backend identifier
-     */
-    readonly backend: Backend;
-    /**
-     * Whether the model is loaded and ready
-     */
-    readonly isReady: boolean;
-    /**
-     * Current model ID
-     */
-    readonly modelId: string | null;
-    /**
-     * Load a model
-     */
-    load(modelId: string, onProgress?: LoadProgressCallback): Promise<void>;
-    /**
-     * Generate a response (non-streaming)
-     */
-    chat(messages: ChatMessage[], options?: GenerateOptions): Promise<string>;
-    /**
-     * Generate a response with streaming
-     */
-    stream(messages: ChatMessage[], onToken: StreamCallback, options?: GenerateOptions): Promise<string>;
-    /**
-     * Unload the model and free resources
-     */
-    unload(): Promise<void>;
-}
-/**
- * Options for attaching to an input element
- */
-export interface AttachOptions {
-    /**
-     * Trigger generation on Enter key
-     * @default true
-     */
-    triggerOnEnter?: boolean;
-    /**
-     * Clear input after sending
-     * @default true
-     */
-    clearOnSend?: boolean;
-    /**
-     * Show loading indicator
-     * @default true
-     */
-    showLoading?: boolean;
-    /**
-     * Custom loading text
-     * @default 'Thinking...'
-     */
-    loadingText?: string;
-}
-/**
- * Browser capability information
- */
-export interface BrowserCapabilities {
-    /** WebGPU is available */
-    webgpu: boolean;
-    /** WebAssembly is available */
-    wasm: boolean;
-    /** Recommended backend based on capabilities */
-    recommendedBackend: Backend;
-    /** Recommended device based on capabilities */
-    recommendedDevice: Device;
-}
-//# sourceMappingURL=types.d.ts.map
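
The `LLMProvider` interface here is the backend contract (distinct from the React `LLMProvider` component in `react/index.d.ts`), presumably what the `backends/webllm` and `backends/transformers` modules implement. A minimal mock that satisfies it, e.g. for testing UI without downloading a model; the class name, echo behavior, and import path are illustrative, not from the package:

```ts
import type {
  Backend, ChatMessage, GenerateOptions,
  LLMProvider, LoadProgressCallback, StreamCallback,
} from '@blank-utils/llm'; // assuming the root entry re-exports dist/types.d.ts

// Echo backend: satisfies the LLMProvider contract without loading a model.
class EchoProvider implements LLMProvider {
  readonly backend: Backend = 'auto';
  private loaded: string | null = null;

  get isReady(): boolean { return this.loaded !== null; }
  get modelId(): string | null { return this.loaded; }

  async load(modelId: string, onProgress?: LoadProgressCallback): Promise<void> {
    onProgress?.({ progress: 0, status: 'loading' });
    this.loaded = modelId;
    onProgress?.({ progress: 100, status: 'ready' });
  }

  async chat(messages: ChatMessage[], _options?: GenerateOptions): Promise<string> {
    // Echo the last message back instead of running inference.
    const last = messages[messages.length - 1];
    return `echo: ${last?.content ?? ''}`;
  }

  async stream(messages: ChatMessage[], onToken: StreamCallback,
               options?: GenerateOptions): Promise<string> {
    // Stream the non-streaming result one character at a time.
    const full = await this.chat(messages, options);
    let text = '';
    for (const ch of full) {
      text += ch;
      onToken(ch, text); // (token, fullText) per StreamCallback
    }
    return full;
  }

  async unload(): Promise<void> { this.loaded = null; }
}
```

Because `stream` resolves with the same full string that `chat` returns, a mock like this is a drop-in for either call path.
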
package/dist/types.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"…"}