@blank-utils/llm 0.1.0 → 0.2.0
- package/dist/backends/transformers.d.ts +62 -0
- package/dist/backends/transformers.d.ts.map +1 -0
- package/dist/backends/webllm.d.ts +70 -0
- package/dist/backends/webllm.d.ts.map +1 -0
- package/dist/core.d.ts +71 -0
- package/dist/core.d.ts.map +1 -0
- package/dist/detect.d.ts +21 -0
- package/dist/detect.d.ts.map +1 -0
- package/dist/helpers.d.ts +35 -0
- package/dist/helpers.d.ts.map +1 -0
- package/dist/{index-mv82xhnr.js → index-yyxm9rp0.js} +4 -4
- package/dist/{index-mv82xhnr.js.map → index-yyxm9rp0.js.map} +2 -2
- package/dist/index.d.ts +27 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +37 -57128
- package/dist/index.js.map +3 -26
- package/dist/react/index.d.ts +310 -0
- package/dist/react/index.d.ts.map +1 -0
- package/dist/react/index.js +7 -6
- package/dist/react/index.js.map +5 -5
- package/dist/{transformers.web-nb96jrhe.js → transformers.web-1qr6h84s.js} +19 -19
- package/dist/{transformers.web-nb96jrhe.js.map → transformers.web-1qr6h84s.js.map} +2 -2
- package/dist/types.d.ts +178 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +37 -20
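The headline change: dist/index.js drops from ~57,000 lines to 37, consistent with the WebLLM and Transformers.js engines moving out of the main bundle and into the lazily imported, content-hashed chunks renamed above.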
package/dist/react/index.d.ts
ADDED
@@ -0,0 +1,310 @@
+/**
+ * React Integration for Local LLM
+ *
+ * Provides React context, hooks, and components for easy LLM integration.
+ *
+ * @example
+ * ```tsx
+ * import { LLMProvider, useChat } from 'local-llm/react';
+ *
+ * function App() {
+ *   return (
+ *     <LLMProvider model="qwen-2.5-0.5b">
+ *       <ChatComponent />
+ *     </LLMProvider>
+ *   );
+ * }
+ *
+ * function ChatComponent() {
+ *   const { messages, send, isGenerating } = useChat();
+ *
+ *   return (
+ *     <div>
+ *       {messages.map((m, i) => <p key={i}>{m.content}</p>)}
+ *       <button onClick={() => send('Hello!')}>Send</button>
+ *     </div>
+ *   );
+ * }
+ * ```
+ */
+import * as React from 'react';
+import type { LLMConfig, ChatMessage, GenerateOptions, LoadProgress, Backend } from '../types';
+import { type LocalLLM } from '../core';
+export type { ChatMessage, GenerateOptions, LoadProgress, Backend } from '../types';
+export type { LocalLLM } from '../core';
+export interface LLMContextValue {
+    /** The LLM instance (null while loading) */
+    llm: LocalLLM | null;
+    /** Whether the model is currently loading */
+    isLoading: boolean;
+    /** Whether the model is ready for inference */
+    isReady: boolean;
+    /** Current loading progress */
+    loadProgress: LoadProgress | null;
+    /** Error if loading failed */
+    error: Error | null;
+    /** Current model ID */
+    modelId: string | null;
+    /** Backend being used */
+    backend: Backend | null;
+    /** Manually reload the model */
+    reload: () => Promise<void>;
+    /** Unload the model */
+    unload: () => Promise<void>;
+}
+export interface LLMProviderProps extends Omit<LLMConfig, 'onLoadProgress'> {
+    children: React.ReactNode;
+    /**
+     * Auto-load the model on mount
+     * @default true
+     */
+    autoLoad?: boolean;
+    /**
+     * Callback when model finishes loading
+     */
+    onLoad?: (llm: LocalLLM) => void;
+    /**
+     * Callback on loading progress
+     */
+    onProgress?: (progress: LoadProgress) => void;
+    /**
+     * Callback on error
+     */
+    onError?: (error: Error) => void;
+}
+/**
+ * Provider component that manages LLM lifecycle
+ *
+ * @example
+ * ```tsx
+ * <LLMProvider
+ *   model="qwen-2.5-0.5b"
+ *   backend="auto"
+ *   onProgress={(p) => console.log(p.progress)}
+ * >
+ *   <App />
+ * </LLMProvider>
+ * ```
+ */
+export declare function LLMProvider({ children, autoLoad, onLoad, onProgress, onError, ...config }: LLMProviderProps): import("react/jsx-runtime").JSX.Element;
+/**
+ * Access the LLM context
+ *
+ * @throws If used outside of LLMProvider
+ *
+ * @example
+ * ```tsx
+ * const { llm, isReady, loadProgress } = useLLM();
+ * ```
+ */
+export declare function useLLM(): LLMContextValue;
+export interface UseChatOptions {
+    /** Initial messages */
+    initialMessages?: ChatMessage[];
+    /** System prompt */
+    systemPrompt?: string;
+    /** Generation options */
+    generateOptions?: GenerateOptions;
+    /**
+     * Queue messages while model is loading
+     * When true, users can send messages before model loads - they'll be processed once ready
+     * @default true
+     */
+    queueWhileLoading?: boolean;
+    /** Called when generation starts */
+    onStart?: () => void;
+    /** Called on each token (streaming) */
+    onToken?: (token: string, fullText: string) => void;
+    /** Called when generation completes */
+    onFinish?: (response: string) => void;
+    /** Called on error */
+    onError?: (error: Error) => void;
+}
+export interface UseChatReturn {
+    /** All messages in the conversation */
+    messages: ChatMessage[];
+    /** Current input value (for controlled input) */
+    input: string;
+    /** Set the input value */
+    setInput: (input: string) => void;
+    /** Whether currently generating a response */
+    isGenerating: boolean;
+    /** Whether a message is queued waiting for model to load */
+    isPending: boolean;
+    /** Current streaming text (while generating) */
+    streamingText: string;
+    /** Send a message and get a response */
+    send: (content?: string) => Promise<string>;
+    /** Stop the current generation */
+    stop: () => void;
+    /** Clear all messages */
+    clear: () => void;
+    /** Add a message without generating a response */
+    append: (message: ChatMessage) => void;
+    /** Reload/regenerate the last assistant message */
+    reload: () => Promise<string>;
+}
+/**
+ * Hook for managing a chat conversation with the LLM
+ *
+ * Supports **eager loading** - users can send messages while the model loads.
+ * Messages are queued and processed automatically once the model is ready.
+ *
+ * @example
+ * ```tsx
+ * function ChatComponent() {
+ *   const { isLoading, loadProgress } = useLLM();
+ *   const {
+ *     messages,
+ *     input,
+ *     setInput,
+ *     send,
+ *     isGenerating,
+ *     isPending, // true if message is queued waiting for model
+ *     streamingText,
+ *   } = useChat({
+ *     systemPrompt: 'You are a helpful assistant.',
+ *     queueWhileLoading: true, // default: true
+ *   });
+ *
+ *   return (
+ *     <div>
+ *       {isLoading && <p>Loading model... {loadProgress?.progress}%</p>}
+ *
+ *       {messages.map((m, i) => (
+ *         <div key={i} className={m.role}>
+ *           {m.content}
+ *         </div>
+ *       ))}
+ *
+ *       {isPending && <p className="pending">Waiting for model to load...</p>}
+ *       {isGenerating && <div className="assistant">{streamingText}</div>}
+ *
+ *       {/* Users can type immediately, even before model loads *\/}
+ *       <input
+ *         value={input}
+ *         onChange={(e) => setInput(e.target.value)}
+ *         onKeyDown={(e) => e.key === 'Enter' && send()}
+ *         placeholder={isLoading ? 'Type now, send when ready...' : 'Type a message...'}
+ *       />
+ *       <button onClick={() => send()} disabled={isGenerating}>
+ *         {isPending ? 'Queued...' : 'Send'}
+ *       </button>
+ *     </div>
+ *   );
+ * }
+ * ```
+ */
+export declare function useChat(options?: UseChatOptions): UseChatReturn;
+export interface UseStreamOptions {
+    /** Generation options */
+    generateOptions?: GenerateOptions;
+    /** Called on each token */
+    onToken?: (token: string, fullText: string) => void;
+    /** Called when complete */
+    onFinish?: (response: string) => void;
+    /** Called on error */
+    onError?: (error: Error) => void;
+}
+export interface UseStreamReturn {
+    /** Current streamed text */
+    text: string;
+    /** Whether currently streaming */
+    isStreaming: boolean;
+    /** Start streaming a response */
+    stream: (messages: ChatMessage[] | string) => Promise<string>;
+    /** Stop streaming */
+    stop: () => void;
+    /** Clear the text */
+    clear: () => void;
+}
+/**
+ * Hook for simple streaming generation
+ *
+ * @example
+ * ```tsx
+ * function StreamComponent() {
+ *   const { text, isStreaming, stream, clear } = useStream();
+ *
+ *   return (
+ *     <div>
+ *       <pre>{text}</pre>
+ *       <button onClick={() => stream('Tell me a story')} disabled={isStreaming}>
+ *         Generate
+ *       </button>
+ *       <button onClick={clear}>Clear</button>
+ *     </div>
+ *   );
+ * }
+ * ```
+ */
+export declare function useStream(options?: UseStreamOptions): UseStreamReturn;
+export interface UseCompletionOptions {
+    /** Generation options */
+    generateOptions?: GenerateOptions;
+}
+export interface UseCompletionReturn {
+    /** Current completion text */
+    completion: string;
+    /** Whether currently generating */
+    isLoading: boolean;
+    /** Generate a completion (non-streaming) */
+    complete: (prompt: string) => Promise<string>;
+    /** Clear the completion */
+    clear: () => void;
+}
+/**
+ * Hook for simple non-streaming completion
+ *
+ * @example
+ * ```tsx
+ * function CompletionComponent() {
+ *   const { completion, isLoading, complete } = useCompletion();
+ *
+ *   return (
+ *     <div>
+ *       <p>{completion}</p>
+ *       <button onClick={() => complete('Summarize this')} disabled={isLoading}>
+ *         Complete
+ *       </button>
+ *     </div>
+ *   );
+ * }
+ * ```
+ */
+export declare function useCompletion(options?: UseCompletionOptions): UseCompletionReturn;
+export interface LLMLoadingProps {
+    /** Custom loading UI */
+    children?: React.ReactNode;
+    /** Class name for the wrapper */
+    className?: string;
+}
+/**
+ * Component that shows loading state while LLM is loading
+ *
+ * @example
+ * ```tsx
+ * <LLMLoading>
+ *   <p>Loading model...</p>
+ * </LLMLoading>
+ * ```
+ */
+export declare function LLMLoading({ children, className }: LLMLoadingProps): import("react/jsx-runtime").JSX.Element | null;
+export interface LLMReadyProps {
+    /** Content to show when ready */
+    children: React.ReactNode;
+    /** Content to show while loading */
+    fallback?: React.ReactNode;
+}
+/**
+ * Component that only renders children when LLM is ready
+ *
+ * @example
+ * ```tsx
+ * <LLMReady fallback={<Loading />}>
+ *   <ChatInterface />
+ * </LLMReady>
+ * ```
+ */
+export declare function LLMReady({ children, fallback }: LLMReadyProps): import("react/jsx-runtime").JSX.Element;
+//# sourceMappingURL=index.d.ts.map
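Taken together, these declarations are the package's new React surface: a provider, four hooks, and two gate components. A minimal end-to-end consumer, sketched only from the declarations and JSDoc examples above (the `local-llm/react` import path and model ID are the ones the JSDoc itself uses and may differ for `@blank-utils/llm`):

```tsx
import { LLMProvider, LLMReady, useChat } from 'local-llm/react';

function Chat() {
  // queueWhileLoading defaults to true, so send() can be called
  // before the model finishes loading; the message is queued.
  const { messages, input, setInput, send, isGenerating, streamingText } = useChat({
    systemPrompt: 'You are a helpful assistant.',
  });

  return (
    <div>
      {messages.map((m, i) => (
        <p key={i}>{m.role}: {m.content}</p>
      ))}
      {isGenerating && <p>{streamingText}</p>}
      <input value={input} onChange={(e) => setInput(e.target.value)} />
      <button onClick={() => send()} disabled={isGenerating}>Send</button>
    </div>
  );
}

export default function App() {
  return (
    <LLMProvider model="qwen-2.5-0.5b">
      {/* LLMReady renders its fallback until the provider reports isReady */}
      <LLMReady fallback={<p>Loading model...</p>}>
        <Chat />
      </LLMReady>
    </LLMProvider>
  );
}
```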
package/dist/react/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/react/index.tsx"],"names":[],"mappings":"…"}
package/dist/react/index.js
CHANGED
@@ -118,7 +118,7 @@ class WebLLMProvider {
   }
   async load(modelId, onProgress) {
     const resolvedModel = resolveModelId(modelId);
-    const { CreateMLCEngine } = await import("../index-mv82xhnr.js");
+    const { CreateMLCEngine } = await import("../index-yyxm9rp0.js");
     const initProgressCallback = (report) => {
       if (onProgress) {
         const progress = {
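Both backends are now reached through dynamic `import()` of content-hashed chunks (here and in the Transformers hunks below), so neither engine is downloaded until `load()` actually runs; this is presumably what lets the main bundle shrink so drastically. The pattern, as a minimal sketch (chunk path and caching are illustrative, not taken from the diff):

```ts
// Hypothetical lazy loader: the hashed chunk is fetched on first use only,
// and the in-flight promise is cached so concurrent calls share one fetch.
let enginePromise: Promise<any> | null = null;

async function loadEngine(): Promise<any> {
  enginePromise ??= import('./engine-chunk.js'); // stand-in for index-yyxm9rp0.js
  return enginePromise;
}
```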
@@ -308,7 +308,7 @@ class TransformersProvider {
   }
   async load(modelId, onProgress) {
     const resolvedModel = modelId in TRANSFORMERS_MODELS ? TRANSFORMERS_MODELS[modelId] : modelId;
-    const { pipeline, env } = await import("../transformers.web-nb96jrhe.js");
+    const { pipeline, env } = await import("../transformers.web-1qr6h84s.js");
    env.allowLocalModels = false;
    env.useBrowserCache = true;
    let deviceOption = "wasm";
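The Transformers.js backend configures its environment before creating a pipeline: Hub-only model resolution, browser-side weight caching, and `wasm` as the baseline device. Only that fallback initialization is visible in this hunk; a rough sketch of the full selection against the public `@huggingface/transformers` API (the model ID is illustrative):

```ts
import { pipeline, env } from '@huggingface/transformers';

env.allowLocalModels = false; // resolve model IDs against the Hub only
env.useBrowserCache = true;   // cache downloaded weights in the browser

// Prefer WebGPU when the browser exposes it; otherwise stay on WASM.
const device = 'gpu' in navigator ? 'webgpu' : 'wasm';
const generator = await pipeline('text-generation', 'onnx-community/Qwen2.5-0.5B-Instruct', { device });
```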
@@ -359,7 +359,7 @@ class TransformersProvider {
       throw new Error("Model not loaded. Call load() first.");
     }
     const prompt = formatPrompt(messages, this.currentModel);
-    const { TextStreamer } = await import("../transformers.web-nb96jrhe.js");
+    const { TextStreamer } = await import("../transformers.web-1qr6h84s.js");
     let fullText = "";
     const streamer = new TextStreamer(this.pipeline.tokenizer, {
       skip_prompt: true,
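`TextStreamer` is Transformers.js's streaming helper: it detokenizes output as it is generated and invokes a callback per decoded chunk, which is how `fullText` above gets built up. Roughly, using the `callback_function` option from the upstream API (the option itself is not visible in this truncated hunk):

```ts
import { TextStreamer, PreTrainedTokenizer } from '@huggingface/transformers';

function makeStreamer(tokenizer: PreTrainedTokenizer, onToken: (token: string, fullText: string) => void) {
  let fullText = '';
  return new TextStreamer(tokenizer, {
    skip_prompt: true, // don't replay the prompt tokens to the caller
    callback_function: (chunk: string) => {
      fullText += chunk;
      onToken(chunk, fullText); // same (token, fullText) shape as the react onToken callbacks
    },
  });
}
```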
@@ -521,7 +521,7 @@ function createLoadingIndicator(containerSelector) {
     element: indicator
   };
 }
-// src/
+// src/core.ts
 function normalizeMessages(input, systemPrompt) {
   const messages = [];
   if (systemPrompt) {
@@ -599,7 +599,6 @@ async function isWebGPUSupported() {
   const caps = await detectCapabilities();
   return caps.webgpu;
 }
-var src_default = createLLM;
 
 // src/react/index.tsx
 import { jsxDEV, Fragment } from "react/jsx-dev-runtime";
@@ -950,5 +949,7 @@ export {
   LLMLoading
 };
 
-
+export { checkWebGPU, checkWasm, detectCapabilities, logCapabilities, DEFAULT_WEBLLM_MODEL, WEBLLM_MODELS, WebLLMProvider, createWebLLMProvider, DEFAULT_TRANSFORMERS_MODEL, TRANSFORMERS_MODELS, TransformersProvider, createTransformersProvider, createOutputStreamer, attachToElements, createChatUI, createLoadingIndicator, createLLM, isWebGPUSupported, LLMProvider, useLLM, useChat, useStream, useCompletion, LLMLoading, LLMReady };
+
+//# debugId=654DAC199C2102F264756E2164756E21