@blank-utils/llm 0.1.0 → 0.2.0

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries; it is provided for informational purposes only.
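Summary of the changes visible in the sourcemap diff below: the main entry source was renamed from `src/index.ts` to `src/core.ts`, and `src/react/index.tsx` was updated to match. The React entry now imports `createLLM` and `LocalLLM` from `'../core'`, switches from a named `ReactNode` type import to a namespace import (`import * as React from 'react'`, using `React.ReactNode` in props types), and re-exports `ChatMessage`, `GenerateOptions`, `LoadProgress`, `Backend`, and `LocalLLM` for convenience.

A minimal sketch of what the new re-exports allow on the consumer side. The `local-llm/react` specifier is taken from the JSDoc examples embedded in the diff; the actual subpath export for the published `@blank-utils/llm` package may differ.

```ts
// Hypothetical consumer import after 0.2.0. The 'local-llm/react'
// specifier follows the JSDoc examples embedded in this sourcemap;
// the real subpath for @blank-utils/llm may differ.
import {
  LLMProvider,
  useChat,
  type ChatMessage, // re-exported from '../types' as of 0.2.0
  type LocalLLM,    // re-exported from '../core' as of 0.2.0
} from 'local-llm/react';

// No separate deep import is needed for these types anymore.
const greeting: ChatMessage = { role: 'user', content: 'Hello!' };
```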
@@ -1,15 +1,15 @@
  {
  "version": 3,
- "sources": ["../src/react/index.tsx", "../src/detect.ts", "../src/backends/webllm.ts", "../src/backends/transformers.ts", "../src/helpers.ts", "../src/index.ts"],
+ "sources": ["../src/react/index.tsx", "../src/detect.ts", "../src/backends/webllm.ts", "../src/backends/transformers.ts", "../src/helpers.ts", "../src/core.ts"],
  "sourcesContent": [
- "/**\n * React Integration for Local LLM\n *\n * Provides React context, hooks, and components for easy LLM integration.\n *\n * @example\n * ```tsx\n * import { LLMProvider, useChat } from 'local-llm/react';\n *\n * function App() {\n * return (\n * <LLMProvider model=\"qwen-2.5-0.5b\">\n * <ChatComponent />\n * </LLMProvider>\n * );\n * }\n *\n * function ChatComponent() {\n * const { messages, send, isGenerating } = useChat();\n *\n * return (\n * <div>\n * {messages.map((m, i) => <p key={i}>{m.content}</p>)}\n * <button onClick={() => send('Hello!')}>Send</button>\n * </div>\n * );\n * }\n * ```\n */\n\nimport {\n createContext,\n useContext,\n useState,\n useCallback,\n useEffect,\n useMemo,\n useRef,\n type ReactNode,\n} from 'react';\n\nimport type {\n LLMConfig,\n ChatMessage,\n GenerateOptions,\n LoadProgress,\n Backend,\n} from '../types';\n\nimport { createLLM, type LocalLLM } from '../index';\n\n// ============================================================================\n// Context Types\n// ============================================================================\n\nexport interface LLMContextValue {\n /** The LLM instance (null while loading) */\n llm: LocalLLM | null;\n\n /** Whether the model is currently loading */\n isLoading: boolean;\n\n /** Whether the model is ready for inference */\n isReady: boolean;\n\n /** Current loading progress */\n loadProgress: LoadProgress | null;\n\n /** Error if loading failed */\n error: Error | null;\n\n /** Current model ID */\n modelId: string | null;\n\n /** Backend being used */\n backend: Backend | null;\n\n /** Manually reload the model */\n reload: () => Promise<void>;\n\n /** Unload the model */\n unload: () => Promise<void>;\n}\n\nconst LLMContext = createContext<LLMContextValue | null>(null);\n\n// ============================================================================\n// Provider\n// ============================================================================\n\nexport interface LLMProviderProps extends Omit<LLMConfig, 'onLoadProgress'> {\n children: ReactNode;\n\n /**\n * Auto-load the model on mount\n * @default true\n */\n autoLoad?: boolean;\n\n /**\n * Callback when model finishes loading\n */\n onLoad?: (llm: LocalLLM) => void;\n\n /**\n * Callback on loading progress\n */\n onProgress?: (progress: LoadProgress) => void;\n\n /**\n * Callback on error\n */\n onError?: (error: Error) => void;\n}\n\n/**\n * Provider component that manages LLM lifecycle\n *\n * @example\n * ```tsx\n * <LLMProvider\n * model=\"qwen-2.5-0.5b\"\n * backend=\"auto\"\n * onProgress={(p) => console.log(p.progress)}\n * >\n * <App />\n * </LLMProvider>\n * ```\n */\nexport function LLMProvider({\n children,\n autoLoad = true,\n onLoad,\n onProgress,\n onError,\n ...config\n}: LLMProviderProps) {\n const [llm, setLLM] = useState<LocalLLM | null>(null);\n const [isLoading, setIsLoading] = useState(false);\n const [loadProgress, setLoadProgress] = useState<LoadProgress | null>(null);\n const [error, setError] = useState<Error | null>(null);\n\n // Track if we've loaded to prevent double-loading in StrictMode\n const hasLoadedRef = useRef(false);\n const configRef = useRef(config);\n configRef.current = config;\n\n const load = useCallback(async () => {\n if (isLoading) return;\n\n setIsLoading(true);\n setError(null);\n setLoadProgress({ progress: 0, status: 'Initializing...' 
});\n\n try {\n const instance = await createLLM({\n ...configRef.current,\n onLoadProgress: (progress) => {\n setLoadProgress(progress);\n onProgress?.(progress);\n },\n });\n\n setLLM(instance);\n setLoadProgress({ progress: 100, status: 'Ready' });\n onLoad?.(instance);\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n setError(error);\n onError?.(error);\n } finally {\n setIsLoading(false);\n }\n }, [isLoading, onLoad, onProgress, onError]);\n\n const unload = useCallback(async () => {\n if (llm) {\n await llm.unload();\n setLLM(null);\n setLoadProgress(null);\n hasLoadedRef.current = false;\n }\n }, [llm]);\n\n const reload = useCallback(async () => {\n await unload();\n await load();\n }, [unload, load]);\n\n // Auto-load on mount\n useEffect(() => {\n if (autoLoad && !hasLoadedRef.current && !llm && !isLoading) {\n hasLoadedRef.current = true;\n load();\n }\n }, [autoLoad, llm, isLoading, load]);\n\n // Cleanup on unmount\n useEffect(() => {\n return () => {\n if (llm) {\n llm.unload().catch(console.error);\n }\n };\n }, [llm]);\n\n const value = useMemo<LLMContextValue>(\n () => ({\n llm,\n isLoading,\n isReady: llm?.isReady ?? false,\n loadProgress,\n error,\n modelId: llm?.modelId ?? null,\n backend: llm?.backend ?? null,\n reload,\n unload,\n }),\n [llm, isLoading, loadProgress, error, reload, unload]\n );\n\n return <LLMContext.Provider value={value}>{children}</LLMContext.Provider>;\n}\n\n// ============================================================================\n// Hooks\n// ============================================================================\n\n/**\n * Access the LLM context\n *\n * @throws If used outside of LLMProvider\n *\n * @example\n * ```tsx\n * const { llm, isReady, loadProgress } = useLLM();\n * ```\n */\nexport function useLLM(): LLMContextValue {\n const context = useContext(LLMContext);\n\n if (!context) {\n throw new Error('useLLM must be used within an LLMProvider');\n }\n\n return context;\n}\n\n// ============================================================================\n// useChat Hook\n// ============================================================================\n\nexport interface UseChatOptions {\n /** Initial messages */\n initialMessages?: ChatMessage[];\n\n /** System prompt */\n systemPrompt?: string;\n\n /** Generation options */\n generateOptions?: GenerateOptions;\n\n /**\n * Queue messages while model is loading\n * When true, users can send messages before model loads - they'll be processed once ready\n * @default true\n */\n queueWhileLoading?: boolean;\n\n /** Called when generation starts */\n onStart?: () => void;\n\n /** Called on each token (streaming) */\n onToken?: (token: string, fullText: string) => void;\n\n /** Called when generation completes */\n onFinish?: (response: string) => void;\n\n /** Called on error */\n onError?: (error: Error) => void;\n}\n\nexport interface UseChatReturn {\n /** All messages in the conversation */\n messages: ChatMessage[];\n\n /** Current input value (for controlled input) */\n input: string;\n\n /** Set the input value */\n setInput: (input: string) => void;\n\n /** Whether currently generating a response */\n isGenerating: boolean;\n\n /** Whether a message is queued waiting for model to load */\n isPending: boolean;\n\n /** Current streaming text (while generating) */\n streamingText: string;\n\n /** Send a message and get a response */\n send: (content?: string) => Promise<string>;\n\n /** Stop the current generation */\n stop: () => void;\n\n 
/** Clear all messages */\n clear: () => void;\n\n /** Add a message without generating a response */\n append: (message: ChatMessage) => void;\n\n /** Reload/regenerate the last assistant message */\n reload: () => Promise<string>;\n}\n\n/**\n * Hook for managing a chat conversation with the LLM\n * \n * Supports **eager loading** - users can send messages while the model loads.\n * Messages are queued and processed automatically once the model is ready.\n *\n * @example\n * ```tsx\n * function ChatComponent() {\n * const { isLoading, loadProgress } = useLLM();\n * const {\n * messages,\n * input,\n * setInput,\n * send,\n * isGenerating,\n * isPending, // true if message is queued waiting for model\n * streamingText,\n * } = useChat({\n * systemPrompt: 'You are a helpful assistant.',\n * queueWhileLoading: true, // default: true\n * });\n *\n * return (\n * <div>\n * {isLoading && <p>Loading model... {loadProgress?.progress}%</p>}\n * \n * {messages.map((m, i) => (\n * <div key={i} className={m.role}>\n * {m.content}\n * </div>\n * ))}\n * \n * {isPending && <p className=\"pending\">Waiting for model to load...</p>}\n * {isGenerating && <div className=\"assistant\">{streamingText}</div>}\n * \n * {/* Users can type immediately, even before model loads *\\/}\n * <input\n * value={input}\n * onChange={(e) => setInput(e.target.value)}\n * onKeyDown={(e) => e.key === 'Enter' && send()}\n * placeholder={isLoading ? 'Type now, send when ready...' : 'Type a message...'}\n * />\n * <button onClick={() => send()} disabled={isGenerating}>\n * {isPending ? 'Queued...' : 'Send'}\n * </button>\n * </div>\n * );\n * }\n * ```\n */\nexport function useChat(options: UseChatOptions = {}): UseChatReturn {\n const { llm, isReady, isLoading } = useLLM();\n\n const {\n initialMessages = [],\n systemPrompt,\n generateOptions,\n queueWhileLoading = true,\n onStart,\n onToken,\n onFinish,\n onError,\n } = options;\n\n const [messages, setMessages] = useState<ChatMessage[]>(initialMessages);\n const [input, setInput] = useState('');\n const [isGenerating, setIsGenerating] = useState(false);\n const [streamingText, setStreamingText] = useState('');\n \n // Track pending message that's waiting for model to load\n const [pendingMessage, setPendingMessage] = useState<string | null>(null);\n\n const abortRef = useRef(false);\n const isProcessingRef = useRef(false);\n\n // Internal function to actually generate a response\n const generateResponse = useCallback(\n async (userContent: string, currentMessages: ChatMessage[]): Promise<string> => {\n if (!llm || !isReady || isProcessingRef.current) {\n return '';\n }\n\n isProcessingRef.current = true;\n\n // Add user message\n const userMessage: ChatMessage = { role: 'user', content: userContent };\n setMessages((prev) => [...prev, userMessage]);\n\n // Build messages array for API\n const apiMessages: ChatMessage[] = [];\n\n if (systemPrompt) {\n apiMessages.push({ role: 'system', content: systemPrompt });\n }\n\n apiMessages.push(...currentMessages, userMessage);\n\n // Start generation\n setIsGenerating(true);\n setStreamingText('');\n abortRef.current = false;\n onStart?.();\n\n try {\n const response = await llm.stream(\n apiMessages,\n (token, fullText) => {\n if (abortRef.current) return;\n setStreamingText(fullText);\n onToken?.(token, fullText);\n },\n generateOptions\n );\n\n if (!abortRef.current) {\n // Add assistant message\n const assistantMessage: ChatMessage = {\n role: 'assistant',\n content: response,\n };\n setMessages((prev) => [...prev, 
assistantMessage]);\n setStreamingText('');\n onFinish?.(response);\n }\n\n return response;\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n onError?.(error);\n return '';\n } finally {\n setIsGenerating(false);\n isProcessingRef.current = false;\n }\n },\n [llm, isReady, systemPrompt, generateOptions, onStart, onToken, onFinish, onError]\n );\n\n // Process pending message when model becomes ready\n useEffect(() => {\n if (isReady && pendingMessage && !isProcessingRef.current) {\n const messageToProcess = pendingMessage;\n setPendingMessage(null);\n generateResponse(messageToProcess, messages);\n }\n }, [isReady, pendingMessage, messages, generateResponse]);\n\n const send = useCallback(\n async (content?: string): Promise<string> => {\n const messageContent = content ?? input;\n\n if (!messageContent.trim()) {\n return '';\n }\n\n // Clear input if using controlled input\n if (!content) {\n setInput('');\n }\n\n // If model is ready, process immediately\n if (llm && isReady) {\n return generateResponse(messageContent, messages);\n }\n\n // If model is still loading and queueing is enabled, queue the message\n if (isLoading && queueWhileLoading) {\n // Show the user message immediately even though model isn't ready\n const userMessage: ChatMessage = { role: 'user', content: messageContent };\n setMessages((prev) => [...prev, userMessage]);\n setPendingMessage(messageContent);\n return '';\n }\n\n // Model not ready and not loading (error state or not initialized)\n return '';\n },\n [input, llm, isReady, isLoading, queueWhileLoading, messages, generateResponse]\n );\n\n const stop = useCallback(() => {\n abortRef.current = true;\n setIsGenerating(false);\n setPendingMessage(null);\n\n // If there was streaming text, save it as a partial message\n if (streamingText) {\n setMessages((prev) => [\n ...prev,\n { role: 'assistant', content: streamingText + '...' 
},\n ]);\n setStreamingText('');\n }\n }, [streamingText]);\n\n const clear = useCallback(() => {\n setMessages(initialMessages);\n setStreamingText('');\n setInput('');\n setPendingMessage(null);\n }, [initialMessages]);\n\n const append = useCallback((message: ChatMessage) => {\n setMessages((prev) => [...prev, message]);\n }, []);\n\n const reload = useCallback(async (): Promise<string> => {\n if (messages.length === 0) return '';\n\n // Find the last user message\n const lastUserIndex = messages.findLastIndex((m) => m.role === 'user');\n if (lastUserIndex === -1) return '';\n\n // Remove messages from the last user message onwards\n const previousMessages = messages.slice(0, lastUserIndex);\n const lastUserMessage = messages[lastUserIndex];\n\n // Safety check (should never happen given lastUserIndex check)\n if (!lastUserMessage) return '';\n\n setMessages(previousMessages);\n\n // Re-send\n return send(lastUserMessage.content);\n }, [messages, send]);\n\n return {\n messages,\n input,\n setInput,\n isGenerating,\n isPending: pendingMessage !== null,\n streamingText,\n send,\n stop,\n clear,\n append,\n reload,\n };\n}\n\n// ============================================================================\n// useStream Hook\n// ============================================================================\n\nexport interface UseStreamOptions {\n /** Generation options */\n generateOptions?: GenerateOptions;\n\n /** Called on each token */\n onToken?: (token: string, fullText: string) => void;\n\n /** Called when complete */\n onFinish?: (response: string) => void;\n\n /** Called on error */\n onError?: (error: Error) => void;\n}\n\nexport interface UseStreamReturn {\n /** Current streamed text */\n text: string;\n\n /** Whether currently streaming */\n isStreaming: boolean;\n\n /** Start streaming a response */\n stream: (messages: ChatMessage[] | string) => Promise<string>;\n\n /** Stop streaming */\n stop: () => void;\n\n /** Clear the text */\n clear: () => void;\n}\n\n/**\n * Hook for simple streaming generation\n *\n * @example\n * ```tsx\n * function StreamComponent() {\n * const { text, isStreaming, stream, clear } = useStream();\n *\n * return (\n * <div>\n * <pre>{text}</pre>\n * <button onClick={() => stream('Tell me a story')} disabled={isStreaming}>\n * Generate\n * </button>\n * <button onClick={clear}>Clear</button>\n * </div>\n * );\n * }\n * ```\n */\nexport function useStream(options: UseStreamOptions = {}): UseStreamReturn {\n const { llm, isReady } = useLLM();\n const { generateOptions, onToken, onFinish, onError } = options;\n\n const [text, setText] = useState('');\n const [isStreaming, setIsStreaming] = useState(false);\n\n const abortRef = useRef(false);\n\n const stream = useCallback(\n async (input: ChatMessage[] | string): Promise<string> => {\n if (!llm || !isReady) {\n return '';\n }\n\n setIsStreaming(true);\n setText('');\n abortRef.current = false;\n\n try {\n const response = await llm.stream(\n input,\n (token, fullText) => {\n if (abortRef.current) return;\n setText(fullText);\n onToken?.(token, fullText);\n },\n generateOptions\n );\n\n onFinish?.(response);\n return response;\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err));\n onError?.(error);\n return '';\n } finally {\n setIsStreaming(false);\n }\n },\n [llm, isReady, generateOptions, onToken, onFinish, onError]\n );\n\n const stop = useCallback(() => {\n abortRef.current = true;\n setIsStreaming(false);\n }, []);\n\n const clear = useCallback(() => {\n setText('');\n }, []);\n\n return {\n text,\n isStreaming,\n stream,\n stop,\n clear,\n };\n}\n\n// ============================================================================\n// useCompletion Hook (simple single-shot)\n// ============================================================================\n\nexport interface UseCompletionOptions {\n /** Generation options */\n generateOptions?: GenerateOptions;\n}\n\nexport interface UseCompletionReturn {\n /** Current completion text */\n completion: string;\n\n /** Whether currently generating */\n isLoading: boolean;\n\n /** Generate a completion (non-streaming) */\n complete: (prompt: string) => Promise<string>;\n\n /** Clear the completion */\n clear: () => void;\n}\n\n/**\n * Hook for simple non-streaming completion\n *\n * @example\n * ```tsx\n * function CompletionComponent() {\n * const { completion, isLoading, complete } = useCompletion();\n *\n * return (\n * <div>\n * <p>{completion}</p>\n * <button onClick={() => complete('Summarize this')} disabled={isLoading}>\n * Complete\n * </button>\n * </div>\n * );\n * }\n * ```\n */\nexport function useCompletion(\n options: UseCompletionOptions = {}\n): UseCompletionReturn {\n const { llm, isReady } = useLLM();\n const { generateOptions } = options;\n\n const [completion, setCompletion] = useState('');\n const [isLoading, setIsLoading] = useState(false);\n\n const complete = useCallback(\n async (prompt: string): Promise<string> => {\n if (!llm || !isReady) {\n return '';\n }\n\n setIsLoading(true);\n\n try {\n const response = await llm.chat(prompt, generateOptions);\n setCompletion(response);\n return response;\n } catch (err) {\n console.error('[useCompletion] Error:', err);\n return '';\n } finally {\n setIsLoading(false);\n }\n },\n [llm, isReady, generateOptions]\n );\n\n const clear = useCallback(() => {\n setCompletion('');\n }, []);\n\n return {\n completion,\n isLoading,\n complete,\n clear,\n };\n}\n\n// ============================================================================\n// Loading Component\n// ============================================================================\n\nexport interface LLMLoadingProps {\n /** Custom loading UI */\n children?: ReactNode;\n\n /** Class name for the wrapper */\n className?: string;\n}\n\n/**\n * Component that shows loading state while LLM is loading\n *\n * @example\n * ```tsx\n * <LLMLoading>\n * <p>Loading model...</p>\n * </LLMLoading>\n * ```\n */\nexport function LLMLoading({ children, className }: LLMLoadingProps) {\n const { isLoading, loadProgress } = useLLM();\n\n if (!isLoading) return null;\n\n if (children) {\n return <div className={className}>{children}</div>;\n }\n\n return (\n <div className={className}>\n <p>Loading model... {loadProgress?.progress ?? 
0}%</p>\n <p>{loadProgress?.status}</p>\n </div>\n );\n}\n\n// ============================================================================\n// Ready Gate Component\n// ============================================================================\n\nexport interface LLMReadyProps {\n /** Content to show when ready */\n children: ReactNode;\n\n /** Content to show while loading */\n fallback?: ReactNode;\n}\n\n/**\n * Component that only renders children when LLM is ready\n *\n * @example\n * ```tsx\n * <LLMReady fallback={<Loading />}>\n * <ChatInterface />\n * </LLMReady>\n * ```\n */\nexport function LLMReady({ children, fallback = null }: LLMReadyProps) {\n const { isReady, isLoading } = useLLM();\n\n if (isLoading || !isReady) {\n return <>{fallback}</>;\n }\n\n return <>{children}</>;\n}\n",
+ "/**\n * React Integration for Local LLM\n *\n * Provides React context, hooks, and components for easy LLM integration.\n *\n * @example\n * ```tsx\n * import { LLMProvider, useChat } from 'local-llm/react';\n *\n * function App() {\n * return (\n * <LLMProvider model=\"qwen-2.5-0.5b\">\n * <ChatComponent />\n * </LLMProvider>\n * );\n * }\n *\n * function ChatComponent() {\n * const { messages, send, isGenerating } = useChat();\n *\n * return (\n * <div>\n * {messages.map((m, i) => <p key={i}>{m.content}</p>)}\n * <button onClick={() => send('Hello!')}>Send</button>\n * </div>\n * );\n * }\n * ```\n */\n\nimport * as React from 'react';\nimport {\n createContext,\n useContext,\n useState,\n useCallback,\n useEffect,\n useMemo,\n useRef,\n} from 'react';\n\nimport type {\n LLMConfig,\n ChatMessage,\n GenerateOptions,\n LoadProgress,\n Backend,\n} from '../types';\n\nimport { createLLM, type LocalLLM } from '../core';\n\n// Re-export types for convenience\nexport type { ChatMessage, GenerateOptions, LoadProgress, Backend } from '../types';\nexport type { LocalLLM } from '../core';\n\n// ============================================================================\n// Context Types\n// ============================================================================\n\nexport interface LLMContextValue {\n /** The LLM instance (null while loading) */\n llm: LocalLLM | null;\n\n /** Whether the model is currently loading */\n isLoading: boolean;\n\n /** Whether the model is ready for inference */\n isReady: boolean;\n\n /** Current loading progress */\n loadProgress: LoadProgress | null;\n\n /** Error if loading failed */\n error: Error | null;\n\n /** Current model ID */\n modelId: string | null;\n\n /** Backend being used */\n backend: Backend | null;\n\n /** Manually reload the model */\n reload: () => Promise<void>;\n\n /** Unload the model */\n unload: () => Promise<void>;\n}\n\nconst LLMContext = createContext<LLMContextValue | null>(null);\n\n// ============================================================================\n// Provider\n// ============================================================================\n\nexport interface LLMProviderProps extends Omit<LLMConfig, 'onLoadProgress'> {\n children: React.ReactNode;\n\n /**\n * Auto-load the model on mount\n * @default true\n */\n autoLoad?: boolean;\n\n /**\n * Callback when model finishes loading\n */\n onLoad?: (llm: LocalLLM) => void;\n\n /**\n * Callback on loading progress\n */\n onProgress?: (progress: LoadProgress) => void;\n\n /**\n * Callback on error\n */\n onError?: (error: Error) => void;\n}\n\n/**\n * Provider component that manages LLM lifecycle\n *\n * @example\n * ```tsx\n * <LLMProvider\n * model=\"qwen-2.5-0.5b\"\n * backend=\"auto\"\n * onProgress={(p) => console.log(p.progress)}\n * >\n * <App />\n * </LLMProvider>\n * ```\n */\nexport function LLMProvider({\n children,\n autoLoad = true,\n onLoad,\n onProgress,\n onError,\n ...config\n}: LLMProviderProps) {\n const [llm, setLLM] = useState<LocalLLM | null>(null);\n const [isLoading, setIsLoading] = useState(false);\n const [loadProgress, setLoadProgress] = useState<LoadProgress | null>(null);\n const [error, setError] = useState<Error | null>(null);\n\n // Track if we've loaded to prevent double-loading in StrictMode\n const hasLoadedRef = useRef(false);\n const configRef = useRef(config);\n configRef.current = config;\n\n const load = useCallback(async () => {\n if (isLoading) return;\n\n setIsLoading(true);\n setError(null);\n setLoadProgress({ progress: 0, 
status: 'Initializing...' });\n\n try {\n const instance = await createLLM({\n ...configRef.current,\n onLoadProgress: (progress) => {\n setLoadProgress(progress);\n onProgress?.(progress);\n },\n });\n\n setLLM(instance);\n setLoadProgress({ progress: 100, status: 'Ready' });\n onLoad?.(instance);\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n setError(error);\n onError?.(error);\n } finally {\n setIsLoading(false);\n }\n }, [isLoading, onLoad, onProgress, onError]);\n\n const unload = useCallback(async () => {\n if (llm) {\n await llm.unload();\n setLLM(null);\n setLoadProgress(null);\n hasLoadedRef.current = false;\n }\n }, [llm]);\n\n const reload = useCallback(async () => {\n await unload();\n await load();\n }, [unload, load]);\n\n // Auto-load on mount\n useEffect(() => {\n if (autoLoad && !hasLoadedRef.current && !llm && !isLoading) {\n hasLoadedRef.current = true;\n load();\n }\n }, [autoLoad, llm, isLoading, load]);\n\n // Cleanup on unmount\n useEffect(() => {\n return () => {\n if (llm) {\n llm.unload().catch(console.error);\n }\n };\n }, [llm]);\n\n const value = useMemo<LLMContextValue>(\n () => ({\n llm,\n isLoading,\n isReady: llm?.isReady ?? false,\n loadProgress,\n error,\n modelId: llm?.modelId ?? null,\n backend: llm?.backend ?? null,\n reload,\n unload,\n }),\n [llm, isLoading, loadProgress, error, reload, unload]\n );\n\n return <LLMContext.Provider value={value}>{children}</LLMContext.Provider>;\n}\n\n// ============================================================================\n// Hooks\n// ============================================================================\n\n/**\n * Access the LLM context\n *\n * @throws If used outside of LLMProvider\n *\n * @example\n * ```tsx\n * const { llm, isReady, loadProgress } = useLLM();\n * ```\n */\nexport function useLLM(): LLMContextValue {\n const context = useContext(LLMContext);\n\n if (!context) {\n throw new Error('useLLM must be used within an LLMProvider');\n }\n\n return context;\n}\n\n// ============================================================================\n// useChat Hook\n// ============================================================================\n\nexport interface UseChatOptions {\n /** Initial messages */\n initialMessages?: ChatMessage[];\n\n /** System prompt */\n systemPrompt?: string;\n\n /** Generation options */\n generateOptions?: GenerateOptions;\n\n /**\n * Queue messages while model is loading\n * When true, users can send messages before model loads - they'll be processed once ready\n * @default true\n */\n queueWhileLoading?: boolean;\n\n /** Called when generation starts */\n onStart?: () => void;\n\n /** Called on each token (streaming) */\n onToken?: (token: string, fullText: string) => void;\n\n /** Called when generation completes */\n onFinish?: (response: string) => void;\n\n /** Called on error */\n onError?: (error: Error) => void;\n}\n\nexport interface UseChatReturn {\n /** All messages in the conversation */\n messages: ChatMessage[];\n\n /** Current input value (for controlled input) */\n input: string;\n\n /** Set the input value */\n setInput: (input: string) => void;\n\n /** Whether currently generating a response */\n isGenerating: boolean;\n\n /** Whether a message is queued waiting for model to load */\n isPending: boolean;\n\n /** Current streaming text (while generating) */\n streamingText: string;\n\n /** Send a message and get a response */\n send: (content?: string) => Promise<string>;\n\n /** Stop the current generation 
*/\n stop: () => void;\n\n /** Clear all messages */\n clear: () => void;\n\n /** Add a message without generating a response */\n append: (message: ChatMessage) => void;\n\n /** Reload/regenerate the last assistant message */\n reload: () => Promise<string>;\n}\n\n/**\n * Hook for managing a chat conversation with the LLM\n * \n * Supports **eager loading** - users can send messages while the model loads.\n * Messages are queued and processed automatically once the model is ready.\n *\n * @example\n * ```tsx\n * function ChatComponent() {\n * const { isLoading, loadProgress } = useLLM();\n * const {\n * messages,\n * input,\n * setInput,\n * send,\n * isGenerating,\n * isPending, // true if message is queued waiting for model\n * streamingText,\n * } = useChat({\n * systemPrompt: 'You are a helpful assistant.',\n * queueWhileLoading: true, // default: true\n * });\n *\n * return (\n * <div>\n * {isLoading && <p>Loading model... {loadProgress?.progress}%</p>}\n * \n * {messages.map((m, i) => (\n * <div key={i} className={m.role}>\n * {m.content}\n * </div>\n * ))}\n * \n * {isPending && <p className=\"pending\">Waiting for model to load...</p>}\n * {isGenerating && <div className=\"assistant\">{streamingText}</div>}\n * \n * {/* Users can type immediately, even before model loads *\\/}\n * <input\n * value={input}\n * onChange={(e) => setInput(e.target.value)}\n * onKeyDown={(e) => e.key === 'Enter' && send()}\n * placeholder={isLoading ? 'Type now, send when ready...' : 'Type a message...'}\n * />\n * <button onClick={() => send()} disabled={isGenerating}>\n * {isPending ? 'Queued...' : 'Send'}\n * </button>\n * </div>\n * );\n * }\n * ```\n */\nexport function useChat(options: UseChatOptions = {}): UseChatReturn {\n const { llm, isReady, isLoading } = useLLM();\n\n const {\n initialMessages = [],\n systemPrompt,\n generateOptions,\n queueWhileLoading = true,\n onStart,\n onToken,\n onFinish,\n onError,\n } = options;\n\n const [messages, setMessages] = useState<ChatMessage[]>(initialMessages);\n const [input, setInput] = useState('');\n const [isGenerating, setIsGenerating] = useState(false);\n const [streamingText, setStreamingText] = useState('');\n \n // Track pending message that's waiting for model to load\n const [pendingMessage, setPendingMessage] = useState<string | null>(null);\n\n const abortRef = useRef(false);\n const isProcessingRef = useRef(false);\n\n // Internal function to actually generate a response\n const generateResponse = useCallback(\n async (userContent: string, currentMessages: ChatMessage[]): Promise<string> => {\n if (!llm || !isReady || isProcessingRef.current) {\n return '';\n }\n\n isProcessingRef.current = true;\n\n // Add user message\n const userMessage: ChatMessage = { role: 'user', content: userContent };\n setMessages((prev) => [...prev, userMessage]);\n\n // Build messages array for API\n const apiMessages: ChatMessage[] = [];\n\n if (systemPrompt) {\n apiMessages.push({ role: 'system', content: systemPrompt });\n }\n\n apiMessages.push(...currentMessages, userMessage);\n\n // Start generation\n setIsGenerating(true);\n setStreamingText('');\n abortRef.current = false;\n onStart?.();\n\n try {\n const response = await llm.stream(\n apiMessages,\n (token, fullText) => {\n if (abortRef.current) return;\n setStreamingText(fullText);\n onToken?.(token, fullText);\n },\n generateOptions\n );\n\n if (!abortRef.current) {\n // Add assistant message\n const assistantMessage: ChatMessage = {\n role: 'assistant',\n content: response,\n };\n setMessages((prev) 
=> [...prev, assistantMessage]);\n setStreamingText('');\n onFinish?.(response);\n }\n\n return response;\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n onError?.(error);\n return '';\n } finally {\n setIsGenerating(false);\n isProcessingRef.current = false;\n }\n },\n [llm, isReady, systemPrompt, generateOptions, onStart, onToken, onFinish, onError]\n );\n\n // Process pending message when model becomes ready\n useEffect(() => {\n if (isReady && pendingMessage && !isProcessingRef.current) {\n const messageToProcess = pendingMessage;\n setPendingMessage(null);\n generateResponse(messageToProcess, messages);\n }\n }, [isReady, pendingMessage, messages, generateResponse]);\n\n const send = useCallback(\n async (content?: string): Promise<string> => {\n const messageContent = content ?? input;\n\n if (!messageContent.trim()) {\n return '';\n }\n\n // Clear input if using controlled input\n if (!content) {\n setInput('');\n }\n\n // If model is ready, process immediately\n if (llm && isReady) {\n return generateResponse(messageContent, messages);\n }\n\n // If model is still loading and queueing is enabled, queue the message\n if (isLoading && queueWhileLoading) {\n // Show the user message immediately even though model isn't ready\n const userMessage: ChatMessage = { role: 'user', content: messageContent };\n setMessages((prev) => [...prev, userMessage]);\n setPendingMessage(messageContent);\n return '';\n }\n\n // Model not ready and not loading (error state or not initialized)\n return '';\n },\n [input, llm, isReady, isLoading, queueWhileLoading, messages, generateResponse]\n );\n\n const stop = useCallback(() => {\n abortRef.current = true;\n setIsGenerating(false);\n setPendingMessage(null);\n\n // If there was streaming text, save it as a partial message\n if (streamingText) {\n setMessages((prev) => [\n ...prev,\n { role: 'assistant', content: streamingText + '...' 
},\n ]);\n setStreamingText('');\n }\n }, [streamingText]);\n\n const clear = useCallback(() => {\n setMessages(initialMessages);\n setStreamingText('');\n setInput('');\n setPendingMessage(null);\n }, [initialMessages]);\n\n const append = useCallback((message: ChatMessage) => {\n setMessages((prev) => [...prev, message]);\n }, []);\n\n const reload = useCallback(async (): Promise<string> => {\n if (messages.length === 0) return '';\n\n // Find the last user message\n const lastUserIndex = messages.findLastIndex((m) => m.role === 'user');\n if (lastUserIndex === -1) return '';\n\n // Remove messages from the last user message onwards\n const previousMessages = messages.slice(0, lastUserIndex);\n const lastUserMessage = messages[lastUserIndex];\n\n // Safety check (should never happen given lastUserIndex check)\n if (!lastUserMessage) return '';\n\n setMessages(previousMessages);\n\n // Re-send\n return send(lastUserMessage.content);\n }, [messages, send]);\n\n return {\n messages,\n input,\n setInput,\n isGenerating,\n isPending: pendingMessage !== null,\n streamingText,\n send,\n stop,\n clear,\n append,\n reload,\n };\n}\n\n// ============================================================================\n// useStream Hook\n// ============================================================================\n\nexport interface UseStreamOptions {\n /** Generation options */\n generateOptions?: GenerateOptions;\n\n /** Called on each token */\n onToken?: (token: string, fullText: string) => void;\n\n /** Called when complete */\n onFinish?: (response: string) => void;\n\n /** Called on error */\n onError?: (error: Error) => void;\n}\n\nexport interface UseStreamReturn {\n /** Current streamed text */\n text: string;\n\n /** Whether currently streaming */\n isStreaming: boolean;\n\n /** Start streaming a response */\n stream: (messages: ChatMessage[] | string) => Promise<string>;\n\n /** Stop streaming */\n stop: () => void;\n\n /** Clear the text */\n clear: () => void;\n}\n\n/**\n * Hook for simple streaming generation\n *\n * @example\n * ```tsx\n * function StreamComponent() {\n * const { text, isStreaming, stream, clear } = useStream();\n *\n * return (\n * <div>\n * <pre>{text}</pre>\n * <button onClick={() => stream('Tell me a story')} disabled={isStreaming}>\n * Generate\n * </button>\n * <button onClick={clear}>Clear</button>\n * </div>\n * );\n * }\n * ```\n */\nexport function useStream(options: UseStreamOptions = {}): UseStreamReturn {\n const { llm, isReady } = useLLM();\n const { generateOptions, onToken, onFinish, onError } = options;\n\n const [text, setText] = useState('');\n const [isStreaming, setIsStreaming] = useState(false);\n\n const abortRef = useRef(false);\n\n const stream = useCallback(\n async (input: ChatMessage[] | string): Promise<string> => {\n if (!llm || !isReady) {\n return '';\n }\n\n setIsStreaming(true);\n setText('');\n abortRef.current = false;\n\n try {\n const response = await llm.stream(\n input,\n (token, fullText) => {\n if (abortRef.current) return;\n setText(fullText);\n onToken?.(token, fullText);\n },\n generateOptions\n );\n\n onFinish?.(response);\n return response;\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err));\n onError?.(error);\n return '';\n } finally {\n setIsStreaming(false);\n }\n },\n [llm, isReady, generateOptions, onToken, onFinish, onError]\n );\n\n const stop = useCallback(() => {\n abortRef.current = true;\n setIsStreaming(false);\n }, []);\n\n const clear = useCallback(() => {\n setText('');\n }, []);\n\n return {\n text,\n isStreaming,\n stream,\n stop,\n clear,\n };\n}\n\n// ============================================================================\n// useCompletion Hook (simple single-shot)\n// ============================================================================\n\nexport interface UseCompletionOptions {\n /** Generation options */\n generateOptions?: GenerateOptions;\n}\n\nexport interface UseCompletionReturn {\n /** Current completion text */\n completion: string;\n\n /** Whether currently generating */\n isLoading: boolean;\n\n /** Generate a completion (non-streaming) */\n complete: (prompt: string) => Promise<string>;\n\n /** Clear the completion */\n clear: () => void;\n}\n\n/**\n * Hook for simple non-streaming completion\n *\n * @example\n * ```tsx\n * function CompletionComponent() {\n * const { completion, isLoading, complete } = useCompletion();\n *\n * return (\n * <div>\n * <p>{completion}</p>\n * <button onClick={() => complete('Summarize this')} disabled={isLoading}>\n * Complete\n * </button>\n * </div>\n * );\n * }\n * ```\n */\nexport function useCompletion(\n options: UseCompletionOptions = {}\n): UseCompletionReturn {\n const { llm, isReady } = useLLM();\n const { generateOptions } = options;\n\n const [completion, setCompletion] = useState('');\n const [isLoading, setIsLoading] = useState(false);\n\n const complete = useCallback(\n async (prompt: string): Promise<string> => {\n if (!llm || !isReady) {\n return '';\n }\n\n setIsLoading(true);\n\n try {\n const response = await llm.chat(prompt, generateOptions);\n setCompletion(response);\n return response;\n } catch (err) {\n console.error('[useCompletion] Error:', err);\n return '';\n } finally {\n setIsLoading(false);\n }\n },\n [llm, isReady, generateOptions]\n );\n\n const clear = useCallback(() => {\n setCompletion('');\n }, []);\n\n return {\n completion,\n isLoading,\n complete,\n clear,\n };\n}\n\n// ============================================================================\n// Loading Component\n// ============================================================================\n\nexport interface LLMLoadingProps {\n /** Custom loading UI */\n children?: React.ReactNode;\n\n /** Class name for the wrapper */\n className?: string;\n}\n\n/**\n * Component that shows loading state while LLM is loading\n *\n * @example\n * ```tsx\n * <LLMLoading>\n * <p>Loading model...</p>\n * </LLMLoading>\n * ```\n */\nexport function LLMLoading({ children, className }: LLMLoadingProps) {\n const { isLoading, loadProgress } = useLLM();\n\n if (!isLoading) return null;\n\n if (children) {\n return <div className={className}>{children}</div>;\n }\n\n return (\n <div className={className}>\n <p>Loading model... {loadProgress?.progress ?? 
0}%</p>\n <p>{loadProgress?.status}</p>\n </div>\n );\n}\n\n// ============================================================================\n// Ready Gate Component\n// ============================================================================\n\nexport interface LLMReadyProps {\n /** Content to show when ready */\n children: React.ReactNode;\n\n /** Content to show while loading */\n fallback?: React.ReactNode;\n}\n\n/**\n * Component that only renders children when LLM is ready\n *\n * @example\n * ```tsx\n * <LLMReady fallback={<Loading />}>\n * <ChatInterface />\n * </LLMReady>\n * ```\n */\nexport function LLMReady({ children, fallback = null }: LLMReadyProps) {\n const { isReady, isLoading } = useLLM();\n\n if (isLoading || !isReady) {\n return <>{fallback}</>;\n }\n\n return <>{children}</>;\n}\n",
  "/**\n * Browser capability detection utilities\n */\n\nimport type { BrowserCapabilities, Backend, Device } from './types';\n\n/**\n * Check if WebGPU is available in the current browser\n */\nexport async function checkWebGPU(): Promise<boolean> {\n if (typeof navigator === 'undefined') return false;\n if (!('gpu' in navigator)) return false;\n\n try {\n const gpu = (navigator as unknown as { gpu: { requestAdapter(): Promise<unknown> } }).gpu;\n if (!gpu) return false;\n\n const adapter = await gpu.requestAdapter();\n return adapter !== null;\n } catch {\n return false;\n }\n}\n\n/**\n * Check if WebAssembly is available\n */\nexport function checkWasm(): boolean {\n if (typeof WebAssembly === 'undefined') return false;\n\n try {\n // Check for streaming compilation support\n return (\n typeof WebAssembly.instantiateStreaming === 'function' ||\n typeof WebAssembly.instantiate === 'function'\n );\n } catch {\n return false;\n }\n}\n\n/**\n * Detect browser capabilities and recommend backend/device\n */\nexport async function detectCapabilities(): Promise<BrowserCapabilities> {\n const webgpu = await checkWebGPU();\n const wasm = checkWasm();\n\n let recommendedBackend: Backend = 'transformers';\n let recommendedDevice: Device = 'wasm';\n\n if (webgpu) {\n // WebGPU available - prefer WebLLM for best performance\n recommendedBackend = 'webllm';\n recommendedDevice = 'webgpu';\n } else if (wasm) {\n // WASM only - use Transformers.js with WASM backend\n recommendedBackend = 'transformers';\n recommendedDevice = 'wasm';\n }\n\n return {\n webgpu,\n wasm,\n recommendedBackend,\n recommendedDevice,\n };\n}\n\n/**\n * Log capability detection results to console\n */\nexport async function logCapabilities(): Promise<BrowserCapabilities> {\n const caps = await detectCapabilities();\n\n console.log('[LocalLLM] Browser Capabilities:');\n console.log(` WebGPU: ${caps.webgpu ? '✓ supported' : '✗ not available'}`);\n console.log(` WASM: ${caps.wasm ? '✓ supported' : '✗ not available'}`);\n console.log(` Recommended backend: ${caps.recommendedBackend}`);\n console.log(` Recommended device: ${caps.recommendedDevice}`);\n\n return caps;\n}\n",
  "/**\n * WebLLM Backend Implementation\n * Primary backend using MLC's WebLLM for high-performance inference\n */\n\nimport type {\n LLMProvider,\n ChatMessage,\n GenerateOptions,\n StreamCallback,\n LoadProgressCallback,\n LoadProgress,\n Backend,\n} from '../types';\n\n// WebLLM types (dynamic import)\ntype MLCEngine = import('@mlc-ai/web-llm').MLCEngine;\ntype CreateMLCEngine = typeof import('@mlc-ai/web-llm').CreateMLCEngine;\n\n/**\n * Default model for WebLLM backend\n * Using Phi 3.5 Mini as it's well-tested and reasonably sized\n */\nexport const DEFAULT_WEBLLM_MODEL = 'Phi-3.5-mini-instruct-q4f16_1-MLC';\n\n/**\n * Popular WebLLM model options with correct MLC model IDs\n * These IDs must match exactly what's in web-llm's prebuiltAppConfig\n * \n * @see https://github.com/mlc-ai/web-llm/blob/main/src/config.ts\n */\nexport const WEBLLM_MODELS = {\n // === Llama 3.2 Models (Meta) - Excellent quality, reasonable size ===\n 'llama-3.2-1b': 'Llama-3.2-1B-Instruct-q4f16_1-MLC',\n 'llama-3.2-3b': 'Llama-3.2-3B-Instruct-q4f16_1-MLC',\n\n // === Llama 3.1 Models (Meta) - Larger, higher quality ===\n 'llama-3.1-8b': 'Llama-3.1-8B-Instruct-q4f16_1-MLC',\n 'llama-3.1-8b-1k': 'Llama-3.1-8B-Instruct-q4f16_1-MLC-1k', // Smaller context for lower memory\n\n // === Phi Models (Microsoft) - Great balance of size/quality ===\n 'phi-3.5-mini': 'Phi-3.5-mini-instruct-q4f16_1-MLC',\n 'phi-3.5-mini-1k': 'Phi-3.5-mini-instruct-q4f16_1-MLC-1k', // Smaller context for lower memory\n 'phi-3.5-vision': 'Phi-3.5-vision-instruct-q4f16_1-MLC', // Vision model\n\n // === Qwen 2.5 Models (Alibaba) - Good multilingual support ===\n 'qwen-2.5-0.5b': 'Qwen2.5-0.5B-Instruct-q4f16_1-MLC',\n 'qwen-2.5-1.5b': 'Qwen2.5-1.5B-Instruct-q4f16_1-MLC',\n 'qwen-2.5-3b': 'Qwen2.5-3B-Instruct-q4f16_1-MLC',\n 'qwen-2.5-7b': 'Qwen2.5-7B-Instruct-q4f16_1-MLC',\n 'qwen-2.5-coder-0.5b': 'Qwen2.5-Coder-0.5B-Instruct-q4f16_1-MLC',\n 'qwen-2.5-coder-1.5b': 'Qwen2.5-Coder-1.5B-Instruct-q4f16_1-MLC',\n\n // === Qwen 3 Models (Alibaba) - Latest generation ===\n 'qwen-3-0.6b': 'Qwen3-0.6B-q4f16_1-MLC',\n 'qwen-3-1.7b': 'Qwen3-1.7B-q4f16_1-MLC',\n 'qwen-3-4b': 'Qwen3-4B-q4f16_1-MLC',\n 'qwen-3-8b': 'Qwen3-8B-q4f16_1-MLC',\n\n // === Gemma 2 Models (Google) - Efficient and capable ===\n 'gemma-2-2b': 'gemma-2-2b-it-q4f16_1-MLC',\n 'gemma-2-2b-1k': 'gemma-2-2b-it-q4f16_1-MLC-1k', // Smaller context for lower memory\n 'gemma-2-9b': 'gemma-2-9b-it-q4f16_1-MLC',\n\n // === SmolLM2 Models (HuggingFace) - Ultra lightweight ===\n 'smollm2-135m': 'SmolLM2-135M-Instruct-q0f16-MLC',\n 'smollm2-360m': 'SmolLM2-360M-Instruct-q4f16_1-MLC',\n 'smollm2-1.7b': 'SmolLM2-1.7B-Instruct-q4f16_1-MLC',\n\n // === Mistral Models - Good general purpose ===\n 'mistral-7b': 'Mistral-7B-Instruct-v0.3-q4f16_1-MLC',\n\n // === DeepSeek R1 Distill Models - Reasoning focused ===\n 'deepseek-r1-qwen-7b': 'DeepSeek-R1-Distill-Qwen-7B-q4f16_1-MLC',\n 'deepseek-r1-llama-8b': 'DeepSeek-R1-Distill-Llama-8B-q4f16_1-MLC',\n\n // === Hermes Models - Function calling capable ===\n 'hermes-3-llama-3.2-3b': 'Hermes-3-Llama-3.2-3B-q4f16_1-MLC',\n 'hermes-3-llama-3.1-8b': 'Hermes-3-Llama-3.1-8B-q4f16_1-MLC',\n} as const;\n\nexport type WebLLMModelAlias = keyof typeof WEBLLM_MODELS;\n\n/**\n * Model size estimates for UI display\n */\nexport const WEBLLM_MODEL_SIZES: Record<WebLLMModelAlias, string> = {\n 'llama-3.2-1b': '~880MB',\n 'llama-3.2-3b': '~2.3GB',\n 'llama-3.1-8b': '~5GB',\n 'llama-3.1-8b-1k': '~4.6GB',\n 'phi-3.5-mini': '~3.7GB',\n 'phi-3.5-mini-1k': '~2.5GB',\n 
'phi-3.5-vision': '~4GB',\n 'qwen-2.5-0.5b': '~945MB',\n 'qwen-2.5-1.5b': '~1.6GB',\n 'qwen-2.5-3b': '~2.5GB',\n 'qwen-2.5-7b': '~5.1GB',\n 'qwen-2.5-coder-0.5b': '~945MB',\n 'qwen-2.5-coder-1.5b': '~1.6GB',\n 'qwen-3-0.6b': '~1.4GB',\n 'qwen-3-1.7b': '~2GB',\n 'qwen-3-4b': '~3.4GB',\n 'qwen-3-8b': '~5.7GB',\n 'gemma-2-2b': '~1.9GB',\n 'gemma-2-2b-1k': '~1.6GB',\n 'gemma-2-9b': '~6.4GB',\n 'smollm2-135m': '~360MB',\n 'smollm2-360m': '~376MB',\n 'smollm2-1.7b': '~1.8GB',\n 'mistral-7b': '~4.6GB',\n 'deepseek-r1-qwen-7b': '~5.1GB',\n 'deepseek-r1-llama-8b': '~5GB',\n 'hermes-3-llama-3.2-3b': '~2.3GB',\n 'hermes-3-llama-3.1-8b': '~4.9GB',\n};\n\n/**\n * Resolve model alias to full MLC model ID\n */\nfunction resolveModelId(model: string): string {\n if (model in WEBLLM_MODELS) {\n return WEBLLM_MODELS[model as WebLLMModelAlias];\n }\n return model;\n}\n\n/**\n * WebLLM provider implementation\n */\nexport class WebLLMProvider implements LLMProvider {\n readonly backend: Backend = 'webllm';\n\n private engine: MLCEngine | null = null;\n private currentModel: string | null = null;\n\n get isReady(): boolean {\n return this.engine !== null && this.currentModel !== null;\n }\n\n get modelId(): string | null {\n return this.currentModel;\n }\n\n async load(modelId: string, onProgress?: LoadProgressCallback): Promise<void> {\n const resolvedModel = resolveModelId(modelId);\n\n // Dynamic import to avoid bundling issues\n const { CreateMLCEngine } = await import('@mlc-ai/web-llm');\n\n // Progress callback adapter\n const initProgressCallback = (report: { text: string; progress: number }) => {\n if (onProgress) {\n const progress: LoadProgress = {\n progress: Math.round(report.progress * 100),\n status: report.text,\n };\n onProgress(progress);\n }\n };\n\n this.engine = await CreateMLCEngine(resolvedModel, {\n initProgressCallback,\n });\n\n this.currentModel = resolvedModel;\n }\n\n async chat(messages: ChatMessage[], options?: GenerateOptions): Promise<string> {\n if (!this.engine) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n const response = await this.engine.chat.completions.create({\n messages: messages.map((m) => ({\n role: m.role,\n content: m.content,\n })),\n temperature: options?.temperature ?? 0.7,\n max_tokens: options?.maxTokens ?? 512,\n top_p: options?.topP ?? 0.95,\n stop: options?.stopSequences,\n });\n\n return response.choices[0]?.message?.content ?? '';\n }\n\n async stream(\n messages: ChatMessage[],\n onToken: StreamCallback,\n options?: GenerateOptions\n ): Promise<string> {\n if (!this.engine) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n const chunks = await this.engine.chat.completions.create({\n messages: messages.map((m) => ({\n role: m.role,\n content: m.content,\n })),\n temperature: options?.temperature ?? 0.7,\n max_tokens: options?.maxTokens ?? 512,\n top_p: options?.topP ?? 0.95,\n stop: options?.stopSequences,\n stream: true,\n });\n\n let fullText = '';\n\n for await (const chunk of chunks) {\n const token = chunk.choices[0]?.delta?.content ?? '';\n if (token) {\n fullText += token;\n onToken(token, fullText);\n }\n }\n\n return fullText;\n }\n\n async unload(): Promise<void> {\n if (this.engine) {\n await this.engine.unload();\n this.engine = null;\n this.currentModel = null;\n }\n }\n}\n\n/**\n * Create a WebLLM provider instance\n */\nexport function createWebLLMProvider(): WebLLMProvider {\n return new WebLLMProvider();\n}\n",
  "/**\n * Transformers.js Backend Implementation\n * Fallback backend using HuggingFace Transformers.js with ONNX runtime\n */\n\nimport type {\n LLMProvider,\n ChatMessage,\n GenerateOptions,\n StreamCallback,\n LoadProgressCallback,\n LoadProgress,\n Backend,\n Device,\n Quantization,\n} from '../types';\n\n// Import the specific pipeline type we need\nimport type { TextGenerationPipeline } from '@huggingface/transformers';\n\n/**\n * Default model for Transformers.js backend\n * Using Qwen2.5 0.5B as it's well-tested with ONNX\n */\nexport const DEFAULT_TRANSFORMERS_MODEL = 'onnx-community/Qwen2.5-0.5B-Instruct';\n\n/**\n * Transformers.js compatible models (must have ONNX weights)\n * These are specifically converted for browser use via transformers.js\n * \n * @see https://huggingface.co/onnx-community for more models\n */\nexport const TRANSFORMERS_MODELS = {\n // === Qwen 2.5 Models (Alibaba) - Excellent quality ===\n 'qwen-2.5-0.5b': 'onnx-community/Qwen2.5-0.5B-Instruct',\n 'qwen-2.5-1.5b': 'onnx-community/Qwen2.5-1.5B-Instruct',\n 'qwen-2.5-coder-0.5b': 'onnx-community/Qwen2.5-Coder-0.5B-Instruct',\n 'qwen-2.5-coder-1.5b': 'onnx-community/Qwen2.5-Coder-1.5B-Instruct',\n\n // === Qwen 3 Models (Alibaba) - Latest generation ===\n 'qwen-3-0.6b': 'onnx-community/Qwen3-0.6B-ONNX',\n\n // === SmolLM2 Models (HuggingFace) - Ultra lightweight ===\n 'smollm2-135m': 'HuggingFaceTB/SmolLM2-135M-Instruct',\n 'smollm2-360m': 'HuggingFaceTB/SmolLM2-360M-Instruct',\n 'smollm2-1.7b': 'HuggingFaceTB/SmolLM2-1.7B-Instruct',\n\n // === Phi Models (Microsoft) ===\n 'phi-3-mini': 'Xenova/Phi-3-mini-4k-instruct',\n\n // === TinyLlama - Very fast and light ===\n 'tinyllama': 'Xenova/TinyLlama-1.1B-Chat-v1.0',\n} as const;\n\nexport type TransformersModelAlias = keyof typeof TRANSFORMERS_MODELS;\n\n/**\n * Model size estimates for UI display\n */\nexport const TRANSFORMERS_MODEL_SIZES: Record<TransformersModelAlias, string> = {\n 'qwen-2.5-0.5b': '~350MB',\n 'qwen-2.5-1.5b': '~900MB',\n 'qwen-2.5-coder-0.5b': '~350MB',\n 'qwen-2.5-coder-1.5b': '~900MB',\n 'qwen-3-0.6b': '~400MB',\n 'smollm2-135m': '~100MB',\n 'smollm2-360m': '~250MB',\n 'smollm2-1.7b': '~1GB',\n 'phi-3-mini': '~2.3GB',\n 'tinyllama': '~700MB',\n};\n\n/**\n * Detect the chat template format based on model ID\n */\nfunction detectChatFormat(modelId: string): 'chatml' | 'llama' | 'phi' | 'generic' {\n const lower = modelId.toLowerCase();\n \n if (lower.includes('qwen') || lower.includes('smollm')) {\n return 'chatml';\n }\n if (lower.includes('llama') || lower.includes('tinyllama')) {\n return 'llama';\n }\n if (lower.includes('phi')) {\n return 'phi';\n }\n return 'generic';\n}\n\n/**\n * Format messages into a prompt string based on model type\n */\nfunction formatPrompt(messages: ChatMessage[], modelId: string): string {\n const format = detectChatFormat(modelId);\n \n switch (format) {\n case 'chatml': {\n // ChatML format (Qwen, SmolLM, etc.)\n let prompt = '';\n for (const msg of messages) {\n prompt += `<|im_start|>${msg.role}\\n${msg.content}<|im_end|>\\n`;\n }\n prompt += '<|im_start|>assistant\\n';\n return prompt;\n }\n \n case 'llama': {\n // Llama/TinyLlama format\n let prompt = '';\n for (const msg of messages) {\n if (msg.role === 'system') {\n prompt += `<s>[INST] <<SYS>>\\n${msg.content}\\n<</SYS>>\\n\\n`;\n } else if (msg.role === 'user') {\n if (!prompt.includes('[INST]')) {\n prompt += `<s>[INST] ${msg.content} [/INST]`;\n } else {\n prompt += `<s>[INST] ${msg.content} [/INST]`;\n }\n } else if (msg.role === 
'assistant') {\n prompt += ` ${msg.content} </s>`;\n }\n }\n return prompt;\n }\n \n case 'phi': {\n // Phi format\n let prompt = '';\n for (const msg of messages) {\n if (msg.role === 'system') {\n prompt += `<|system|>\\n${msg.content}<|end|>\\n`;\n } else if (msg.role === 'user') {\n prompt += `<|user|>\\n${msg.content}<|end|>\\n`;\n } else if (msg.role === 'assistant') {\n prompt += `<|assistant|>\\n${msg.content}<|end|>\\n`;\n }\n }\n prompt += '<|assistant|>\\n';\n return prompt;\n }\n \n case 'generic':\n default: {\n // Simple generic format\n let prompt = '';\n for (const msg of messages) {\n prompt += `${msg.role}: ${msg.content}\\n`;\n }\n prompt += 'assistant: ';\n return prompt;\n }\n }\n}\n\n/**\n * Map quantization to Transformers.js dtype\n */\nfunction mapQuantization(quantization: Quantization): 'q4' | 'q8' | 'fp16' | 'fp32' {\n const map: Record<Quantization, 'q4' | 'q8' | 'fp16' | 'fp32'> = {\n q4: 'q4',\n q8: 'q8',\n fp16: 'fp16',\n fp32: 'fp32',\n };\n return map[quantization] ?? 'q4';\n}\n\n/**\n * Configuration for TransformersProvider\n */\nexport interface TransformersProviderConfig {\n device?: Device;\n quantization?: Quantization;\n}\n\n/**\n * Transformers.js provider implementation\n */\nexport class TransformersProvider implements LLMProvider {\n readonly backend: Backend = 'transformers';\n\n private pipeline: TextGenerationPipeline | null = null;\n private currentModel: string | null = null;\n private device: Device;\n private quantization: Quantization;\n\n constructor(config: TransformersProviderConfig = {}) {\n this.device = config.device ?? 'auto';\n this.quantization = config.quantization ?? 'q4';\n }\n\n get isReady(): boolean {\n return this.pipeline !== null && this.currentModel !== null;\n }\n\n get modelId(): string | null {\n return this.currentModel;\n }\n\n async load(modelId: string, onProgress?: LoadProgressCallback): Promise<void> {\n // Resolve alias to full model ID\n const resolvedModel = modelId in TRANSFORMERS_MODELS\n ? TRANSFORMERS_MODELS[modelId as TransformersModelAlias]\n : modelId;\n\n // Dynamic import\n const { pipeline, env } = await import('@huggingface/transformers');\n\n // Configure environment\n env.allowLocalModels = false;\n env.useBrowserCache = true;\n\n // Determine device\n let deviceOption: string = 'wasm';\n if (this.device === 'auto' || this.device === 'webgpu') {\n // Check if WebGPU is available\n if (typeof navigator !== 'undefined' && 'gpu' in navigator) {\n try {\n const gpu = (navigator as unknown as { gpu: { requestAdapter(): Promise<unknown> } }).gpu;\n const adapter = await gpu.requestAdapter();\n if (adapter) {\n deviceOption = 'webgpu';\n }\n } catch {\n // Fall back to WASM\n }\n }\n }\n\n // Create pipeline with progress callback\n const dtype = mapQuantization(this.quantization);\n \n this.pipeline = await pipeline('text-generation', resolvedModel, {\n dtype: dtype as 'q4' | 'q8' | 'fp16' | 'fp32',\n device: deviceOption as 'wasm' | 'webgpu',\n progress_callback: (progress: { status: string; progress?: number; file?: string }) => {\n if (onProgress) {\n const loadProgress: LoadProgress = {\n progress: Math.round((progress.progress ?? 0) * 100),\n status: progress.status,\n };\n onProgress(loadProgress);\n }\n },\n }) as any as TextGenerationPipeline;\n\n this.currentModel = resolvedModel;\n }\n\n async chat(messages: ChatMessage[], options?: GenerateOptions): Promise<string> {\n if (!this.pipeline || !this.currentModel) {\n throw new Error('Model not loaded. 
Call load() first.');\n }\n\n const prompt = formatPrompt(messages, this.currentModel);\n\n const result = await this.pipeline(prompt, {\n max_new_tokens: options?.maxTokens ?? 512,\n temperature: options?.temperature ?? 0.7,\n top_p: options?.topP ?? 0.95,\n do_sample: true,\n return_full_text: false,\n });\n\n // Extract generated text\n const output = Array.isArray(result) ? result[0] : result;\n return (output as { generated_text: string }).generated_text ?? '';\n }\n\n async stream(\n messages: ChatMessage[],\n onToken: StreamCallback,\n options?: GenerateOptions\n ): Promise<string> {\n if (!this.pipeline || !this.currentModel) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n const prompt = formatPrompt(messages, this.currentModel);\n\n // Transformers.js streaming via TextStreamer\n const { TextStreamer } = await import('@huggingface/transformers');\n \n let fullText = '';\n \n const streamer = new TextStreamer(this.pipeline.tokenizer, {\n skip_prompt: true,\n callback_function: (token: string) => {\n fullText += token;\n onToken(token, fullText);\n },\n });\n\n await this.pipeline(prompt, {\n max_new_tokens: options?.maxTokens ?? 512,\n temperature: options?.temperature ?? 0.7,\n top_p: options?.topP ?? 0.95,\n do_sample: true,\n return_full_text: false,\n streamer,\n });\n\n return fullText;\n }\n\n async unload(): Promise<void> {\n this.pipeline = null;\n this.currentModel = null;\n }\n}\n\n/**\n * Create a Transformers.js provider instance\n */\nexport function createTransformersProvider(config?: TransformersProviderConfig): TransformersProvider {\n return new TransformersProvider(config);\n}\n",
9
9
  "/**\n * DOM Helper Utilities\n * Easy integration with HTML input/output elements\n */\n\nimport type { StreamCallback, AttachOptions } from './types';\n\n/**\n * Get an element by selector, throw if not found\n */\nfunction getElement<T extends HTMLElement>(selector: string | T): T {\n if (typeof selector === 'string') {\n const el = document.querySelector<T>(selector);\n if (!el) {\n throw new Error(`Element not found: ${selector}`);\n }\n return el;\n }\n return selector;\n}\n\n/**\n * Creates a streaming callback that updates an output element\n */\nexport function createOutputStreamer(\n outputSelector: string | HTMLElement,\n options?: {\n append?: boolean;\n scrollToBottom?: boolean;\n }\n): StreamCallback {\n const output = getElement(outputSelector);\n const append = options?.append ?? false;\n const scrollToBottom = options?.scrollToBottom ?? true;\n\n let baseContent = append ? output.textContent ?? '' : '';\n\n return (_token: string, fullText: string) => {\n // Use textContent for plain text, innerHTML for HTML\n if (output instanceof HTMLInputElement || output instanceof HTMLTextAreaElement) {\n output.value = baseContent + fullText;\n } else {\n output.textContent = baseContent + fullText;\n }\n\n // Auto-scroll\n if (scrollToBottom) {\n output.scrollTop = output.scrollHeight;\n }\n };\n}\n\n/**\n * Attach LLM to input/output elements with automatic handling\n */\nexport function attachToElements(\n inputSelector: string | HTMLInputElement | HTMLTextAreaElement,\n outputSelector: string | HTMLElement,\n generateFn: (input: string, onToken: StreamCallback) => Promise<string>,\n options?: AttachOptions\n): () => void {\n const input = getElement<HTMLInputElement | HTMLTextAreaElement>(inputSelector);\n const output = getElement(outputSelector);\n\n const triggerOnEnter = options?.triggerOnEnter ?? true;\n const clearOnSend = options?.clearOnSend ?? true;\n const showLoading = options?.showLoading ?? true;\n const loadingText = options?.loadingText ?? 'Thinking...';\n\n let isGenerating = false;\n\n const handleGenerate = async () => {\n if (isGenerating) return;\n\n const text = input.value.trim();\n if (!text) return;\n\n isGenerating = true;\n\n // Clear input\n if (clearOnSend) {\n input.value = '';\n }\n\n // Show loading\n if (showLoading) {\n if (output instanceof HTMLInputElement || output instanceof HTMLTextAreaElement) {\n output.value = loadingText;\n } else {\n output.textContent = loadingText;\n }\n }\n\n try {\n const streamer = createOutputStreamer(output);\n await generateFn(text, streamer);\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : 'Generation failed';\n if (output instanceof HTMLInputElement || output instanceof HTMLTextAreaElement) {\n output.value = `Error: ${errorMsg}`;\n } else {\n output.textContent = `Error: ${errorMsg}`;\n }\n } finally {\n isGenerating = false;\n }\n };\n\n // Event listeners\n const keydownHandler = (e: KeyboardEvent) => {\n if (triggerOnEnter && e.key === 'Enter' && !e.shiftKey) {\n e.preventDefault();\n handleGenerate();\n }\n };\n\n input.addEventListener('keydown', keydownHandler as EventListener);\n\n // Return cleanup function\n return () => {\n input.removeEventListener('keydown', keydownHandler as EventListener);\n };\n}\n\n/**\n * Create a simple chat UI in a container\n */\nexport function createChatUI(containerSelector: string | HTMLElement): {\n input: HTMLTextAreaElement;\n output: HTMLDivElement;\n sendButton: HTMLButtonElement;\n cleanup: () => void;\n} {\n const container = getElement(containerSelector);\n\n // Create chat structure\n container.innerHTML = `\n <div class=\"llm-chat\" style=\"display: flex; flex-direction: column; height: 100%; font-family: system-ui, sans-serif;\">\n <div class=\"llm-chat-output\" style=\"flex: 1; overflow-y: auto; padding: 1rem; border: 1px solid #e5e7eb; border-radius: 8px; margin-bottom: 1rem; min-height: 200px; background: #fafafa;\">\n <p style=\"color: #9ca3af; margin: 0;\">Start chatting...</p>\n </div>\n <div class=\"llm-chat-input-row\" style=\"display: flex; gap: 0.5rem;\">\n <textarea class=\"llm-chat-input\" placeholder=\"Type a message...\" style=\"flex: 1; padding: 0.75rem; border: 1px solid #e5e7eb; border-radius: 8px; resize: none; font-size: 1rem; font-family: inherit;\"></textarea>\n <button class=\"llm-chat-send\" style=\"padding: 0.75rem 1.5rem; background: #3b82f6; color: white; border: none; border-radius: 8px; cursor: pointer; font-size: 1rem; font-weight: 500;\">Send</button>\n </div>\n </div>\n `;\n\n const input = container.querySelector<HTMLTextAreaElement>('.llm-chat-input')!;\n const output = container.querySelector<HTMLDivElement>('.llm-chat-output')!;\n const sendButton = container.querySelector<HTMLButtonElement>('.llm-chat-send')!;\n\n const cleanup = () => {\n container.innerHTML = '';\n };\n\n return { input, output, sendButton, cleanup };\n}\n\n/**\n * Create a loading indicator\n */\nexport function createLoadingIndicator(containerSelector: string | HTMLElement): {\n show: () => void;\n hide: () => void;\n setProgress: (percent: number, status?: string) => void;\n element: HTMLDivElement;\n} {\n const container = getElement(containerSelector);\n\n const indicator = document.createElement('div');\n indicator.className = 'llm-loading';\n indicator.style.cssText = `\n display: none;\n padding: 1rem;\n background: #f3f4f6;\n border-radius: 8px;\n text-align: center;\n font-family: system-ui, sans-serif;\n `;\n indicator.innerHTML = `\n <div class=\"llm-loading-progress\" style=\"height: 4px; background: #e5e7eb; border-radius: 2px; overflow: hidden; margin-bottom: 0.5rem;\">\n <div class=\"llm-loading-bar\" style=\"height: 100%; width: 0%; background: #3b82f6; transition: width 0.2s;\"></div>\n </div>\n <div class=\"llm-loading-status\" style=\"font-size: 0.875rem; color: #6b7280;\">Loading...</div>\n `;\n\n container.appendChild(indicator);\n\n const bar = indicator.querySelector<HTMLDivElement>('.llm-loading-bar')!;\n const status = indicator.querySelector<HTMLDivElement>('.llm-loading-status')!;\n\n return {\n show: () => {\n indicator.style.display = 'block';\n },\n hide: () => {\n 
indicator.style.display = 'none';\n },\n setProgress: (percent: number, statusText?: string) => {\n bar.style.width = `${Math.min(100, Math.max(0, percent))}%`;\n if (statusText !== undefined) {\n status.textContent = statusText;\n }\n },\n element: indicator,\n };\n}\n",
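
Note that `createChatUI` returns the send button unwired and `attachToElements` only installs an Enter-key handler, so connecting the two is left to the caller. One way to wire them, assuming a page with a `#chat` container; the element id and the root import path are illustrative:

```typescript
import { createLLM, createChatUI, attachToElements } from '@blank-utils/llm';

const llm = await createLLM();
const { input, output, sendButton, cleanup } = createChatUI('#chat');

// Enter (without Shift) is handled by attachToElements itself.
const detach = attachToElements(input, output, (text, onToken) =>
  llm.stream(text, onToken)
);

// The send button needs its own listener; dispatching a synthetic keydown
// reuses the Enter path so both routes share one generate handler.
sendButton.addEventListener('click', () => {
  input.dispatchEvent(new KeyboardEvent('keydown', { key: 'Enter' }));
});

// Tear down in reverse order when removing the widget:
// detach(); cleanup();
```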
10
- "/**\n * Local LLM - Browser-based LLM inference library\n *\n * A simple, generalized library for interacting with LLMs directly in the browser.\n * Works in any codebase with WebGPU or WASM support.\n *\n * @example\n * ```typescript\n * import { createLLM } from 'local-llm';\n *\n * const llm = await createLLM({\n * onLoadProgress: (p) => console.log(`Loading: ${p.progress}%`)\n * });\n *\n * // Streaming generation\n * await llm.stream('Tell me a joke', (token) => console.log(token));\n *\n * // Attach to DOM elements\n * llm.attachToInput('#input', '#output');\n * ```\n *\n * @module local-llm\n */\n\n// Re-export types\nexport type {\n Backend,\n Device,\n Quantization,\n LLMConfig,\n MessageRole,\n ChatMessage,\n StreamCallback,\n LoadProgress,\n LoadProgressCallback,\n GenerateOptions,\n LLMProvider as LLMProviderInterface,\n AttachOptions,\n BrowserCapabilities,\n} from './types';\n\n// Re-export detection utilities\nexport { checkWebGPU, checkWasm, detectCapabilities, logCapabilities } from './detect';\n\n// Re-export backends\nexport {\n WebLLMProvider,\n createWebLLMProvider,\n DEFAULT_WEBLLM_MODEL,\n WEBLLM_MODELS,\n} from './backends/webllm';\n\nexport {\n TransformersProvider,\n createTransformersProvider,\n DEFAULT_TRANSFORMERS_MODEL,\n TRANSFORMERS_MODELS,\n} from './backends/transformers';\n\n// Re-export helpers\nexport {\n createOutputStreamer,\n attachToElements,\n createChatUI,\n createLoadingIndicator,\n} from './helpers';\n\n// Import for internal use\nimport type {\n LLMConfig,\n ChatMessage,\n GenerateOptions,\n StreamCallback,\n LoadProgressCallback,\n AttachOptions,\n} from './types';\n\nimport { detectCapabilities } from './detect';\nimport { WebLLMProvider, DEFAULT_WEBLLM_MODEL } from './backends/webllm';\nimport { TransformersProvider, DEFAULT_TRANSFORMERS_MODEL } from './backends/transformers';\nimport { attachToElements, createOutputStreamer } from './helpers';\n\n/**\n * Main LLM interface with simplified API\n */\nexport interface LocalLLM {\n /**\n * Whether the model is loaded and ready\n */\n readonly isReady: boolean;\n\n /**\n * The current model ID\n */\n readonly modelId: string | null;\n\n /**\n * The backend being used\n */\n readonly backend: 'webllm' | 'transformers';\n\n /**\n * Generate a chat response\n */\n chat(\n messages: ChatMessage[] | string,\n options?: GenerateOptions\n ): Promise<string>;\n\n /**\n * Generate with streaming output\n */\n stream(\n messages: ChatMessage[] | string,\n onToken: StreamCallback,\n options?: GenerateOptions\n ): Promise<string>;\n\n /**\n * Attach to input/output elements for automatic generation\n */\n attachToInput(\n inputSelector: string | HTMLInputElement | HTMLTextAreaElement,\n outputSelector: string | HTMLElement,\n options?: AttachOptions\n ): () => void;\n\n /**\n * Unload the model and free resources\n */\n unload(): Promise<void>;\n}\n\n/**\n * Normalize messages - convert string to ChatMessage array\n */\nfunction normalizeMessages(\n input: ChatMessage[] | string,\n systemPrompt?: string\n): ChatMessage[] {\n const messages: ChatMessage[] = [];\n\n if (systemPrompt) {\n messages.push({ role: 'system', content: systemPrompt });\n }\n\n if (typeof input === 'string') {\n messages.push({ role: 'user', content: input });\n } else {\n messages.push(...input);\n }\n\n return messages;\n}\n\n/**\n * Create a LocalLLM instance\n *\n * @param config - Configuration options\n * @returns Promise that resolves to a LocalLLM instance once the model is loaded\n *\n * @example\n * ```typescript\n * // 
Simple usage with defaults\n * const llm = await createLLM();\n *\n * // With configuration\n * const llm = await createLLM({\n * model: 'phi-3-mini',\n * backend: 'webllm',\n * systemPrompt: 'You are a helpful assistant.',\n * onLoadProgress: (p) => console.log(p.progress)\n * });\n * ```\n */\nexport async function createLLM(config: LLMConfig = {}): Promise<LocalLLM> {\n const {\n backend: requestedBackend = 'auto',\n device = 'auto',\n quantization = 'q4',\n systemPrompt,\n onLoadProgress,\n } = config;\n\n // Detect capabilities\n const capabilities = await detectCapabilities();\n\n // Determine which backend to use\n let useBackend: 'webllm' | 'transformers';\n\n if (requestedBackend === 'auto') {\n useBackend = capabilities.webgpu ? 'webllm' : 'transformers';\n } else if (requestedBackend === 'webllm') {\n if (!capabilities.webgpu) {\n console.warn('[LocalLLM] WebLLM requested but WebGPU not available. Falling back to Transformers.js');\n useBackend = 'transformers';\n } else {\n useBackend = 'webllm';\n }\n } else {\n useBackend = 'transformers';\n }\n\n // Determine model\n const model = config.model ?? (\n useBackend === 'webllm' ? DEFAULT_WEBLLM_MODEL : DEFAULT_TRANSFORMERS_MODEL\n );\n\n console.log(`[LocalLLM] Using ${useBackend} backend with model: ${model}`);\n\n // Create provider\n let provider: WebLLMProvider | TransformersProvider;\n\n if (useBackend === 'webllm') {\n provider = new WebLLMProvider();\n } else {\n provider = new TransformersProvider({ device, quantization });\n }\n\n // Load model\n await provider.load(model, onLoadProgress);\n\n // Create LocalLLM instance\n const llm: LocalLLM = {\n get isReady() {\n return provider.isReady;\n },\n\n get modelId() {\n return provider.modelId;\n },\n\n get backend() {\n return useBackend;\n },\n\n async chat(messages, options) {\n const normalizedMessages = normalizeMessages(messages, systemPrompt);\n return provider.chat(normalizedMessages, options);\n },\n\n async stream(messages, onToken, options) {\n const normalizedMessages = normalizeMessages(messages, systemPrompt);\n return provider.stream(normalizedMessages, onToken, options);\n },\n\n attachToInput(inputSelector, outputSelector, options) {\n return attachToElements(\n inputSelector,\n outputSelector,\n async (input, onToken) => {\n const normalizedMessages = normalizeMessages(input, systemPrompt);\n return provider.stream(normalizedMessages, onToken);\n },\n options\n );\n },\n\n async unload() {\n await provider.unload();\n },\n };\n\n return llm;\n}\n\n/**\n * Quick helper to test if the current browser supports WebGPU\n */\nexport async function isWebGPUSupported(): Promise<boolean> {\n const caps = await detectCapabilities();\n return caps.webgpu;\n}\n\n// Default export\nexport default createLLM;\n\n// ============================================================================\n// React Integration (tree-shakeable re-exports)\n// ============================================================================\n// These are re-exported for convenience but can also be imported directly from\n// 'local-llm/react' for smaller bundle sizes when not using all features.\n\nexport {\n // Context\n LLMProvider,\n useLLM,\n \n // Hooks\n useChat,\n useStream,\n useCompletion,\n \n // Components\n LLMLoading,\n LLMReady,\n \n // Types\n type LLMContextValue,\n type LLMProviderProps,\n type UseChatOptions,\n type UseChatReturn,\n type UseStreamOptions,\n type UseStreamReturn,\n type UseCompletionOptions,\n type UseCompletionReturn,\n type LLMLoadingProps,\n type 
LLMReadyProps,\n} from './react';\n\n"
10
+ "/**\n * Core LLM functionality - Separated from index.ts to avoid circular dependencies\n * with React integration.\n *\n * @module local-llm/core\n */\n\n// Re-export types\nexport type {\n Backend,\n Device,\n Quantization,\n LLMConfig,\n MessageRole,\n ChatMessage,\n StreamCallback,\n LoadProgress,\n LoadProgressCallback,\n GenerateOptions,\n LLMProvider as LLMProviderInterface,\n AttachOptions,\n BrowserCapabilities,\n} from './types';\n\n// Re-export detection utilities\nexport { checkWebGPU, checkWasm, detectCapabilities, logCapabilities } from './detect';\n\n// Re-export backends\nexport {\n WebLLMProvider,\n createWebLLMProvider,\n DEFAULT_WEBLLM_MODEL,\n WEBLLM_MODELS,\n} from './backends/webllm';\n\nexport {\n TransformersProvider,\n createTransformersProvider,\n DEFAULT_TRANSFORMERS_MODEL,\n TRANSFORMERS_MODELS,\n} from './backends/transformers';\n\n// Re-export helpers\nexport {\n createOutputStreamer,\n attachToElements,\n createChatUI,\n createLoadingIndicator,\n} from './helpers';\n\n// Import for internal use\nimport type {\n LLMConfig,\n ChatMessage,\n GenerateOptions,\n StreamCallback,\n AttachOptions,\n} from './types';\n\nimport { detectCapabilities } from './detect';\nimport { WebLLMProvider, DEFAULT_WEBLLM_MODEL } from './backends/webllm';\nimport { TransformersProvider, DEFAULT_TRANSFORMERS_MODEL } from './backends/transformers';\nimport { attachToElements } from './helpers';\n\n/**\n * Main LLM interface with simplified API\n */\nexport interface LocalLLM {\n /**\n * Whether the model is loaded and ready\n */\n readonly isReady: boolean;\n\n /**\n * The current model ID\n */\n readonly modelId: string | null;\n\n /**\n * The backend being used\n */\n readonly backend: 'webllm' | 'transformers';\n\n /**\n * Generate a chat response\n */\n chat(\n messages: ChatMessage[] | string,\n options?: GenerateOptions\n ): Promise<string>;\n\n /**\n * Generate with streaming output\n */\n stream(\n messages: ChatMessage[] | string,\n onToken: StreamCallback,\n options?: GenerateOptions\n ): Promise<string>;\n\n /**\n * Attach to input/output elements for automatic generation\n */\n attachToInput(\n inputSelector: string | HTMLInputElement | HTMLTextAreaElement,\n outputSelector: string | HTMLElement,\n options?: AttachOptions\n ): () => void;\n\n /**\n * Unload the model and free resources\n */\n unload(): Promise<void>;\n}\n\n/**\n * Normalize messages - convert string to ChatMessage array\n */\nfunction normalizeMessages(\n input: ChatMessage[] | string,\n systemPrompt?: string\n): ChatMessage[] {\n const messages: ChatMessage[] = [];\n\n if (systemPrompt) {\n messages.push({ role: 'system', content: systemPrompt });\n }\n\n if (typeof input === 'string') {\n messages.push({ role: 'user', content: input });\n } else {\n messages.push(...input);\n }\n\n return messages;\n}\n\n/**\n * Create a LocalLLM instance\n *\n * @param config - Configuration options\n * @returns Promise that resolves to a LocalLLM instance once the model is loaded\n *\n * @example\n * ```typescript\n * // Simple usage with defaults\n * const llm = await createLLM();\n *\n * // With configuration\n * const llm = await createLLM({\n * model: 'phi-3-mini',\n * backend: 'webllm',\n * systemPrompt: 'You are a helpful assistant.',\n * onLoadProgress: (p) => console.log(p.progress)\n * });\n * ```\n */\nexport async function createLLM(config: LLMConfig = {}): Promise<LocalLLM> {\n const {\n backend: requestedBackend = 'auto',\n device = 'auto',\n quantization = 'q4',\n systemPrompt,\n onLoadProgress,\n } = 
config;\n\n // Detect capabilities\n const capabilities = await detectCapabilities();\n\n // Determine which backend to use\n let useBackend: 'webllm' | 'transformers';\n\n if (requestedBackend === 'auto') {\n useBackend = capabilities.webgpu ? 'webllm' : 'transformers';\n } else if (requestedBackend === 'webllm') {\n if (!capabilities.webgpu) {\n console.warn('[LocalLLM] WebLLM requested but WebGPU not available. Falling back to Transformers.js');\n useBackend = 'transformers';\n } else {\n useBackend = 'webllm';\n }\n } else {\n useBackend = 'transformers';\n }\n\n // Determine model\n const model = config.model ?? (\n useBackend === 'webllm' ? DEFAULT_WEBLLM_MODEL : DEFAULT_TRANSFORMERS_MODEL\n );\n\n console.log(`[LocalLLM] Using ${useBackend} backend with model: ${model}`);\n\n // Create provider\n let provider: WebLLMProvider | TransformersProvider;\n\n if (useBackend === 'webllm') {\n provider = new WebLLMProvider();\n } else {\n provider = new TransformersProvider({ device, quantization });\n }\n\n // Load model\n await provider.load(model, onLoadProgress);\n\n // Create LocalLLM instance\n const llm: LocalLLM = {\n get isReady() {\n return provider.isReady;\n },\n\n get modelId() {\n return provider.modelId;\n },\n\n get backend() {\n return useBackend;\n },\n\n async chat(messages, options) {\n const normalizedMessages = normalizeMessages(messages, systemPrompt);\n return provider.chat(normalizedMessages, options);\n },\n\n async stream(messages, onToken, options) {\n const normalizedMessages = normalizeMessages(messages, systemPrompt);\n return provider.stream(normalizedMessages, onToken, options);\n },\n\n attachToInput(inputSelector, outputSelector, options) {\n return attachToElements(\n inputSelector,\n outputSelector,\n async (input, onToken) => {\n const normalizedMessages = normalizeMessages(input, systemPrompt);\n return provider.stream(normalizedMessages, onToken);\n },\n options\n );\n },\n\n async unload() {\n await provider.unload();\n },\n };\n\n return llm;\n}\n\n/**\n * Quick helper to test if the current browser supports WebGPU\n */\nexport async function isWebGPUSupported(): Promise<boolean> {\n const caps = await detectCapabilities();\n return caps.webgpu;\n}\n"
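
Per its header comment, `core.ts` carries everything framework-free so that non-React consumers never pull the React layer into their module graph. A sketch of that path, assuming the package exposes a `/core` subpath (only `src/core.ts` is visible in this sourcemap, not the published export map):

```typescript
// Hypothetical subpath import; adjust to the package's actual export map.
import { createLLM, isWebGPUSupported } from '@blank-utils/llm/core';

// 'auto' picks WebLLM when WebGPU is available, otherwise Transformers.js,
// mirroring the detectCapabilities() branch in createLLM above.
const llm = await createLLM({
  backend: 'auto',
  systemPrompt: 'You are terse.',
  onLoadProgress: (p) => console.log(`${p.progress}%`),
});

console.log(llm.backend); // 'webllm' or 'transformers'
console.log(await isWebGPUSupported());

const reply = await llm.chat('Summarize WebGPU in one sentence.');
console.log(reply);
await llm.unload();
```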
11
11
  ],
12
- "mappings": ";;;;;;AA8BA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACrBA,eAAsB,WAAW,GAAqB;AAAA,EACpD,IAAI,OAAO,cAAc;AAAA,IAAa,OAAO;AAAA,EAC7C,IAAI,EAAE,SAAS;AAAA,IAAY,OAAO;AAAA,EAElC,IAAI;AAAA,IACF,MAAM,MAAO,UAAyE;AAAA,IACtF,IAAI,CAAC;AAAA,MAAK,OAAO;AAAA,IAEjB,MAAM,UAAU,MAAM,IAAI,eAAe;AAAA,IACzC,OAAO,YAAY;AAAA,IACnB,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AAOJ,SAAS,SAAS,GAAY;AAAA,EACnC,IAAI,OAAO,gBAAgB;AAAA,IAAa,OAAO;AAAA,EAE/C,IAAI;AAAA,IAEF,OACE,OAAO,YAAY,yBAAyB,cAC5C,OAAO,YAAY,gBAAgB;AAAA,IAErC,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AAOX,eAAsB,kBAAkB,GAAiC;AAAA,EACvE,MAAM,SAAS,MAAM,YAAY;AAAA,EACjC,MAAM,OAAO,UAAU;AAAA,EAEvB,IAAI,qBAA8B;AAAA,EAClC,IAAI,oBAA4B;AAAA,EAEhC,IAAI,QAAQ;AAAA,IAEV,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,EACtB,EAAO,SAAI,MAAM;AAAA,IAEf,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,EACtB;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AAMF,eAAsB,eAAe,GAAiC;AAAA,EACpE,MAAM,OAAO,MAAM,mBAAmB;AAAA,EAEtC,QAAQ,IAAI,kCAAkC;AAAA,EAC9C,QAAQ,IAAI,aAAa,KAAK,SAAS,gBAAe,mBAAmB;AAAA,EACzE,QAAQ,IAAI,WAAW,KAAK,OAAO,gBAAe,mBAAmB;AAAA,EACrE,QAAQ,IAAI,0BAA0B,KAAK,oBAAoB;AAAA,EAC/D,QAAQ,IAAI,yBAAyB,KAAK,mBAAmB;AAAA,EAE7D,OAAO;AAAA;;AC1DF,IAAM,uBAAuB;AAQ7B,IAAM,gBAAgB;AAAA,EAE3B,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAGhB,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EAGnB,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAGlB,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EAGvB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,aAAa;AAAA,EACb,aAAa;AAAA,EAGb,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,cAAc;AAAA,EAGd,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAGhB,cAAc;AAAA,EAGd,uBAAuB;AAAA,EACvB,wBAAwB;AAAA,EAGxB,yBAAyB;AAAA,EACzB,yBAAyB;AAC3B;AAyCA,SAAS,cAAc,CAAC,OAAuB;AAAA,EAC7C,IAAI,SAAS,eAAe;AAAA,IAC1B,OAAO,cAAc;AAAA,EACvB;AAAA,EACA,OAAO;AAAA;AAAA;AAMF,MAAM,eAAsC;AAAA,EACxC,UAAmB;AAAA,EAEpB,SAA2B;AAAA,EAC3B,eAA8B;AAAA,MAElC,OAAO,GAAY;AAAA,IACrB,OAAO,KAAK,WAAW,QAAQ,KAAK,iBAAiB;AAAA;AAAA,MAGnD,OAAO,GAAkB;AAAA,IAC3B,OAAO,KAAK;AAAA;AAAA,OAGR,KAAI,CAAC,SAAiB,YAAkD;AAAA,IAC5E,MAAM,gBAAgB,eAAe,OAAO;AAAA,IAG5C,QAAQ,oBAAoB,MAAa;AAAA,IAGzC,MAAM,uBAAuB,CAAC,WAA+C;AAAA,MAC3E,IAAI,YAAY;AAAA,QACd,MAAM,WAAyB;AAAA,UAC7B,UAAU,KAAK,MAAM,OAAO,WAAW,GAAG;AAAA,UAC1C,QAAQ,OAAO;AAAA,QACjB;AAAA,QACA,WAAW,QAAQ;AAAA,MACrB;AAAA;AAAA,IAGF,KAAK,SAAS,MAAM,gBAAgB,eAAe;AAAA,MACjD;AAAA,IACF,CAAC;AAAA,IAED,KAAK,eAAe;AAAA;AAAA,OAGhB,KAAI,CAAC,UAAyB,SAA4C;AAAA,IAC9E,IAAI,CAAC,KAAK,QAAQ;AAAA,MAChB,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACzD,UAAU,SAAS,IAAI,CAAC,OAAO;AAAA,QAC7B,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AAAA,MACF,aAAa,SAAS,eAAe;AAAA,MACrC,YAAY,SAAS,aAAa;AAAA,MAClC,OAAO,SAAS,QAAQ;AAAA,MACxB,MAAM,SAAS;AAAA,IACjB,CAAC;AAAA,IAED,OAAO,SAAS,QAAQ,IAAI,SAAS,WAAW;AAAA;AAAA,OAG5C,OAAM,CACV,UACA,SACA,SACiB;AAAA,IACjB,IAAI,CAAC,KAAK,QAAQ;AAAA,MAChB,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACvD,UAAU,SAAS,IAAI,CAAC,OAAO;AAAA,QAC7B,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AAAA,MACF,aAAa,SAAS,eAAe;AAAA,MACrC,YAAY,SAAS,aAAa;AAAA,MAClC,OAAO,SAAS,QAAQ;AAAA,MACxB,MAAM,SAAS;AAAA,MACf,QAAQ;AAAA,IACV,CAAC;AAAA,IAED,IAAI,WAAW;AAAA,IAEf,iBAAiB,SAAS,QAAQ;AAAA,MAChC,MAAM,QAAQ,MAAM,QAAQ,IAAI,OAAO,WAAW;AAAA,MAClD,IAAI,OAAO;AAAA,QACT,YAAY;AAAA,QACZ,QAAQ,OAAO,QAAQ;AAAA,MACzB;AAAA,IACF;AAAA,IAEA,OAAO;AAAA;AAAA,OAGH,OAAM,GAAkB;AAAA,IAC5B,IAAI,KAAK,QAAQ;AAAA,MACf,MAAM,KAAK,OAAO,OAAO;AAAA,MACzB,KAAK,SAAS;AAAA,MACd,KAAK,eAAe;AAAA,IACtB;AAAA;AAEJ;AAKO,SAAS,oBAAoB,GAAmB;AAAA,EACrD,OAAO,IAAI;AAAA;;AClNN,IAAM,6BAA6B;AAQnC,IAAM,sBAAsB;AAAA,EAEjC,iBAAiB;AAAA,EACjB,iBA
AiB;AAAA,EACjB,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EAGvB,eAAe;AAAA,EAGf,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAGhB,cAAc;AAAA,EAGd,WAAa;AACf;AAuBA,SAAS,gBAAgB,CAAC,SAAyD;AAAA,EACjF,MAAM,QAAQ,QAAQ,YAAY;AAAA,EAElC,IAAI,MAAM,SAAS,MAAM,KAAK,MAAM,SAAS,QAAQ,GAAG;AAAA,IACtD,OAAO;AAAA,EACT;AAAA,EACA,IAAI,MAAM,SAAS,OAAO,KAAK,MAAM,SAAS,WAAW,GAAG;AAAA,IAC1D,OAAO;AAAA,EACT;AAAA,EACA,IAAI,MAAM,SAAS,KAAK,GAAG;AAAA,IACzB,OAAO;AAAA,EACT;AAAA,EACA,OAAO;AAAA;AAMT,SAAS,YAAY,CAAC,UAAyB,SAAyB;AAAA,EACtE,MAAM,SAAS,iBAAiB,OAAO;AAAA,EAEvC,QAAQ;AAAA,SACD,UAAU;AAAA,MAEb,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,UAAU,eAAe,IAAI;AAAA,EAAS,IAAI;AAAA;AAAA,MAC5C;AAAA,MACA,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,IACT;AAAA,SAEK,SAAS;AAAA,MAEZ,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,IAAI,IAAI,SAAS,UAAU;AAAA,UACzB,UAAU;AAAA,EAAsB,IAAI;AAAA;AAAA;AAAA;AAAA,QACtC,EAAO,SAAI,IAAI,SAAS,QAAQ;AAAA,UAC9B,IAAI,CAAC,OAAO,SAAS,QAAQ,GAAG;AAAA,YAC9B,UAAU,aAAa,IAAI;AAAA,UAC7B,EAAO;AAAA,YACL,UAAU,aAAa,IAAI;AAAA;AAAA,QAE/B,EAAO,SAAI,IAAI,SAAS,aAAa;AAAA,UACnC,UAAU,IAAI,IAAI;AAAA,QACpB;AAAA,MACF;AAAA,MACA,OAAO;AAAA,IACT;AAAA,SAEK,OAAO;AAAA,MAEV,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,IAAI,IAAI,SAAS,UAAU;AAAA,UACzB,UAAU;AAAA,EAAe,IAAI;AAAA;AAAA,QAC/B,EAAO,SAAI,IAAI,SAAS,QAAQ;AAAA,UAC9B,UAAU;AAAA,EAAa,IAAI;AAAA;AAAA,QAC7B,EAAO,SAAI,IAAI,SAAS,aAAa;AAAA,UACnC,UAAU;AAAA,EAAkB,IAAI;AAAA;AAAA,QAClC;AAAA,MACF;AAAA,MACA,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,IACT;AAAA,SAEK;AAAA,aACI;AAAA,MAEP,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,UAAU,GAAG,IAAI,SAAS,IAAI;AAAA;AAAA,MAChC;AAAA,MACA,UAAU;AAAA,MACV,OAAO;AAAA,IACT;AAAA;AAAA;AAOJ,SAAS,eAAe,CAAC,cAA2D;AAAA,EAClF,MAAM,MAA2D;AAAA,IAC/D,IAAI;AAAA,IACJ,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA,OAAO,IAAI,iBAAiB;AAAA;AAAA;AAcvB,MAAM,qBAA4C;AAAA,EAC9C,UAAmB;AAAA,EAEpB,WAA0C;AAAA,EAC1C,eAA8B;AAAA,EAC9B;AAAA,EACA;AAAA,EAER,WAAW,CAAC,SAAqC,CAAC,GAAG;AAAA,IACnD,KAAK,SAAS,OAAO,UAAU;AAAA,IAC/B,KAAK,eAAe,OAAO,gBAAgB;AAAA;AAAA,MAGzC,OAAO,GAAY;AAAA,IACrB,OAAO,KAAK,aAAa,QAAQ,KAAK,iBAAiB;AAAA;AAAA,MAGrD,OAAO,GAAkB;AAAA,IAC3B,OAAO,KAAK;AAAA;AAAA,OAGR,KAAI,CAAC,SAAiB,YAAkD;AAAA,IAE5E,MAAM,gBAAgB,WAAW,sBAC7B,oBAAoB,WACpB;AAAA,IAGJ,QAAQ,UAAU,QAAQ,MAAa;AAAA,IAGvC,IAAI,mBAAmB;AAAA,IACvB,IAAI,kBAAkB;AAAA,IAGtB,IAAI,eAAuB;AAAA,IAC3B,IAAI,KAAK,WAAW,UAAU,KAAK,WAAW,UAAU;AAAA,MAEtD,IAAI,OAAO,cAAc,eAAe,SAAS,WAAW;AAAA,QAC1D,IAAI;AAAA,UACF,MAAM,MAAO,UAAyE;AAAA,UACtF,MAAM,UAAU,MAAM,IAAI,eAAe;AAAA,UACzC,IAAI,SAAS;AAAA,YACX,eAAe;AAAA,UACjB;AAAA,UACA,MAAM;AAAA,MAGV;AAAA,IACF;AAAA,IAGA,MAAM,QAAQ,gBAAgB,KAAK,YAAY;AAAA,IAE/C,KAAK,WAAW,MAAM,SAAS,mBAAmB,eAAe;AAAA,MAC/D;AAAA,MACA,QAAQ;AAAA,MACR,mBAAmB,CAAC,aAAmE;AAAA,QACrF,IAAI,YAAY;AAAA,UACd,MAAM,eAA6B;AAAA,YACjC,UAAU,KAAK,OAAO,SAAS,YAAY,KAAK,GAAG;AAAA,YACnD,QAAQ,SAAS;AAAA,UACnB;AAAA,UACA,WAAW,YAAY;AAAA,QACzB;AAAA;AAAA,IAEJ,CAAC;AAAA,IAED,KAAK,eAAe;AAAA;AAAA,OAGhB,KAAI,CAAC,UAAyB,SAA4C;AAAA,IAC9E,IAAI,CAAC,KAAK,YAAY,CAAC,KAAK,cAAc;AAAA,MACxC,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,SAAS,aAAa,UAAU,KAAK,YAAY;AAAA,IAEvD,MAAM,SAAS,MAAM,KAAK,SAAS,QAAQ;AAAA,MACzC,gBAAgB,SAAS,aAAa;AAAA,MACtC,aAAa,SAAS,eAAe;AAAA,MACrC,OAAO,SAAS,QAAQ;AAAA,MACxB,WAAW;AAAA,MACX,kBAAkB;AAAA,IACpB,CAAC;AAAA,IAGD,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,OAAO,KAAK;AAAA,IACnD,OAAQ,OAAsC,kBAAkB;AAAA;AAAA,OAG5D,OAAM,CACV,UACA,SACA,SACiB;AAAA,IACjB,IAAI,CAAC,KAAK,YAAY,CAAC,KAAK,cAAc;AAAA,MACxC,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,SAAS,aAAa,UAAU,KAAK,YAAY;AAAA,IAGvD,QAAQ,iBAAiB,MAAa;AAAA,IAEtC,IAAI,WAAW;AAAA,IAEf,MAAM,WAAW,IAAI,aAAa,KAAK,SAAS,WAAW;AAAA,MACzD,aAAa;AAAA,MACb,mBAAmB,CAAC,UAAkB;AAAA,QACpC,YAAY;AA
AA,QACZ,QAAQ,OAAO,QAAQ;AAAA;AAAA,IAE3B,CAAC;AAAA,IAED,MAAM,KAAK,SAAS,QAAQ;AAAA,MAC1B,gBAAgB,SAAS,aAAa;AAAA,MACtC,aAAa,SAAS,eAAe;AAAA,MACrC,OAAO,SAAS,QAAQ;AAAA,MACxB,WAAW;AAAA,MACX,kBAAkB;AAAA,MAClB;AAAA,IACF,CAAC;AAAA,IAED,OAAO;AAAA;AAAA,OAGH,OAAM,GAAkB;AAAA,IAC5B,KAAK,WAAW;AAAA,IAChB,KAAK,eAAe;AAAA;AAExB;AAKO,SAAS,0BAA0B,CAAC,QAA2D;AAAA,EACpG,OAAO,IAAI,qBAAqB,MAAM;AAAA;;AClTxC,SAAS,UAAiC,CAAC,UAAyB;AAAA,EAClE,IAAI,OAAO,aAAa,UAAU;AAAA,IAChC,MAAM,KAAK,SAAS,cAAiB,QAAQ;AAAA,IAC7C,IAAI,CAAC,IAAI;AAAA,MACP,MAAM,IAAI,MAAM,sBAAsB,UAAU;AAAA,IAClD;AAAA,IACA,OAAO;AAAA,EACT;AAAA,EACA,OAAO;AAAA;AAMF,SAAS,oBAAoB,CAClC,gBACA,SAIgB;AAAA,EAChB,MAAM,SAAS,WAAW,cAAc;AAAA,EACxC,MAAM,SAAS,SAAS,UAAU;AAAA,EAClC,MAAM,iBAAiB,SAAS,kBAAkB;AAAA,EAElD,IAAI,cAAc,SAAS,OAAO,eAAe,KAAK;AAAA,EAEtD,OAAO,CAAC,QAAgB,aAAqB;AAAA,IAE3C,IAAI,kBAAkB,oBAAoB,kBAAkB,qBAAqB;AAAA,MAC/E,OAAO,QAAQ,cAAc;AAAA,IAC/B,EAAO;AAAA,MACL,OAAO,cAAc,cAAc;AAAA;AAAA,IAIrC,IAAI,gBAAgB;AAAA,MAClB,OAAO,YAAY,OAAO;AAAA,IAC5B;AAAA;AAAA;AAOG,SAAS,gBAAgB,CAC9B,eACA,gBACA,YACA,SACY;AAAA,EACZ,MAAM,QAAQ,WAAmD,aAAa;AAAA,EAC9E,MAAM,SAAS,WAAW,cAAc;AAAA,EAExC,MAAM,iBAAiB,SAAS,kBAAkB;AAAA,EAClD,MAAM,cAAc,SAAS,eAAe;AAAA,EAC5C,MAAM,cAAc,SAAS,eAAe;AAAA,EAC5C,MAAM,cAAc,SAAS,eAAe;AAAA,EAE5C,IAAI,eAAe;AAAA,EAEnB,MAAM,iBAAiB,YAAY;AAAA,IACjC,IAAI;AAAA,MAAc;AAAA,IAElB,MAAM,OAAO,MAAM,MAAM,KAAK;AAAA,IAC9B,IAAI,CAAC;AAAA,MAAM;AAAA,IAEX,eAAe;AAAA,IAGf,IAAI,aAAa;AAAA,MACf,MAAM,QAAQ;AAAA,IAChB;AAAA,IAGA,IAAI,aAAa;AAAA,MACf,IAAI,kBAAkB,oBAAoB,kBAAkB,qBAAqB;AAAA,QAC/E,OAAO,QAAQ;AAAA,MACjB,EAAO;AAAA,QACL,OAAO,cAAc;AAAA;AAAA,IAEzB;AAAA,IAEA,IAAI;AAAA,MACF,MAAM,WAAW,qBAAqB,MAAM;AAAA,MAC5C,MAAM,WAAW,MAAM,QAAQ;AAAA,MAC/B,OAAO,OAAO;AAAA,MACd,MAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAC1D,IAAI,kBAAkB,oBAAoB,kBAAkB,qBAAqB;AAAA,QAC/E,OAAO,QAAQ,UAAU;AAAA,MAC3B,EAAO;AAAA,QACL,OAAO,cAAc,UAAU;AAAA;AAAA,cAEjC;AAAA,MACA,eAAe;AAAA;AAAA;AAAA,EAKnB,MAAM,iBAAiB,CAAC,MAAqB;AAAA,IAC3C,IAAI,kBAAkB,EAAE,QAAQ,WAAW,CAAC,EAAE,UAAU;AAAA,MACtD,EAAE,eAAe;AAAA,MACjB,eAAe;AAAA,IACjB;AAAA;AAAA,EAGF,MAAM,iBAAiB,WAAW,cAA+B;AAAA,EAGjE,OAAO,MAAM;AAAA,IACX,MAAM,oBAAoB,WAAW,cAA+B;AAAA;AAAA;AAOjE,SAAS,YAAY,CAAC,mBAK3B;AAAA,EACA,MAAM,YAAY,WAAW,iBAAiB;AAAA,EAG9C,UAAU,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYtB,MAAM,QAAQ,UAAU,cAAmC,iBAAiB;AAAA,EAC5E,MAAM,SAAS,UAAU,cAA8B,kBAAkB;AAAA,EACzE,MAAM,aAAa,UAAU,cAAiC,gBAAgB;AAAA,EAE9E,MAAM,UAAU,MAAM;AAAA,IACpB,UAAU,YAAY;AAAA;AAAA,EAGxB,OAAO,EAAE,OAAO,QAAQ,YAAY,QAAQ;AAAA;AAMvC,SAAS,sBAAsB,CAAC,mBAKrC;AAAA,EACA,MAAM,YAAY,WAAW,iBAAiB;AAAA,EAE9C,MAAM,YAAY,SAAS,cAAc,KAAK;AAAA,EAC9C,UAAU,YAAY;AAAA,EACtB,UAAU,MAAM,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ1B,UAAU,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOtB,UAAU,YAAY,SAAS;AAAA,EAE/B,MAAM,MAAM,UAAU,cAA8B,kBAAkB;AAAA,EACtE,MAAM,SAAS,UAAU,cAA8B,qBAAqB;AAAA,EAE5E,OAAO;AAAA,IACL,MAAM,MAAM;AAAA,MACV,UAAU,MAAM,UAAU;AAAA;AAAA,IAE5B,MAAM,MAAM;AAAA,MACV,UAAU,MAAM,UAAU;AAAA;AAAA,IAE5B,aAAa,CAAC,SAAiB,eAAwB;AAAA,MACrD,IAAI,MAAM,QAAQ,GAAG,KAAK,IAAI,KAAK,KAAK,IAAI,GAAG,OAAO,CAAC;AAAA,MACvD,IAAI,eAAe,WAAW;AAAA,QAC5B,OAAO,cAAc;AAAA,MACvB;AAAA;AAAA,IAEF,SAAS;AAAA,EACX;AAAA;;ACtEF,SAAS,iBAAiB,CACxB,OACA,cACe;AAAA,EACf,MAAM,WAA0B,CAAC;AAAA,EAEjC,IAAI,cAAc;AAAA,IAChB,SAAS,KAAK,EAAE,MAAM,UAAU,SAAS,aAAa,CAAC;AAAA,EACzD;AAAA,EAEA,IAAI,OAAO,UAAU,UAAU;AAAA,IAC7B,SAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,MAAM,CAAC;AAAA,EAChD,EAAO;AAAA,IACL,SAAS,KAAK,GAAG,KAAK;AAAA;AAAA,EAGxB,OAAO;AAAA;AAuBT,eAAsB,SAAS,CAAC,SAAoB,CAAC,GAAsB;AAAA,EACzE;AAAA,IACE,SAAS,mBAAmB;AAAA,IAC5B,SAAS;AAAA,IACT,eAAe;AAAA,IACf;AAAA,IACA;AAAA,MACE;AAAA,EAGJ,MAAM,eAAe,MAAM,mBAAmB;AAAA,EAG9C,IA
AI;AAAA,EAEJ,IAAI,qBAAqB,QAAQ;AAAA,IAC/B,aAAa,aAAa,SAAS,WAAW;AAAA,EAChD,EAAO,SAAI,qBAAqB,UAAU;AAAA,IACxC,IAAI,CAAC,aAAa,QAAQ;AAAA,MACxB,QAAQ,KAAK,uFAAuF;AAAA,MACpG,aAAa;AAAA,IACf,EAAO;AAAA,MACL,aAAa;AAAA;AAAA,EAEjB,EAAO;AAAA,IACL,aAAa;AAAA;AAAA,EAIf,MAAM,QAAQ,OAAO,UACnB,eAAe,WAAW,uBAAuB;AAAA,EAGnD,QAAQ,IAAI,oBAAoB,kCAAkC,OAAO;AAAA,EAGzE,IAAI;AAAA,EAEJ,IAAI,eAAe,UAAU;AAAA,IAC3B,WAAW,IAAI;AAAA,EACjB,EAAO;AAAA,IACL,WAAW,IAAI,qBAAqB,EAAE,QAAQ,aAAa,CAAC;AAAA;AAAA,EAI9D,MAAM,SAAS,KAAK,OAAO,cAAc;AAAA,EAGzC,MAAM,MAAgB;AAAA,QAChB,OAAO,GAAG;AAAA,MACZ,OAAO,SAAS;AAAA;AAAA,QAGd,OAAO,GAAG;AAAA,MACZ,OAAO,SAAS;AAAA;AAAA,QAGd,OAAO,GAAG;AAAA,MACZ,OAAO;AAAA;AAAA,SAGH,KAAI,CAAC,UAAU,SAAS;AAAA,MAC5B,MAAM,qBAAqB,kBAAkB,UAAU,YAAY;AAAA,MACnE,OAAO,SAAS,KAAK,oBAAoB,OAAO;AAAA;AAAA,SAG5C,OAAM,CAAC,UAAU,SAAS,SAAS;AAAA,MACvC,MAAM,qBAAqB,kBAAkB,UAAU,YAAY;AAAA,MACnE,OAAO,SAAS,OAAO,oBAAoB,SAAS,OAAO;AAAA;AAAA,IAG7D,aAAa,CAAC,eAAe,gBAAgB,SAAS;AAAA,MACpD,OAAO,iBACL,eACA,gBACA,OAAO,OAAO,YAAY;AAAA,QACxB,MAAM,qBAAqB,kBAAkB,OAAO,YAAY;AAAA,QAChE,OAAO,SAAS,OAAO,oBAAoB,OAAO;AAAA,SAEpD,OACF;AAAA;AAAA,SAGI,OAAM,GAAG;AAAA,MACb,MAAM,SAAS,OAAO;AAAA;AAAA,EAE1B;AAAA,EAEA,OAAO;AAAA;AAMT,eAAsB,iBAAiB,GAAqB;AAAA,EAC1D,MAAM,OAAO,MAAM,mBAAmB;AAAA,EACtC,OAAO,KAAK;AAAA;AAId,IAAe;;;;AL/Lf,IAAM,aAAa,cAAsC,IAAI;AA6CtD,SAAS,WAAW;AAAA,EACzB;AAAA,EACA,WAAW;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,KACG;AAAA,GACgB;AAAA,EACnB,OAAO,KAAK,UAAU,SAA0B,IAAI;AAAA,EACpD,OAAO,WAAW,gBAAgB,SAAS,KAAK;AAAA,EAChD,OAAO,cAAc,mBAAmB,SAA8B,IAAI;AAAA,EAC1E,OAAO,OAAO,YAAY,SAAuB,IAAI;AAAA,EAGrD,MAAM,eAAe,OAAO,KAAK;AAAA,EACjC,MAAM,YAAY,OAAO,MAAM;AAAA,EAC/B,UAAU,UAAU;AAAA,EAEpB,MAAM,OAAO,YAAY,YAAY;AAAA,IACnC,IAAI;AAAA,MAAW;AAAA,IAEf,aAAa,IAAI;AAAA,IACjB,SAAS,IAAI;AAAA,IACb,gBAAgB,EAAE,UAAU,GAAG,QAAQ,kBAAkB,CAAC;AAAA,IAE1D,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,UAAU;AAAA,WAC5B,UAAU;AAAA,QACb,gBAAgB,CAAC,aAAa;AAAA,UAC5B,gBAAgB,QAAQ;AAAA,UACxB,aAAa,QAAQ;AAAA;AAAA,MAEzB,CAAC;AAAA,MAED,OAAO,QAAQ;AAAA,MACf,gBAAgB,EAAE,UAAU,KAAK,QAAQ,QAAQ,CAAC;AAAA,MAClD,SAAS,QAAQ;AAAA,MACjB,OAAO,KAAK;AAAA,MACZ,MAAM,SAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAChE,SAAS,MAAK;AAAA,MACd,UAAU,MAAK;AAAA,cACf;AAAA,MACA,aAAa,KAAK;AAAA;AAAA,KAEnB,CAAC,WAAW,QAAQ,YAAY,OAAO,CAAC;AAAA,EAE3C,MAAM,SAAS,YAAY,YAAY;AAAA,IACrC,IAAI,KAAK;AAAA,MACP,MAAM,IAAI,OAAO;AAAA,MACjB,OAAO,IAAI;AAAA,MACX,gBAAgB,IAAI;AAAA,MACpB,aAAa,UAAU;AAAA,IACzB;AAAA,KACC,CAAC,GAAG,CAAC;AAAA,EAER,MAAM,SAAS,YAAY,YAAY;AAAA,IACrC,MAAM,OAAO;AAAA,IACb,MAAM,KAAK;AAAA,KACV,CAAC,QAAQ,IAAI,CAAC;AAAA,EAGjB,UAAU,MAAM;AAAA,IACd,IAAI,YAAY,CAAC,aAAa,WAAW,CAAC,OAAO,CAAC,WAAW;AAAA,MAC3D,aAAa,UAAU;AAAA,MACvB,KAAK;AAAA,IACP;AAAA,KACC,CAAC,UAAU,KAAK,WAAW,IAAI,CAAC;AAAA,EAGnC,UAAU,MAAM;AAAA,IACd,OAAO,MAAM;AAAA,MACX,IAAI,KAAK;AAAA,QACP,IAAI,OAAO,EAAE,MAAM,QAAQ,KAAK;AAAA,MAClC;AAAA;AAAA,KAED,CAAC,GAAG,CAAC;AAAA,EAER,MAAM,QAAQ,QACZ,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,SAAS,KAAK,WAAW;AAAA,IACzB;AAAA,IACA;AAAA,IACA,SAAS,KAAK,WAAW;AAAA,IACzB,SAAS,KAAK,WAAW;AAAA,IACzB;AAAA,IACA;AAAA,EACF,IACA,CAAC,KAAK,WAAW,cAAc,OAAO,QAAQ,MAAM,CACtD;AAAA,EAEA,uBAAO,OAA+C,WAAW,UAA1D;AAAA,IAAqB;AAAA,IAArB;AAAA,sCAA+C;AAAA;AAiBjD,SAAS,MAAM,GAAoB;AAAA,EACxC,MAAM,UAAU,WAAW,UAAU;AAAA,EAErC,IAAI,CAAC,SAAS;AAAA,IACZ,MAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AAAA,EAEA,OAAO;AAAA;AA2HF,SAAS,OAAO,CAAC,UAA0B,CAAC,GAAkB;AAAA,EACnE,QAAQ,KAAK,SAAS,cAAc,OAAO;AAAA,EAE3C;AAAA,IACE,kBAAkB,CAAC;AAAA,IACnB;AAAA,IACA;AAAA,IACA,oBAAoB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,MACE;AAAA,EAEJ,OAAO,UAAU,eAAe,SAAwB,eAAe;AAAA,EACvE,OAAO,OAAO,YAAY,SAAS,EAAE;AAAA,EACrC,OAAO,cAAc,mBAAmB,SAAS,KAAK;AAAA,EACtD,OAAO,eAAe,oBAAoB,SAAS,EAAE;A
AAA,EAGrD,OAAO,gBAAgB,qBAAqB,SAAwB,IAAI;AAAA,EAExE,MAAM,WAAW,OAAO,KAAK;AAAA,EAC7B,MAAM,kBAAkB,OAAO,KAAK;AAAA,EAGpC,MAAM,mBAAmB,YACvB,OAAO,aAAqB,oBAAoD;AAAA,IAC9E,IAAI,CAAC,OAAO,CAAC,WAAW,gBAAgB,SAAS;AAAA,MAC/C,OAAO;AAAA,IACT;AAAA,IAEA,gBAAgB,UAAU;AAAA,IAG1B,MAAM,cAA2B,EAAE,MAAM,QAAQ,SAAS,YAAY;AAAA,IACtE,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,WAAW,CAAC;AAAA,IAG5C,MAAM,cAA6B,CAAC;AAAA,IAEpC,IAAI,cAAc;AAAA,MAChB,YAAY,KAAK,EAAE,MAAM,UAAU,SAAS,aAAa,CAAC;AAAA,IAC5D;AAAA,IAEA,YAAY,KAAK,GAAG,iBAAiB,WAAW;AAAA,IAGhD,gBAAgB,IAAI;AAAA,IACpB,iBAAiB,EAAE;AAAA,IACnB,SAAS,UAAU;AAAA,IACnB,UAAU;AAAA,IAEV,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,IAAI,OACzB,aACA,CAAC,OAAO,aAAa;AAAA,QACnB,IAAI,SAAS;AAAA,UAAS;AAAA,QACtB,iBAAiB,QAAQ;AAAA,QACzB,UAAU,OAAO,QAAQ;AAAA,SAE3B,eACF;AAAA,MAEA,IAAI,CAAC,SAAS,SAAS;AAAA,QAErB,MAAM,mBAAgC;AAAA,UACpC,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,gBAAgB,CAAC;AAAA,QACjD,iBAAiB,EAAE;AAAA,QACnB,WAAW,QAAQ;AAAA,MACrB;AAAA,MAEA,OAAO;AAAA,MACP,OAAO,KAAK;AAAA,MACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAChE,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,cACP;AAAA,MACA,gBAAgB,KAAK;AAAA,MACrB,gBAAgB,UAAU;AAAA;AAAA,KAG9B,CAAC,KAAK,SAAS,cAAc,iBAAiB,SAAS,SAAS,UAAU,OAAO,CACnF;AAAA,EAGA,UAAU,MAAM;AAAA,IACd,IAAI,WAAW,kBAAkB,CAAC,gBAAgB,SAAS;AAAA,MACzD,MAAM,mBAAmB;AAAA,MACzB,kBAAkB,IAAI;AAAA,MACtB,iBAAiB,kBAAkB,QAAQ;AAAA,IAC7C;AAAA,KACC,CAAC,SAAS,gBAAgB,UAAU,gBAAgB,CAAC;AAAA,EAExD,MAAM,OAAO,YACX,OAAO,YAAsC;AAAA,IAC3C,MAAM,iBAAiB,WAAW;AAAA,IAElC,IAAI,CAAC,eAAe,KAAK,GAAG;AAAA,MAC1B,OAAO;AAAA,IACT;AAAA,IAGA,IAAI,CAAC,SAAS;AAAA,MACZ,SAAS,EAAE;AAAA,IACb;AAAA,IAGA,IAAI,OAAO,SAAS;AAAA,MAClB,OAAO,iBAAiB,gBAAgB,QAAQ;AAAA,IAClD;AAAA,IAGA,IAAI,aAAa,mBAAmB;AAAA,MAElC,MAAM,cAA2B,EAAE,MAAM,QAAQ,SAAS,eAAe;AAAA,MACzE,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,WAAW,CAAC;AAAA,MAC5C,kBAAkB,cAAc;AAAA,MAChC,OAAO;AAAA,IACT;AAAA,IAGA,OAAO;AAAA,KAET,CAAC,OAAO,KAAK,SAAS,WAAW,mBAAmB,UAAU,gBAAgB,CAChF;AAAA,EAEA,MAAM,OAAO,YAAY,MAAM;AAAA,IAC7B,SAAS,UAAU;AAAA,IACnB,gBAAgB,KAAK;AAAA,IACrB,kBAAkB,IAAI;AAAA,IAGtB,IAAI,eAAe;AAAA,MACjB,YAAY,CAAC,SAAS;AAAA,QACpB,GAAG;AAAA,QACH,EAAE,MAAM,aAAa,SAAS,gBAAgB,MAAM;AAAA,MACtD,CAAC;AAAA,MACD,iBAAiB,EAAE;AAAA,IACrB;AAAA,KACC,CAAC,aAAa,CAAC;AAAA,EAElB,MAAM,QAAQ,YAAY,MAAM;AAAA,IAC9B,YAAY,eAAe;AAAA,IAC3B,iBAAiB,EAAE;AAAA,IACnB,SAAS,EAAE;AAAA,IACX,kBAAkB,IAAI;AAAA,KACrB,CAAC,eAAe,CAAC;AAAA,EAEpB,MAAM,SAAS,YAAY,CAAC,YAAyB;AAAA,IACnD,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,OAAO,CAAC;AAAA,KACvC,CAAC,CAAC;AAAA,EAEL,MAAM,SAAS,YAAY,YAA6B;AAAA,IACtD,IAAI,SAAS,WAAW;AAAA,MAAG,OAAO;AAAA,IAGlC,MAAM,gBAAgB,SAAS,cAAc,CAAC,MAAM,EAAE,SAAS,MAAM;AAAA,IACrE,IAAI,kBAAkB;AAAA,MAAI,OAAO;AAAA,IAGjC,MAAM,mBAAmB,SAAS,MAAM,GAAG,aAAa;AAAA,IACxD,MAAM,kBAAkB,SAAS;AAAA,IAGjC,IAAI,CAAC;AAAA,MAAiB,OAAO;AAAA,IAE7B,YAAY,gBAAgB;AAAA,IAG5B,OAAO,KAAK,gBAAgB,OAAO;AAAA,KAClC,CAAC,UAAU,IAAI,CAAC;AAAA,EAEnB,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,mBAAmB;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AA0DK,SAAS,SAAS,CAAC,UAA4B,CAAC,GAAoB;AAAA,EACzE,QAAQ,KAAK,YAAY,OAAO;AAAA,EAChC,QAAQ,iBAAiB,SAAS,UAAU,YAAY;AAAA,EAExD,OAAO,MAAM,WAAW,SAAS,EAAE;AAAA,EACnC,OAAO,aAAa,kBAAkB,SAAS,KAAK;AAAA,EAEpD,MAAM,WAAW,OAAO,KAAK;AAAA,EAE7B,MAAM,SAAS,YACb,OAAO,UAAmD;AAAA,IACxD,IAAI,CAAC,OAAO,CAAC,SAAS;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,IAEA,eAAe,IAAI;AAAA,IACnB,QAAQ,EAAE;AAAA,IACV,SAAS,UAAU;AAAA,IAEnB,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,IAAI,OACzB,OACA,CAAC,OAAO,aAAa;AAAA,QACnB,IAAI,SAAS;AAAA,UAAS;AAAA,QACtB,QAAQ,QAAQ;AAAA,QAChB,UAAU,OAAO,QAAQ;AAAA,SAE3B,eACF;AAAA,MAEA,WAAW,QAAQ;AAAA,MACnB,OAAO;AAAA,MACP,OAAO,KAAK;AAAA,MACZ,MAAM,QAAQ,
eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAChE,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,cACP;AAAA,MACA,eAAe,KAAK;AAAA;AAAA,KAGxB,CAAC,KAAK,SAAS,iBAAiB,SAAS,UAAU,OAAO,CAC5D;AAAA,EAEA,MAAM,OAAO,YAAY,MAAM;AAAA,IAC7B,SAAS,UAAU;AAAA,IACnB,eAAe,KAAK;AAAA,KACnB,CAAC,CAAC;AAAA,EAEL,MAAM,QAAQ,YAAY,MAAM;AAAA,IAC9B,QAAQ,EAAE;AAAA,KACT,CAAC,CAAC;AAAA,EAEL,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AA6CK,SAAS,aAAa,CAC3B,UAAgC,CAAC,GACZ;AAAA,EACrB,QAAQ,KAAK,YAAY,OAAO;AAAA,EAChC,QAAQ,oBAAoB;AAAA,EAE5B,OAAO,YAAY,iBAAiB,SAAS,EAAE;AAAA,EAC/C,OAAO,WAAW,gBAAgB,SAAS,KAAK;AAAA,EAEhD,MAAM,WAAW,YACf,OAAO,WAAoC;AAAA,IACzC,IAAI,CAAC,OAAO,CAAC,SAAS;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,IAEA,aAAa,IAAI;AAAA,IAEjB,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,IAAI,KAAK,QAAQ,eAAe;AAAA,MACvD,cAAc,QAAQ;AAAA,MACtB,OAAO;AAAA,MACP,OAAO,KAAK;AAAA,MACZ,QAAQ,MAAM,0BAA0B,GAAG;AAAA,MAC3C,OAAO;AAAA,cACP;AAAA,MACA,aAAa,KAAK;AAAA;AAAA,KAGtB,CAAC,KAAK,SAAS,eAAe,CAChC;AAAA,EAEA,MAAM,QAAQ,YAAY,MAAM;AAAA,IAC9B,cAAc,EAAE;AAAA,KACf,CAAC,CAAC;AAAA,EAEL,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AAyBK,SAAS,UAAU,GAAG,UAAU,aAA8B;AAAA,EACnE,QAAQ,WAAW,iBAAiB,OAAO;AAAA,EAE3C,IAAI,CAAC;AAAA,IAAW,OAAO;AAAA,EAEvB,IAAI,UAAU;AAAA,IACZ,uBAAO,OAAuC,OAAvC;AAAA,MAAK;AAAA,MAAL;AAAA,wCAAuC;AAAA,EAChD;AAAA,EAEA,uBACE,OAGE,OAHF;AAAA,IAAK;AAAA,IAAL,UAGE;AAAA,sBAFA,OAAoD,KAApD;AAAA,kBAAoD;AAAA,UAApD;AAAA,UAAqB,cAAc,YAAY;AAAA,UAA/C;AAAA;AAAA,yCAAoD;AAAA,sBACpD,OAA2B,KAA3B;AAAA,kBAAI,cAAc;AAAA,SAAlB,iCAA2B;AAAA;AAAA,KAF7B,gCAGE;AAAA;AA0BC,SAAS,QAAQ,GAAG,UAAU,WAAW,QAAuB;AAAA,EACrE,QAAQ,SAAS,cAAc,OAAO;AAAA,EAEtC,IAAI,aAAa,CAAC,SAAS;AAAA,IACzB,uBAAO;AAAA,gBAAG;AAAA,OAAH,iCAAc;AAAA,EACvB;AAAA,EAEA,uBAAO;AAAA;AAAA,sCAAc;AAAA;",
13
- "debugId": "9760E9C5A8B8695164756E2164756E21",
12
+ "mappings": ";;;;;;AA+BA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACtBA,eAAsB,WAAW,GAAqB;AAAA,EACpD,IAAI,OAAO,cAAc;AAAA,IAAa,OAAO;AAAA,EAC7C,IAAI,EAAE,SAAS;AAAA,IAAY,OAAO;AAAA,EAElC,IAAI;AAAA,IACF,MAAM,MAAO,UAAyE;AAAA,IACtF,IAAI,CAAC;AAAA,MAAK,OAAO;AAAA,IAEjB,MAAM,UAAU,MAAM,IAAI,eAAe;AAAA,IACzC,OAAO,YAAY;AAAA,IACnB,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AAOJ,SAAS,SAAS,GAAY;AAAA,EACnC,IAAI,OAAO,gBAAgB;AAAA,IAAa,OAAO;AAAA,EAE/C,IAAI;AAAA,IAEF,OACE,OAAO,YAAY,yBAAyB,cAC5C,OAAO,YAAY,gBAAgB;AAAA,IAErC,MAAM;AAAA,IACN,OAAO;AAAA;AAAA;AAOX,eAAsB,kBAAkB,GAAiC;AAAA,EACvE,MAAM,SAAS,MAAM,YAAY;AAAA,EACjC,MAAM,OAAO,UAAU;AAAA,EAEvB,IAAI,qBAA8B;AAAA,EAClC,IAAI,oBAA4B;AAAA,EAEhC,IAAI,QAAQ;AAAA,IAEV,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,EACtB,EAAO,SAAI,MAAM;AAAA,IAEf,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,EACtB;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AAMF,eAAsB,eAAe,GAAiC;AAAA,EACpE,MAAM,OAAO,MAAM,mBAAmB;AAAA,EAEtC,QAAQ,IAAI,kCAAkC;AAAA,EAC9C,QAAQ,IAAI,aAAa,KAAK,SAAS,gBAAe,mBAAmB;AAAA,EACzE,QAAQ,IAAI,WAAW,KAAK,OAAO,gBAAe,mBAAmB;AAAA,EACrE,QAAQ,IAAI,0BAA0B,KAAK,oBAAoB;AAAA,EAC/D,QAAQ,IAAI,yBAAyB,KAAK,mBAAmB;AAAA,EAE7D,OAAO;AAAA;;AC1DF,IAAM,uBAAuB;AAQ7B,IAAM,gBAAgB;AAAA,EAE3B,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAGhB,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EAGnB,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAGlB,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EAGvB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,aAAa;AAAA,EACb,aAAa;AAAA,EAGb,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,cAAc;AAAA,EAGd,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAGhB,cAAc;AAAA,EAGd,uBAAuB;AAAA,EACvB,wBAAwB;AAAA,EAGxB,yBAAyB;AAAA,EACzB,yBAAyB;AAC3B;AAyCA,SAAS,cAAc,CAAC,OAAuB;AAAA,EAC7C,IAAI,SAAS,eAAe;AAAA,IAC1B,OAAO,cAAc;AAAA,EACvB;AAAA,EACA,OAAO;AAAA;AAAA;AAMF,MAAM,eAAsC;AAAA,EACxC,UAAmB;AAAA,EAEpB,SAA2B;AAAA,EAC3B,eAA8B;AAAA,MAElC,OAAO,GAAY;AAAA,IACrB,OAAO,KAAK,WAAW,QAAQ,KAAK,iBAAiB;AAAA;AAAA,MAGnD,OAAO,GAAkB;AAAA,IAC3B,OAAO,KAAK;AAAA;AAAA,OAGR,KAAI,CAAC,SAAiB,YAAkD;AAAA,IAC5E,MAAM,gBAAgB,eAAe,OAAO;AAAA,IAG5C,QAAQ,oBAAoB,MAAa;AAAA,IAGzC,MAAM,uBAAuB,CAAC,WAA+C;AAAA,MAC3E,IAAI,YAAY;AAAA,QACd,MAAM,WAAyB;AAAA,UAC7B,UAAU,KAAK,MAAM,OAAO,WAAW,GAAG;AAAA,UAC1C,QAAQ,OAAO;AAAA,QACjB;AAAA,QACA,WAAW,QAAQ;AAAA,MACrB;AAAA;AAAA,IAGF,KAAK,SAAS,MAAM,gBAAgB,eAAe;AAAA,MACjD;AAAA,IACF,CAAC;AAAA,IAED,KAAK,eAAe;AAAA;AAAA,OAGhB,KAAI,CAAC,UAAyB,SAA4C;AAAA,IAC9E,IAAI,CAAC,KAAK,QAAQ;AAAA,MAChB,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,WAAW,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACzD,UAAU,SAAS,IAAI,CAAC,OAAO;AAAA,QAC7B,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AAAA,MACF,aAAa,SAAS,eAAe;AAAA,MACrC,YAAY,SAAS,aAAa;AAAA,MAClC,OAAO,SAAS,QAAQ;AAAA,MACxB,MAAM,SAAS;AAAA,IACjB,CAAC;AAAA,IAED,OAAO,SAAS,QAAQ,IAAI,SAAS,WAAW;AAAA;AAAA,OAG5C,OAAM,CACV,UACA,SACA,SACiB;AAAA,IACjB,IAAI,CAAC,KAAK,QAAQ;AAAA,MAChB,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,SAAS,MAAM,KAAK,OAAO,KAAK,YAAY,OAAO;AAAA,MACvD,UAAU,SAAS,IAAI,CAAC,OAAO;AAAA,QAC7B,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AAAA,MACF,aAAa,SAAS,eAAe;AAAA,MACrC,YAAY,SAAS,aAAa;AAAA,MAClC,OAAO,SAAS,QAAQ;AAAA,MACxB,MAAM,SAAS;AAAA,MACf,QAAQ;AAAA,IACV,CAAC;AAAA,IAED,IAAI,WAAW;AAAA,IAEf,iBAAiB,SAAS,QAAQ;AAAA,MAChC,MAAM,QAAQ,MAAM,QAAQ,IAAI,OAAO,WAAW;AAAA,MAClD,IAAI,OAAO;AAAA,QACT,YAAY;AAAA,QACZ,QAAQ,OAAO,QAAQ;AAAA,MACzB;AAAA,IACF;AAAA,IAEA,OAAO;AAAA;AAAA,OAGH,OAAM,GAAkB;AAAA,IAC5B,IAAI,KAAK,QAAQ;AAAA,MACf,MAAM,KAAK,OAAO,OAAO;AAAA,MACzB,KAAK,SAAS;AAAA,MACd,KAAK,eAAe;AAAA,IACtB;AAAA;AAEJ;AAKO,SAAS,oBAAoB,GAAmB;AAAA,EACrD,OAAO,IAAI;AAAA;;AClNN,IAAM,6BAA6B;AAQnC,IAAM,sBAAsB;AAAA,EAEjC,iBAAiB;AAAA,EACjB,iBA
AiB;AAAA,EACjB,uBAAuB;AAAA,EACvB,uBAAuB;AAAA,EAGvB,eAAe;AAAA,EAGf,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAGhB,cAAc;AAAA,EAGd,WAAa;AACf;AAuBA,SAAS,gBAAgB,CAAC,SAAyD;AAAA,EACjF,MAAM,QAAQ,QAAQ,YAAY;AAAA,EAElC,IAAI,MAAM,SAAS,MAAM,KAAK,MAAM,SAAS,QAAQ,GAAG;AAAA,IACtD,OAAO;AAAA,EACT;AAAA,EACA,IAAI,MAAM,SAAS,OAAO,KAAK,MAAM,SAAS,WAAW,GAAG;AAAA,IAC1D,OAAO;AAAA,EACT;AAAA,EACA,IAAI,MAAM,SAAS,KAAK,GAAG;AAAA,IACzB,OAAO;AAAA,EACT;AAAA,EACA,OAAO;AAAA;AAMT,SAAS,YAAY,CAAC,UAAyB,SAAyB;AAAA,EACtE,MAAM,SAAS,iBAAiB,OAAO;AAAA,EAEvC,QAAQ;AAAA,SACD,UAAU;AAAA,MAEb,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,UAAU,eAAe,IAAI;AAAA,EAAS,IAAI;AAAA;AAAA,MAC5C;AAAA,MACA,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,IACT;AAAA,SAEK,SAAS;AAAA,MAEZ,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,IAAI,IAAI,SAAS,UAAU;AAAA,UACzB,UAAU;AAAA,EAAsB,IAAI;AAAA;AAAA;AAAA;AAAA,QACtC,EAAO,SAAI,IAAI,SAAS,QAAQ;AAAA,UAC9B,IAAI,CAAC,OAAO,SAAS,QAAQ,GAAG;AAAA,YAC9B,UAAU,aAAa,IAAI;AAAA,UAC7B,EAAO;AAAA,YACL,UAAU,aAAa,IAAI;AAAA;AAAA,QAE/B,EAAO,SAAI,IAAI,SAAS,aAAa;AAAA,UACnC,UAAU,IAAI,IAAI;AAAA,QACpB;AAAA,MACF;AAAA,MACA,OAAO;AAAA,IACT;AAAA,SAEK,OAAO;AAAA,MAEV,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,IAAI,IAAI,SAAS,UAAU;AAAA,UACzB,UAAU;AAAA,EAAe,IAAI;AAAA;AAAA,QAC/B,EAAO,SAAI,IAAI,SAAS,QAAQ;AAAA,UAC9B,UAAU;AAAA,EAAa,IAAI;AAAA;AAAA,QAC7B,EAAO,SAAI,IAAI,SAAS,aAAa;AAAA,UACnC,UAAU;AAAA,EAAkB,IAAI;AAAA;AAAA,QAClC;AAAA,MACF;AAAA,MACA,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,IACT;AAAA,SAEK;AAAA,aACI;AAAA,MAEP,IAAI,SAAS;AAAA,MACb,WAAW,OAAO,UAAU;AAAA,QAC1B,UAAU,GAAG,IAAI,SAAS,IAAI;AAAA;AAAA,MAChC;AAAA,MACA,UAAU;AAAA,MACV,OAAO;AAAA,IACT;AAAA;AAAA;AAOJ,SAAS,eAAe,CAAC,cAA2D;AAAA,EAClF,MAAM,MAA2D;AAAA,IAC/D,IAAI;AAAA,IACJ,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAAA,EACA,OAAO,IAAI,iBAAiB;AAAA;AAAA;AAcvB,MAAM,qBAA4C;AAAA,EAC9C,UAAmB;AAAA,EAEpB,WAA0C;AAAA,EAC1C,eAA8B;AAAA,EAC9B;AAAA,EACA;AAAA,EAER,WAAW,CAAC,SAAqC,CAAC,GAAG;AAAA,IACnD,KAAK,SAAS,OAAO,UAAU;AAAA,IAC/B,KAAK,eAAe,OAAO,gBAAgB;AAAA;AAAA,MAGzC,OAAO,GAAY;AAAA,IACrB,OAAO,KAAK,aAAa,QAAQ,KAAK,iBAAiB;AAAA;AAAA,MAGrD,OAAO,GAAkB;AAAA,IAC3B,OAAO,KAAK;AAAA;AAAA,OAGR,KAAI,CAAC,SAAiB,YAAkD;AAAA,IAE5E,MAAM,gBAAgB,WAAW,sBAC7B,oBAAoB,WACpB;AAAA,IAGJ,QAAQ,UAAU,QAAQ,MAAa;AAAA,IAGvC,IAAI,mBAAmB;AAAA,IACvB,IAAI,kBAAkB;AAAA,IAGtB,IAAI,eAAuB;AAAA,IAC3B,IAAI,KAAK,WAAW,UAAU,KAAK,WAAW,UAAU;AAAA,MAEtD,IAAI,OAAO,cAAc,eAAe,SAAS,WAAW;AAAA,QAC1D,IAAI;AAAA,UACF,MAAM,MAAO,UAAyE;AAAA,UACtF,MAAM,UAAU,MAAM,IAAI,eAAe;AAAA,UACzC,IAAI,SAAS;AAAA,YACX,eAAe;AAAA,UACjB;AAAA,UACA,MAAM;AAAA,MAGV;AAAA,IACF;AAAA,IAGA,MAAM,QAAQ,gBAAgB,KAAK,YAAY;AAAA,IAE/C,KAAK,WAAW,MAAM,SAAS,mBAAmB,eAAe;AAAA,MAC/D;AAAA,MACA,QAAQ;AAAA,MACR,mBAAmB,CAAC,aAAmE;AAAA,QACrF,IAAI,YAAY;AAAA,UACd,MAAM,eAA6B;AAAA,YACjC,UAAU,KAAK,OAAO,SAAS,YAAY,KAAK,GAAG;AAAA,YACnD,QAAQ,SAAS;AAAA,UACnB;AAAA,UACA,WAAW,YAAY;AAAA,QACzB;AAAA;AAAA,IAEJ,CAAC;AAAA,IAED,KAAK,eAAe;AAAA;AAAA,OAGhB,KAAI,CAAC,UAAyB,SAA4C;AAAA,IAC9E,IAAI,CAAC,KAAK,YAAY,CAAC,KAAK,cAAc;AAAA,MACxC,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,SAAS,aAAa,UAAU,KAAK,YAAY;AAAA,IAEvD,MAAM,SAAS,MAAM,KAAK,SAAS,QAAQ;AAAA,MACzC,gBAAgB,SAAS,aAAa;AAAA,MACtC,aAAa,SAAS,eAAe;AAAA,MACrC,OAAO,SAAS,QAAQ;AAAA,MACxB,WAAW;AAAA,MACX,kBAAkB;AAAA,IACpB,CAAC;AAAA,IAGD,MAAM,SAAS,MAAM,QAAQ,MAAM,IAAI,OAAO,KAAK;AAAA,IACnD,OAAQ,OAAsC,kBAAkB;AAAA;AAAA,OAG5D,OAAM,CACV,UACA,SACA,SACiB;AAAA,IACjB,IAAI,CAAC,KAAK,YAAY,CAAC,KAAK,cAAc;AAAA,MACxC,MAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAAA,IAEA,MAAM,SAAS,aAAa,UAAU,KAAK,YAAY;AAAA,IAGvD,QAAQ,iBAAiB,MAAa;AAAA,IAEtC,IAAI,WAAW;AAAA,IAEf,MAAM,WAAW,IAAI,aAAa,KAAK,SAAS,WAAW;AAAA,MACzD,aAAa;AAAA,MACb,mBAAmB,CAAC,UAAkB;AAAA,QACpC,YAAY;AA
AA,QACZ,QAAQ,OAAO,QAAQ;AAAA;AAAA,IAE3B,CAAC;AAAA,IAED,MAAM,KAAK,SAAS,QAAQ;AAAA,MAC1B,gBAAgB,SAAS,aAAa;AAAA,MACtC,aAAa,SAAS,eAAe;AAAA,MACrC,OAAO,SAAS,QAAQ;AAAA,MACxB,WAAW;AAAA,MACX,kBAAkB;AAAA,MAClB;AAAA,IACF,CAAC;AAAA,IAED,OAAO;AAAA;AAAA,OAGH,OAAM,GAAkB;AAAA,IAC5B,KAAK,WAAW;AAAA,IAChB,KAAK,eAAe;AAAA;AAExB;AAKO,SAAS,0BAA0B,CAAC,QAA2D;AAAA,EACpG,OAAO,IAAI,qBAAqB,MAAM;AAAA;;AClTxC,SAAS,UAAiC,CAAC,UAAyB;AAAA,EAClE,IAAI,OAAO,aAAa,UAAU;AAAA,IAChC,MAAM,KAAK,SAAS,cAAiB,QAAQ;AAAA,IAC7C,IAAI,CAAC,IAAI;AAAA,MACP,MAAM,IAAI,MAAM,sBAAsB,UAAU;AAAA,IAClD;AAAA,IACA,OAAO;AAAA,EACT;AAAA,EACA,OAAO;AAAA;AAMF,SAAS,oBAAoB,CAClC,gBACA,SAIgB;AAAA,EAChB,MAAM,SAAS,WAAW,cAAc;AAAA,EACxC,MAAM,SAAS,SAAS,UAAU;AAAA,EAClC,MAAM,iBAAiB,SAAS,kBAAkB;AAAA,EAElD,IAAI,cAAc,SAAS,OAAO,eAAe,KAAK;AAAA,EAEtD,OAAO,CAAC,QAAgB,aAAqB;AAAA,IAE3C,IAAI,kBAAkB,oBAAoB,kBAAkB,qBAAqB;AAAA,MAC/E,OAAO,QAAQ,cAAc;AAAA,IAC/B,EAAO;AAAA,MACL,OAAO,cAAc,cAAc;AAAA;AAAA,IAIrC,IAAI,gBAAgB;AAAA,MAClB,OAAO,YAAY,OAAO;AAAA,IAC5B;AAAA;AAAA;AAOG,SAAS,gBAAgB,CAC9B,eACA,gBACA,YACA,SACY;AAAA,EACZ,MAAM,QAAQ,WAAmD,aAAa;AAAA,EAC9E,MAAM,SAAS,WAAW,cAAc;AAAA,EAExC,MAAM,iBAAiB,SAAS,kBAAkB;AAAA,EAClD,MAAM,cAAc,SAAS,eAAe;AAAA,EAC5C,MAAM,cAAc,SAAS,eAAe;AAAA,EAC5C,MAAM,cAAc,SAAS,eAAe;AAAA,EAE5C,IAAI,eAAe;AAAA,EAEnB,MAAM,iBAAiB,YAAY;AAAA,IACjC,IAAI;AAAA,MAAc;AAAA,IAElB,MAAM,OAAO,MAAM,MAAM,KAAK;AAAA,IAC9B,IAAI,CAAC;AAAA,MAAM;AAAA,IAEX,eAAe;AAAA,IAGf,IAAI,aAAa;AAAA,MACf,MAAM,QAAQ;AAAA,IAChB;AAAA,IAGA,IAAI,aAAa;AAAA,MACf,IAAI,kBAAkB,oBAAoB,kBAAkB,qBAAqB;AAAA,QAC/E,OAAO,QAAQ;AAAA,MACjB,EAAO;AAAA,QACL,OAAO,cAAc;AAAA;AAAA,IAEzB;AAAA,IAEA,IAAI;AAAA,MACF,MAAM,WAAW,qBAAqB,MAAM;AAAA,MAC5C,MAAM,WAAW,MAAM,QAAQ;AAAA,MAC/B,OAAO,OAAO;AAAA,MACd,MAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAC1D,IAAI,kBAAkB,oBAAoB,kBAAkB,qBAAqB;AAAA,QAC/E,OAAO,QAAQ,UAAU;AAAA,MAC3B,EAAO;AAAA,QACL,OAAO,cAAc,UAAU;AAAA;AAAA,cAEjC;AAAA,MACA,eAAe;AAAA;AAAA;AAAA,EAKnB,MAAM,iBAAiB,CAAC,MAAqB;AAAA,IAC3C,IAAI,kBAAkB,EAAE,QAAQ,WAAW,CAAC,EAAE,UAAU;AAAA,MACtD,EAAE,eAAe;AAAA,MACjB,eAAe;AAAA,IACjB;AAAA;AAAA,EAGF,MAAM,iBAAiB,WAAW,cAA+B;AAAA,EAGjE,OAAO,MAAM;AAAA,IACX,MAAM,oBAAoB,WAAW,cAA+B;AAAA;AAAA;AAOjE,SAAS,YAAY,CAAC,mBAK3B;AAAA,EACA,MAAM,YAAY,WAAW,iBAAiB;AAAA,EAG9C,UAAU,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYtB,MAAM,QAAQ,UAAU,cAAmC,iBAAiB;AAAA,EAC5E,MAAM,SAAS,UAAU,cAA8B,kBAAkB;AAAA,EACzE,MAAM,aAAa,UAAU,cAAiC,gBAAgB;AAAA,EAE9E,MAAM,UAAU,MAAM;AAAA,IACpB,UAAU,YAAY;AAAA;AAAA,EAGxB,OAAO,EAAE,OAAO,QAAQ,YAAY,QAAQ;AAAA;AAMvC,SAAS,sBAAsB,CAAC,mBAKrC;AAAA,EACA,MAAM,YAAY,WAAW,iBAAiB;AAAA,EAE9C,MAAM,YAAY,SAAS,cAAc,KAAK;AAAA,EAC9C,UAAU,YAAY;AAAA,EACtB,UAAU,MAAM,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ1B,UAAU,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOtB,UAAU,YAAY,SAAS;AAAA,EAE/B,MAAM,MAAM,UAAU,cAA8B,kBAAkB;AAAA,EACtE,MAAM,SAAS,UAAU,cAA8B,qBAAqB;AAAA,EAE5E,OAAO;AAAA,IACL,MAAM,MAAM;AAAA,MACV,UAAU,MAAM,UAAU;AAAA;AAAA,IAE5B,MAAM,MAAM;AAAA,MACV,UAAU,MAAM,UAAU;AAAA;AAAA,IAE5B,aAAa,CAAC,SAAiB,eAAwB;AAAA,MACrD,IAAI,MAAM,QAAQ,GAAG,KAAK,IAAI,KAAK,KAAK,IAAI,GAAG,OAAO,CAAC;AAAA,MACvD,IAAI,eAAe,WAAW;AAAA,QAC5B,OAAO,cAAc;AAAA,MACvB;AAAA;AAAA,IAEF,SAAS;AAAA,EACX;AAAA;;ACxFF,SAAS,iBAAiB,CACxB,OACA,cACe;AAAA,EACf,MAAM,WAA0B,CAAC;AAAA,EAEjC,IAAI,cAAc;AAAA,IAChB,SAAS,KAAK,EAAE,MAAM,UAAU,SAAS,aAAa,CAAC;AAAA,EACzD;AAAA,EAEA,IAAI,OAAO,UAAU,UAAU;AAAA,IAC7B,SAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,MAAM,CAAC;AAAA,EAChD,EAAO;AAAA,IACL,SAAS,KAAK,GAAG,KAAK;AAAA;AAAA,EAGxB,OAAO;AAAA;AAuBT,eAAsB,SAAS,CAAC,SAAoB,CAAC,GAAsB;AAAA,EACzE;AAAA,IACE,SAAS,mBAAmB;AAAA,IAC5B,SAAS;AAAA,IACT,eAAe;AAAA,IACf;AAAA,IACA;AAAA,MACE;AAAA,EAGJ,MAAM,eAAe,MAAM,mBAAmB;AAAA,EAG9C,IA
AI;AAAA,EAEJ,IAAI,qBAAqB,QAAQ;AAAA,IAC/B,aAAa,aAAa,SAAS,WAAW;AAAA,EAChD,EAAO,SAAI,qBAAqB,UAAU;AAAA,IACxC,IAAI,CAAC,aAAa,QAAQ;AAAA,MACxB,QAAQ,KAAK,uFAAuF;AAAA,MACpG,aAAa;AAAA,IACf,EAAO;AAAA,MACL,aAAa;AAAA;AAAA,EAEjB,EAAO;AAAA,IACL,aAAa;AAAA;AAAA,EAIf,MAAM,QAAQ,OAAO,UACnB,eAAe,WAAW,uBAAuB;AAAA,EAGnD,QAAQ,IAAI,oBAAoB,kCAAkC,OAAO;AAAA,EAGzE,IAAI;AAAA,EAEJ,IAAI,eAAe,UAAU;AAAA,IAC3B,WAAW,IAAI;AAAA,EACjB,EAAO;AAAA,IACL,WAAW,IAAI,qBAAqB,EAAE,QAAQ,aAAa,CAAC;AAAA;AAAA,EAI9D,MAAM,SAAS,KAAK,OAAO,cAAc;AAAA,EAGzC,MAAM,MAAgB;AAAA,QAChB,OAAO,GAAG;AAAA,MACZ,OAAO,SAAS;AAAA;AAAA,QAGd,OAAO,GAAG;AAAA,MACZ,OAAO,SAAS;AAAA;AAAA,QAGd,OAAO,GAAG;AAAA,MACZ,OAAO;AAAA;AAAA,SAGH,KAAI,CAAC,UAAU,SAAS;AAAA,MAC5B,MAAM,qBAAqB,kBAAkB,UAAU,YAAY;AAAA,MACnE,OAAO,SAAS,KAAK,oBAAoB,OAAO;AAAA;AAAA,SAG5C,OAAM,CAAC,UAAU,SAAS,SAAS;AAAA,MACvC,MAAM,qBAAqB,kBAAkB,UAAU,YAAY;AAAA,MACnE,OAAO,SAAS,OAAO,oBAAoB,SAAS,OAAO;AAAA;AAAA,IAG7D,aAAa,CAAC,eAAe,gBAAgB,SAAS;AAAA,MACpD,OAAO,iBACL,eACA,gBACA,OAAO,OAAO,YAAY;AAAA,QACxB,MAAM,qBAAqB,kBAAkB,OAAO,YAAY;AAAA,QAChE,OAAO,SAAS,OAAO,oBAAoB,OAAO;AAAA,SAEpD,OACF;AAAA;AAAA,SAGI,OAAM,GAAG;AAAA,MACb,MAAM,SAAS,OAAO;AAAA;AAAA,EAE1B;AAAA,EAEA,OAAO;AAAA;AAMT,eAAsB,iBAAiB,GAAqB;AAAA,EAC1D,MAAM,OAAO,MAAM,mBAAmB;AAAA,EACtC,OAAO,KAAK;AAAA;;;;ALrKd,IAAM,aAAa,cAAsC,IAAI;AA6CtD,SAAS,WAAW;AAAA,EACzB;AAAA,EACA,WAAW;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,KACG;AAAA,GACgB;AAAA,EACnB,OAAO,KAAK,UAAU,SAA0B,IAAI;AAAA,EACpD,OAAO,WAAW,gBAAgB,SAAS,KAAK;AAAA,EAChD,OAAO,cAAc,mBAAmB,SAA8B,IAAI;AAAA,EAC1E,OAAO,OAAO,YAAY,SAAuB,IAAI;AAAA,EAGrD,MAAM,eAAe,OAAO,KAAK;AAAA,EACjC,MAAM,YAAY,OAAO,MAAM;AAAA,EAC/B,UAAU,UAAU;AAAA,EAEpB,MAAM,OAAO,YAAY,YAAY;AAAA,IACnC,IAAI;AAAA,MAAW;AAAA,IAEf,aAAa,IAAI;AAAA,IACjB,SAAS,IAAI;AAAA,IACb,gBAAgB,EAAE,UAAU,GAAG,QAAQ,kBAAkB,CAAC;AAAA,IAE1D,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,UAAU;AAAA,WAC5B,UAAU;AAAA,QACb,gBAAgB,CAAC,aAAa;AAAA,UAC5B,gBAAgB,QAAQ;AAAA,UACxB,aAAa,QAAQ;AAAA;AAAA,MAEzB,CAAC;AAAA,MAED,OAAO,QAAQ;AAAA,MACf,gBAAgB,EAAE,UAAU,KAAK,QAAQ,QAAQ,CAAC;AAAA,MAClD,SAAS,QAAQ;AAAA,MACjB,OAAO,KAAK;AAAA,MACZ,MAAM,SAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAChE,SAAS,MAAK;AAAA,MACd,UAAU,MAAK;AAAA,cACf;AAAA,MACA,aAAa,KAAK;AAAA;AAAA,KAEnB,CAAC,WAAW,QAAQ,YAAY,OAAO,CAAC;AAAA,EAE3C,MAAM,SAAS,YAAY,YAAY;AAAA,IACrC,IAAI,KAAK;AAAA,MACP,MAAM,IAAI,OAAO;AAAA,MACjB,OAAO,IAAI;AAAA,MACX,gBAAgB,IAAI;AAAA,MACpB,aAAa,UAAU;AAAA,IACzB;AAAA,KACC,CAAC,GAAG,CAAC;AAAA,EAER,MAAM,SAAS,YAAY,YAAY;AAAA,IACrC,MAAM,OAAO;AAAA,IACb,MAAM,KAAK;AAAA,KACV,CAAC,QAAQ,IAAI,CAAC;AAAA,EAGjB,UAAU,MAAM;AAAA,IACd,IAAI,YAAY,CAAC,aAAa,WAAW,CAAC,OAAO,CAAC,WAAW;AAAA,MAC3D,aAAa,UAAU;AAAA,MACvB,KAAK;AAAA,IACP;AAAA,KACC,CAAC,UAAU,KAAK,WAAW,IAAI,CAAC;AAAA,EAGnC,UAAU,MAAM;AAAA,IACd,OAAO,MAAM;AAAA,MACX,IAAI,KAAK;AAAA,QACP,IAAI,OAAO,EAAE,MAAM,QAAQ,KAAK;AAAA,MAClC;AAAA;AAAA,KAED,CAAC,GAAG,CAAC;AAAA,EAER,MAAM,QAAQ,QACZ,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,SAAS,KAAK,WAAW;AAAA,IACzB;AAAA,IACA;AAAA,IACA,SAAS,KAAK,WAAW;AAAA,IACzB,SAAS,KAAK,WAAW;AAAA,IACzB;AAAA,IACA;AAAA,EACF,IACA,CAAC,KAAK,WAAW,cAAc,OAAO,QAAQ,MAAM,CACtD;AAAA,EAEA,uBAAO,OAA+C,WAAW,UAA1D;AAAA,IAAqB;AAAA,IAArB;AAAA,sCAA+C;AAAA;AAiBjD,SAAS,MAAM,GAAoB;AAAA,EACxC,MAAM,UAAU,WAAW,UAAU;AAAA,EAErC,IAAI,CAAC,SAAS;AAAA,IACZ,MAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AAAA,EAEA,OAAO;AAAA;AA2HF,SAAS,OAAO,CAAC,UAA0B,CAAC,GAAkB;AAAA,EACnE,QAAQ,KAAK,SAAS,cAAc,OAAO;AAAA,EAE3C;AAAA,IACE,kBAAkB,CAAC;AAAA,IACnB;AAAA,IACA;AAAA,IACA,oBAAoB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,MACE;AAAA,EAEJ,OAAO,UAAU,eAAe,SAAwB,eAAe;AAAA,EACvE,OAAO,OAAO,YAAY,SAAS,EAAE;AAAA,EACrC,OAAO,cAAc,mBAAmB,SAAS,KAAK;AAAA,EACtD,OAAO,eAAe,oBAAoB,SAAS,EAAE;AAAA,EAGrD,
OAAO,gBAAgB,qBAAqB,SAAwB,IAAI;AAAA,EAExE,MAAM,WAAW,OAAO,KAAK;AAAA,EAC7B,MAAM,kBAAkB,OAAO,KAAK;AAAA,EAGpC,MAAM,mBAAmB,YACvB,OAAO,aAAqB,oBAAoD;AAAA,IAC9E,IAAI,CAAC,OAAO,CAAC,WAAW,gBAAgB,SAAS;AAAA,MAC/C,OAAO;AAAA,IACT;AAAA,IAEA,gBAAgB,UAAU;AAAA,IAG1B,MAAM,cAA2B,EAAE,MAAM,QAAQ,SAAS,YAAY;AAAA,IACtE,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,WAAW,CAAC;AAAA,IAG5C,MAAM,cAA6B,CAAC;AAAA,IAEpC,IAAI,cAAc;AAAA,MAChB,YAAY,KAAK,EAAE,MAAM,UAAU,SAAS,aAAa,CAAC;AAAA,IAC5D;AAAA,IAEA,YAAY,KAAK,GAAG,iBAAiB,WAAW;AAAA,IAGhD,gBAAgB,IAAI;AAAA,IACpB,iBAAiB,EAAE;AAAA,IACnB,SAAS,UAAU;AAAA,IACnB,UAAU;AAAA,IAEV,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,IAAI,OACzB,aACA,CAAC,OAAO,aAAa;AAAA,QACnB,IAAI,SAAS;AAAA,UAAS;AAAA,QACtB,iBAAiB,QAAQ;AAAA,QACzB,UAAU,OAAO,QAAQ;AAAA,SAE3B,eACF;AAAA,MAEA,IAAI,CAAC,SAAS,SAAS;AAAA,QAErB,MAAM,mBAAgC;AAAA,UACpC,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,gBAAgB,CAAC;AAAA,QACjD,iBAAiB,EAAE;AAAA,QACnB,WAAW,QAAQ;AAAA,MACrB;AAAA,MAEA,OAAO;AAAA,MACP,OAAO,KAAK;AAAA,MACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAChE,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,cACP;AAAA,MACA,gBAAgB,KAAK;AAAA,MACrB,gBAAgB,UAAU;AAAA;AAAA,KAG9B,CAAC,KAAK,SAAS,cAAc,iBAAiB,SAAS,SAAS,UAAU,OAAO,CACnF;AAAA,EAGA,UAAU,MAAM;AAAA,IACd,IAAI,WAAW,kBAAkB,CAAC,gBAAgB,SAAS;AAAA,MACzD,MAAM,mBAAmB;AAAA,MACzB,kBAAkB,IAAI;AAAA,MACtB,iBAAiB,kBAAkB,QAAQ;AAAA,IAC7C;AAAA,KACC,CAAC,SAAS,gBAAgB,UAAU,gBAAgB,CAAC;AAAA,EAExD,MAAM,OAAO,YACX,OAAO,YAAsC;AAAA,IAC3C,MAAM,iBAAiB,WAAW;AAAA,IAElC,IAAI,CAAC,eAAe,KAAK,GAAG;AAAA,MAC1B,OAAO;AAAA,IACT;AAAA,IAGA,IAAI,CAAC,SAAS;AAAA,MACZ,SAAS,EAAE;AAAA,IACb;AAAA,IAGA,IAAI,OAAO,SAAS;AAAA,MAClB,OAAO,iBAAiB,gBAAgB,QAAQ;AAAA,IAClD;AAAA,IAGA,IAAI,aAAa,mBAAmB;AAAA,MAElC,MAAM,cAA2B,EAAE,MAAM,QAAQ,SAAS,eAAe;AAAA,MACzE,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,WAAW,CAAC;AAAA,MAC5C,kBAAkB,cAAc;AAAA,MAChC,OAAO;AAAA,IACT;AAAA,IAGA,OAAO;AAAA,KAET,CAAC,OAAO,KAAK,SAAS,WAAW,mBAAmB,UAAU,gBAAgB,CAChF;AAAA,EAEA,MAAM,OAAO,YAAY,MAAM;AAAA,IAC7B,SAAS,UAAU;AAAA,IACnB,gBAAgB,KAAK;AAAA,IACrB,kBAAkB,IAAI;AAAA,IAGtB,IAAI,eAAe;AAAA,MACjB,YAAY,CAAC,SAAS;AAAA,QACpB,GAAG;AAAA,QACH,EAAE,MAAM,aAAa,SAAS,gBAAgB,MAAM;AAAA,MACtD,CAAC;AAAA,MACD,iBAAiB,EAAE;AAAA,IACrB;AAAA,KACC,CAAC,aAAa,CAAC;AAAA,EAElB,MAAM,QAAQ,YAAY,MAAM;AAAA,IAC9B,YAAY,eAAe;AAAA,IAC3B,iBAAiB,EAAE;AAAA,IACnB,SAAS,EAAE;AAAA,IACX,kBAAkB,IAAI;AAAA,KACrB,CAAC,eAAe,CAAC;AAAA,EAEpB,MAAM,SAAS,YAAY,CAAC,YAAyB;AAAA,IACnD,YAAY,CAAC,SAAS,CAAC,GAAG,MAAM,OAAO,CAAC;AAAA,KACvC,CAAC,CAAC;AAAA,EAEL,MAAM,SAAS,YAAY,YAA6B;AAAA,IACtD,IAAI,SAAS,WAAW;AAAA,MAAG,OAAO;AAAA,IAGlC,MAAM,gBAAgB,SAAS,cAAc,CAAC,MAAM,EAAE,SAAS,MAAM;AAAA,IACrE,IAAI,kBAAkB;AAAA,MAAI,OAAO;AAAA,IAGjC,MAAM,mBAAmB,SAAS,MAAM,GAAG,aAAa;AAAA,IACxD,MAAM,kBAAkB,SAAS;AAAA,IAGjC,IAAI,CAAC;AAAA,MAAiB,OAAO;AAAA,IAE7B,YAAY,gBAAgB;AAAA,IAG5B,OAAO,KAAK,gBAAgB,OAAO;AAAA,KAClC,CAAC,UAAU,IAAI,CAAC;AAAA,EAEnB,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,mBAAmB;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AA0DK,SAAS,SAAS,CAAC,UAA4B,CAAC,GAAoB;AAAA,EACzE,QAAQ,KAAK,YAAY,OAAO;AAAA,EAChC,QAAQ,iBAAiB,SAAS,UAAU,YAAY;AAAA,EAExD,OAAO,MAAM,WAAW,SAAS,EAAE;AAAA,EACnC,OAAO,aAAa,kBAAkB,SAAS,KAAK;AAAA,EAEpD,MAAM,WAAW,OAAO,KAAK;AAAA,EAE7B,MAAM,SAAS,YACb,OAAO,UAAmD;AAAA,IACxD,IAAI,CAAC,OAAO,CAAC,SAAS;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,IAEA,eAAe,IAAI;AAAA,IACnB,QAAQ,EAAE;AAAA,IACV,SAAS,UAAU;AAAA,IAEnB,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,IAAI,OACzB,OACA,CAAC,OAAO,aAAa;AAAA,QACnB,IAAI,SAAS;AAAA,UAAS;AAAA,QACtB,QAAQ,QAAQ;AAAA,QAChB,UAAU,OAAO,QAAQ;AAAA,SAE3B,eACF;AAAA,MAEA,WAAW,QAAQ;AAAA,MACnB,OAAO;AAAA,MACP,OAAO,KAAK;AAAA,MACZ,MAAM,QAAQ,eAAe,QAAQ,
MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,MAChE,UAAU,KAAK;AAAA,MACf,OAAO;AAAA,cACP;AAAA,MACA,eAAe,KAAK;AAAA;AAAA,KAGxB,CAAC,KAAK,SAAS,iBAAiB,SAAS,UAAU,OAAO,CAC5D;AAAA,EAEA,MAAM,OAAO,YAAY,MAAM;AAAA,IAC7B,SAAS,UAAU;AAAA,IACnB,eAAe,KAAK;AAAA,KACnB,CAAC,CAAC;AAAA,EAEL,MAAM,QAAQ,YAAY,MAAM;AAAA,IAC9B,QAAQ,EAAE;AAAA,KACT,CAAC,CAAC;AAAA,EAEL,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AA6CK,SAAS,aAAa,CAC3B,UAAgC,CAAC,GACZ;AAAA,EACrB,QAAQ,KAAK,YAAY,OAAO;AAAA,EAChC,QAAQ,oBAAoB;AAAA,EAE5B,OAAO,YAAY,iBAAiB,SAAS,EAAE;AAAA,EAC/C,OAAO,WAAW,gBAAgB,SAAS,KAAK;AAAA,EAEhD,MAAM,WAAW,YACf,OAAO,WAAoC;AAAA,IACzC,IAAI,CAAC,OAAO,CAAC,SAAS;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,IAEA,aAAa,IAAI;AAAA,IAEjB,IAAI;AAAA,MACF,MAAM,WAAW,MAAM,IAAI,KAAK,QAAQ,eAAe;AAAA,MACvD,cAAc,QAAQ;AAAA,MACtB,OAAO;AAAA,MACP,OAAO,KAAK;AAAA,MACZ,QAAQ,MAAM,0BAA0B,GAAG;AAAA,MAC3C,OAAO;AAAA,cACP;AAAA,MACA,aAAa,KAAK;AAAA;AAAA,KAGtB,CAAC,KAAK,SAAS,eAAe,CAChC;AAAA,EAEA,MAAM,QAAQ,YAAY,MAAM;AAAA,IAC9B,cAAc,EAAE;AAAA,KACf,CAAC,CAAC;AAAA,EAEL,OAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA;AAyBK,SAAS,UAAU,GAAG,UAAU,aAA8B;AAAA,EACnE,QAAQ,WAAW,iBAAiB,OAAO;AAAA,EAE3C,IAAI,CAAC;AAAA,IAAW,OAAO;AAAA,EAEvB,IAAI,UAAU;AAAA,IACZ,uBAAO,OAAuC,OAAvC;AAAA,MAAK;AAAA,MAAL;AAAA,wCAAuC;AAAA,EAChD;AAAA,EAEA,uBACE,OAGE,OAHF;AAAA,IAAK;AAAA,IAAL,UAGE;AAAA,sBAFA,OAAoD,KAApD;AAAA,kBAAoD;AAAA,UAApD;AAAA,UAAqB,cAAc,YAAY;AAAA,UAA/C;AAAA;AAAA,yCAAoD;AAAA,sBACpD,OAA2B,KAA3B;AAAA,kBAAI,cAAc;AAAA,SAAlB,iCAA2B;AAAA;AAAA,KAF7B,gCAGE;AAAA;AA0BC,SAAS,QAAQ,GAAG,UAAU,WAAW,QAAuB;AAAA,EACrE,QAAQ,SAAS,cAAc,OAAO;AAAA,EAEtC,IAAI,aAAa,CAAC,SAAS;AAAA,IACzB,uBAAO;AAAA,gBAAG;AAAA,OAAH,iCAAc;AAAA,EACvB;AAAA,EAEA,uBAAO;AAAA;AAAA,sCAAc;AAAA;",
13
+ "debugId": "654DAC199C2102F264756E2164756E21",
14
14
  "names": []
15
15
  }
@@ -3,7 +3,7 @@ import {
3
3
  __require
4
4
  } from "./index-55gkckqf.js";
5
5
 
6
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/index.js
6
+ // node_modules/onnxruntime-common/dist/esm/index.js
7
7
  var exports_esm = {};
8
8
  __export(exports_esm, {
9
9
  registerBackend: () => registerBackend,
@@ -15,7 +15,7 @@ __export(exports_esm, {
15
15
  InferenceSession: () => InferenceSession2
16
16
  });
17
17
 
18
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/backend-impl.js
18
+ // node_modules/onnxruntime-common/dist/esm/backend-impl.js
19
19
  var backends = new Map;
20
20
  var backendsSortedByPriority = [];
21
21
  var registerBackend = (name, backend, priority) => {
@@ -117,10 +117,10 @@ var resolveBackendAndExecutionProviders = async (options) => {
117
117
  })
118
118
  ];
119
119
  };
120
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/version.js
120
+ // node_modules/onnxruntime-common/dist/esm/version.js
121
121
  var version = "1.21.0";
122
122
 
123
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/env-impl.js
123
+ // node_modules/onnxruntime-common/dist/esm/env-impl.js
124
124
  var logLevelValue = "warning";
125
125
  var env = {
126
126
  wasm: {},
@@ -142,9 +142,9 @@ var env = {
142
142
  };
143
143
  Object.defineProperty(env, "logLevel", { enumerable: true });
144
144
 
145
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/env.js
145
+ // node_modules/onnxruntime-common/dist/esm/env.js
146
146
  var env2 = env;
147
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/tensor-conversion-impl.js
147
+ // node_modules/onnxruntime-common/dist/esm/tensor-conversion-impl.js
148
148
  var tensorToDataURL = (tensor, options) => {
149
149
  const canvas = typeof document !== "undefined" ? document.createElement("canvas") : new OffscreenCanvas(1, 1);
150
150
  canvas.width = tensor.dims[3];
@@ -303,7 +303,7 @@ var tensorToImageData = (tensor, options) => {
303
303
  return image;
304
304
  };
305
305
 
306
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/tensor-factory-impl.js
306
+ // node_modules/onnxruntime-common/dist/esm/tensor-factory-impl.js
307
307
  var bufferToTensor = (buffer, options) => {
308
308
  if (buffer === undefined) {
309
309
  throw new Error("Image buffer must be defined");
@@ -512,7 +512,7 @@ var tensorFromMLTensor = (mlTensor, options) => {
512
512
  };
513
513
  var tensorFromPinnedBuffer = (type, buffer, dims) => new Tensor({ location: "cpu-pinned", type, data: buffer, dims: dims ?? [buffer.length] });
514
514
 
515
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/tensor-impl-type-mapping.js
515
+ // node_modules/onnxruntime-common/dist/esm/tensor-impl-type-mapping.js
516
516
  var NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP = new Map([
517
517
  ["float32", Float32Array],
518
518
  ["uint8", Uint8Array],
@@ -561,7 +561,7 @@ var checkTypedArray = () => {
561
561
  }
562
562
  };
563
563
 
564
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/tensor-utils-impl.js
564
+ // node_modules/onnxruntime-common/dist/esm/tensor-utils-impl.js
565
565
  var calculateSize = (dims) => {
566
566
  let size = 1;
567
567
  for (let i = 0;i < dims.length; i++) {
@@ -613,7 +613,7 @@ var tensorReshape = (tensor, dims) => {
613
613
  }
614
614
  };
615
615
 
616
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/tensor-impl.js
616
+ // node_modules/onnxruntime-common/dist/esm/tensor-impl.js
617
617
  class Tensor {
618
618
  constructor(arg0, arg1, arg2) {
619
619
  checkTypedArray();
@@ -866,10 +866,10 @@ class Tensor {
866
866
  }
867
867
  }
868
868
 
869
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/tensor.js
869
+ // node_modules/onnxruntime-common/dist/esm/tensor.js
870
870
  var Tensor2 = Tensor;
871
871
 
872
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/trace.js
872
+ // node_modules/onnxruntime-common/dist/esm/trace.js
873
873
  var TRACE = (deviceType, label) => {
874
874
  if (typeof env.trace === "undefined" ? !env.wasm.trace : !env.trace) {
875
875
  return;
@@ -906,7 +906,7 @@ var TRACE_FUNC_END = (extraMsg) => {
906
906
  TRACE_FUNC("END", extraMsg);
907
907
  };
908
908
 
909
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/inference-session-impl.js
909
+ // node_modules/onnxruntime-common/dist/esm/inference-session-impl.js
910
910
  class InferenceSession {
911
911
  constructor(handler) {
912
912
  this.handler = handler;
@@ -1074,9 +1074,9 @@ class InferenceSession {
1074
1074
  }
1075
1075
  }
1076
1076
 
1077
- // node_modules/.pnpm/onnxruntime-common@1.21.0/node_modules/onnxruntime-common/dist/esm/inference-session.js
1077
+ // node_modules/onnxruntime-common/dist/esm/inference-session.js
1078
1078
  var InferenceSession2 = InferenceSession;
1079
- // node_modules/.pnpm/onnxruntime-web@1.22.0-dev.20250409-89f8206ba4/node_modules/onnxruntime-web/dist/ort.bundle.min.mjs
1079
+ // node_modules/onnxruntime-web/dist/ort.bundle.min.mjs
1080
1080
  var exports_ort_bundle_min = {};
1081
1081
  __export(exports_ort_bundle_min, {
1082
1082
  registerBackend: () => $t,
@@ -11882,8 +11882,8 @@ var IS = Nn;
11882
11882
  }
11883
11883
  Object.defineProperty(ge.versions, "web", { value: _a, enumerable: true });
11884
11884
 
11885
- // node_modules/.pnpm/@huggingface+transformers@3.8.1/node_modules/@huggingface/transformers/dist/transformers.web.js
11886
- var __dirname = "/Users/blank/Desktop/CREATE/local-llm/node_modules/.pnpm/@huggingface+transformers@3.8.1/node_modules/@huggingface/transformers/dist";
11885
+ // node_modules/@huggingface/transformers/dist/transformers.web.js
11886
+ var __dirname = "/Users/blank/Desktop/CREATE/local-llm/node_modules/@huggingface/transformers/dist";
11887
11887
  var __webpack_modules__ = {
11888
11888
  "onnxruntime-common": (module) => {
11889
11889
  /*!*************************************!*\
@@ -36098,5 +36098,5 @@ export {
36098
36098
  __webpack_exports__ASTFeatureExtractor as ASTFeatureExtractor
36099
36099
  };
36100
36100
 
36101
- //# debugId=EFD3ABB06C9A5D6E64756E2164756E21
36102
- //# sourceMappingURL=transformers.web-nb96jrhe.js.map
- //# sourceMappingURL=transformers.web-nb96jrhe.js.map
36101
+ //# debugId=1811CAE9675DB8CE64756E2164756E21
36102
+ //# sourceMappingURL=transformers.web-1qr6h84s.js.map