sparkecoder 0.1.76 → 0.1.78
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/index.d.ts +3 -3
- package/dist/agent/index.js +28 -17
- package/dist/agent/index.js.map +1 -1
- package/dist/cli.js +43 -25
- package/dist/cli.js.map +1 -1
- package/dist/db/index.d.ts +2 -2
- package/dist/{index-DRscBFFX.d.ts → index-DT1l57s0.d.ts} +25 -25
- package/dist/index.d.ts +5 -5
- package/dist/index.js +43 -25
- package/dist/index.js.map +1 -1
- package/dist/{schema-C7Mm4Ykn.d.ts → schema-XcP0dedO.d.ts} +3 -3
- package/dist/{search-CVVfuBPZ.d.ts → search-CCffrVJE.d.ts} +4 -4
- package/dist/server/index.js +43 -25
- package/dist/server/index.js.map +1 -1
- package/dist/tools/index.d.ts +3 -3
- package/dist/tools/index.js +19 -14
- package/dist/tools/index.js.map +1 -1
- package/package.json +1 -1
- package/web/.next/BUILD_ID +1 -1
- package/web/.next/standalone/web/.next/BUILD_ID +1 -1
- package/web/.next/standalone/web/.next/build-manifest.json +2 -2
- package/web/.next/standalone/web/.next/prerender-manifest.json +3 -3
- package/web/.next/standalone/web/.next/server/app/_global-error.html +2 -2
- package/web/.next/standalone/web/.next/server/app/_global-error.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_global-error.segments/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_global-error.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_global-error.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_global-error.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_global-error.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.html +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.segments/_not-found/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.segments/_not-found.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/_not-found.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.html +2 -2
- package/web/.next/standalone/web/.next/server/app/docs/installation.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs/installation/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs/installation.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/installation.segments/docs.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.html +2 -2
- package/web/.next/standalone/web/.next/server/app/docs/skills.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs/skills/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs/skills.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/skills.segments/docs.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.html +2 -2
- package/web/.next/standalone/web/.next/server/app/docs/tools.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs/tools/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs/tools.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs/tools.segments/docs.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.html +2 -2
- package/web/.next/standalone/web/.next/server/app/docs.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.segments/docs/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/docs.segments/docs.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.html +1 -1
- package/web/.next/standalone/web/.next/server/app/index.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.segments/!KG1haW4p/__PAGE__.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.segments/!KG1haW4p.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.segments/_full.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.segments/_head.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.segments/_index.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/app/index.segments/_tree.segment.rsc +1 -1
- package/web/.next/standalone/web/.next/server/pages/404.html +1 -1
- package/web/.next/standalone/web/.next/server/pages/500.html +2 -2
- package/web/.next/standalone/web/.next/server/server-reference-manifest.js +1 -1
- package/web/.next/standalone/web/.next/server/server-reference-manifest.json +1 -1
- package/web/.next/standalone/web/package-lock.json +3 -3
- /package/web/.next/standalone/web/.next/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_buildManifest.js +0 -0
- /package/web/.next/standalone/web/.next/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_clientMiddlewareManifest.json +0 -0
- /package/web/.next/standalone/web/.next/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_ssgManifest.js +0 -0
- /package/web/.next/standalone/web/.next/static/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_buildManifest.js +0 -0
- /package/web/.next/standalone/web/.next/static/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_clientMiddlewareManifest.json +0 -0
- /package/web/.next/standalone/web/.next/static/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_ssgManifest.js +0 -0
- /package/web/.next/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_buildManifest.js +0 -0
- /package/web/.next/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_clientMiddlewareManifest.json +0 -0
- /package/web/.next/static/{nZpzcWd5nqIPZFd3PiefU → q5xKLVzzjdkOykOwVu4eK}/_ssgManifest.js +0 -0
package/dist/server/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../src/db/remote.ts","../../src/db/index.ts","../../src/config/types.ts","../../src/config/index.ts","../../src/skills/index.ts","../../src/semantic/types.ts","../../src/semantic/namespace.ts","../../src/semantic/hasher.ts","../../src/semantic/chunker.ts","../../src/semantic/client.ts","../../src/semantic/indexer.ts","../../src/semantic/index.ts","../../src/tools/semantic-search.ts","../../src/utils/webhook.ts","../../src/browser/stream-proxy.ts","../../src/browser/recorder.ts","../../src/server/index.ts","../../src/server/routes/sessions.ts","../../src/agent/index.ts","../../src/agent/model.ts","../../src/agent/remote-model.ts","../../src/tools/bash.ts","../../src/utils/tokens.ts","../../src/utils/truncate.ts","../../src/terminal/tmux.ts","../../src/tools/read-file.ts","../../src/utils/resize-image.ts","../../src/tools/write-file.ts","../../src/checkpoints/index.ts","../../src/lsp/index.ts","../../src/lsp/servers.ts","../../src/lsp/client.ts","../../src/lsp/types.ts","../../src/tools/todo.ts","../../src/tools/load-skill.ts","../../src/tools/linter.ts","../../src/tools/search.ts","../../src/agent/subagent.ts","../../src/agent/subagents/search.ts","../../src/tools/code-graph.ts","../../src/tools/index.ts","../../src/tools/task.ts","../../src/tools/upload-file.ts","../../src/agent/context.ts","../../src/agent/prompts.ts","../../src/utils/sanitize-messages.ts","../../src/agent/model-limits.ts","../../src/server/devtools-store.ts","../../src/server/routes/agents.ts","../../src/server/resumable-stream.ts","../../src/server/routes/health.ts","../../src/server/routes/terminals.ts","../../src/server/routes/tasks.ts","../../src/utils/dependencies.ts"],"sourcesContent":["/**\n * Remote database client\n * \n * Implements the same interface as the local SQLite database\n * but calls the remote server via HTTP.\n */\n\nimport type {\n Session,\n Message,\n ToolExecution,\n TodoItem,\n ModelMessage,\n Terminal,\n ActiveStream,\n Checkpoint,\n 
FileBackup,\n SubagentExecution,\n SubagentStep,\n IndexedChunk,\n IndexStatusRecord,\n LoadedSkill,\n} from './schema.js';\n\nlet remoteServerUrl: string | null = null;\nlet authKey: string | null = null;\n\n/**\n * Initialize the remote database client\n */\nexport function initRemoteDatabase(serverUrl: string, key: string) {\n remoteServerUrl = serverUrl.replace(/\\/$/, ''); // Remove trailing slash\n authKey = key;\n}\n\n/**\n * Close the remote client (no-op, just for API compatibility)\n */\nexport function closeRemoteDatabase() {\n remoteServerUrl = null;\n authKey = null;\n}\n\n/**\n * Check if remote database is configured\n */\nexport function isRemoteConfigured(): boolean {\n return !!remoteServerUrl && !!authKey;\n}\n\n/**\n * Date fields that should be parsed from ISO strings to Date objects.\n * These are top-level metadata fields on database records (Session, Message, etc.),\n * NOT fields inside modelMessage content (tool outputs, etc.).\n */\nconst DATE_FIELDS = ['createdAt', 'updatedAt', 'startedAt', 'completedAt', 'stoppedAt', 'finishedAt', 'loadedAt', 'indexedAt', 'lastFullIndex', 'lastIncrementalIndex'];\n\n/**\n * Fields that contain AI SDK ModelMessage data and should NOT be recursively\n * processed by parseDates. The AI SDK's Zod schema requires tool output values\n * to be valid JSON primitives (string, number, boolean, null, object, array).\n * Converting date strings to Date objects inside these fields corrupts them and\n * causes AI_InvalidPromptError when the messages are passed back to streamText().\n */\nconst MODEL_MESSAGE_FIELDS = ['modelMessage', 'modelMessages'];\n\n/**\n * Parse date strings to Date objects on top-level record fields only.\n * \n * IMPORTANT: Does NOT recurse into `modelMessage` / `modelMessages` fields.\n * Those contain AI SDK ModelMessage data that must remain JSON-serializable.\n * Recursing into them converts date strings (e.g. 
`createdAt` inside tool\n * result outputs) to Date objects, which violates the AI SDK's jsonValueSchema\n * and triggers AI_InvalidPromptError on subsequent streamText() calls.\n */\nfunction parseDates(obj: any): any {\n if (obj === null || obj === undefined) return obj;\n if (Array.isArray(obj)) return obj.map(parseDates);\n if (typeof obj !== 'object' || obj instanceof Date) return obj;\n \n const result = { ...obj };\n for (const key of Object.keys(result)) {\n // Skip modelMessage fields entirely - these must stay JSON-serializable\n if (MODEL_MESSAGE_FIELDS.includes(key)) {\n continue;\n }\n if (DATE_FIELDS.includes(key) && typeof result[key] === 'string') {\n result[key] = new Date(result[key]);\n } else if (typeof result[key] === 'object') {\n result[key] = parseDates(result[key]);\n }\n }\n return result;\n}\n\n/**\n * HTTP helper for remote API calls\n * @param options.skipParseDates - If true, skip the parseDates post-processing.\n * Use for endpoints that return ModelMessage[] directly, since those must\n * remain JSON-serializable for the AI SDK.\n */\nasync function api<T>(\n path: string,\n options: { method?: string; body?: unknown; skipParseDates?: boolean } = {}\n): Promise<T> {\n if (!remoteServerUrl || !authKey) {\n throw new Error('Remote database not initialized');\n }\n \n const url = `${remoteServerUrl}/db${path}`;\n const init: RequestInit = {\n method: options.method || 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${authKey}`,\n },\n };\n \n if (options.body) {\n init.body = JSON.stringify(options.body);\n }\n \n const response = await fetch(url, init);\n \n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(error.error || `HTTP ${response.status}`);\n }\n \n const text = await response.text();\n if (!text || text === 'null') {\n return null as T;\n }\n \n const parsed = JSON.parse(text);\n\n // Skip date 
parsing for raw ModelMessage data - it must stay JSON-serializable\n if (options.skipParseDates) {\n return parsed as T;\n }\n\n // Parse JSON and convert date strings to Date objects\n return parseDates(parsed) as T;\n}\n\n// ============================================\n// Session Queries\n// ============================================\n\nexport const remoteSessionQueries = {\n create(data: { workingDirectory: string; model: string; name?: string; config?: any }): Promise<Session> {\n return api<Session>('/sessions', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`).catch(() => undefined);\n },\n\n list(limit = 50, offset = 0): Promise<Session[]> {\n return api<Session[]>(`/sessions?limit=${limit}&offset=${offset}`);\n },\n\n updateStatus(id: string, status: Session['status']): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`, { method: 'PATCH', body: { status } });\n },\n\n updateModel(id: string, model: string): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`, { method: 'PATCH', body: { model } });\n },\n\n update(id: string, updates: { model?: string; name?: string; config?: any }): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`, { method: 'PATCH', body: updates });\n },\n\n delete(id: string): Promise<boolean> {\n return api<{ success: boolean }>(`/sessions/${id}`, { method: 'DELETE' }).then(r => r?.success ?? 
false);\n },\n};\n\n// ============================================\n// Message Queries\n// ============================================\n\nexport const remoteMessageQueries = {\n async getNextSequence(sessionId: string): Promise<number> {\n const result = await api<{ nextSequence: number }>(`/messages/session/${sessionId}/next-sequence`);\n return result.nextSequence;\n },\n\n create(sessionId: string, modelMessage: ModelMessage): Promise<Message> {\n return api<Message>('/messages', { method: 'POST', body: { sessionId, modelMessage } });\n },\n\n addMany(sessionId: string, modelMessages: ModelMessage[]): Promise<Message[]> {\n return api<Message[]>('/messages/batch', { method: 'POST', body: { sessionId, modelMessages } });\n },\n\n getBySession(sessionId: string): Promise<Message[]> {\n return api<Message[]>(`/messages/session/${sessionId}`);\n },\n\n getModelMessages(sessionId: string): Promise<ModelMessage[]> {\n // IMPORTANT: skipParseDates=true because ModelMessage data must remain\n // JSON-serializable. The parseDates function would convert date strings\n // inside tool result outputs (e.g. 
todo items with createdAt) to Date\n // objects, which violates the AI SDK's jsonValueSchema and causes\n // AI_InvalidPromptError on subsequent streamText() calls.\n return api<ModelMessage[]>(`/messages/session/${sessionId}/model-messages`, { skipParseDates: true });\n },\n\n async getRecentBySession(sessionId: string, limit = 50): Promise<Message[]> {\n const messages = await api<Message[]>(`/messages/session/${sessionId}`);\n return messages.slice(-limit);\n },\n\n async countBySession(sessionId: string): Promise<number> {\n const result = await api<{ count: number }>(`/messages/session/${sessionId}/count`);\n return result.count;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/messages/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n\n async deleteFromSequence(sessionId: string, fromSequence: number): Promise<number> {\n const result = await api<{ deleted: number }>(\n `/messages/session/${sessionId}/from-sequence/${fromSequence}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n};\n\n// ============================================\n// Tool Execution Queries\n// ============================================\n\nexport const remoteToolExecutionQueries = {\n create(data: {\n sessionId: string;\n messageId?: string;\n toolName: string;\n toolCallId: string;\n input?: any;\n requiresApproval?: boolean;\n status?: 'pending' | 'approved' | 'rejected' | 'completed' | 'error';\n }): Promise<ToolExecution> {\n return api<ToolExecution>('/tool-executions', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`).catch(() => undefined);\n },\n\n getByToolCallId(toolCallId: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/by-tool-call-id/${toolCallId}`).catch(() => undefined);\n },\n\n 
getPendingApprovals(sessionId: string): Promise<ToolExecution[]> {\n return api<ToolExecution[]>(`/tool-executions/session/${sessionId}/pending`);\n },\n\n approve(id: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`, { method: 'PATCH', body: { status: 'approved' } });\n },\n\n reject(id: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`, { method: 'PATCH', body: { status: 'rejected' } });\n },\n\n complete(id: string, output: unknown, error?: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`, {\n method: 'PATCH',\n body: { status: error ? 'error' : 'completed', output, error },\n });\n },\n\n getBySession(sessionId: string): Promise<ToolExecution[]> {\n return api<ToolExecution[]>(`/tool-executions/session/${sessionId}`);\n },\n\n async deleteAfterTime(sessionId: string, afterTime: Date | string): Promise<number> {\n // Handle both Date objects and ISO strings\n const timestamp = afterTime instanceof Date ? 
afterTime.getTime() : new Date(afterTime).getTime();\n const result = await api<{ deleted: number }>(\n `/tool-executions/session/${sessionId}/after/${timestamp}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n};\n\n// ============================================\n// Todo Queries\n// ============================================\n\nexport const remoteTodoQueries = {\n create(data: { sessionId: string; content: string; order?: number }): Promise<TodoItem> {\n return api<TodoItem>('/todos', { method: 'POST', body: data });\n },\n\n createMany(sessionId: string, items: Array<{ content: string; order?: number }>): Promise<TodoItem[]> {\n return api<TodoItem[]>('/todos/batch', { method: 'POST', body: { sessionId, items } });\n },\n\n getBySession(sessionId: string): Promise<TodoItem[]> {\n return api<TodoItem[]>(`/todos/session/${sessionId}`);\n },\n\n updateStatus(id: string, status: TodoItem['status']): Promise<TodoItem | undefined> {\n return api<TodoItem | undefined>(`/todos/${id}`, { method: 'PATCH', body: { status } });\n },\n\n async delete(id: string): Promise<boolean> {\n const result = await api<{ success: boolean }>(`/todos/${id}`, { method: 'DELETE' });\n return result?.success ?? 
false;\n },\n\n async clearSession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/todos/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Skill Queries\n// ============================================\n\nexport const remoteSkillQueries = {\n load(sessionId: string, skillName: string): Promise<LoadedSkill> {\n return api<LoadedSkill>('/skills', { method: 'POST', body: { sessionId, skillName } });\n },\n\n getBySession(sessionId: string): Promise<LoadedSkill[]> {\n return api<LoadedSkill[]>(`/skills/session/${sessionId}`);\n },\n\n async isLoaded(sessionId: string, skillName: string): Promise<boolean> {\n const result = await api<{ isLoaded: boolean }>(`/skills/session/${sessionId}/is-loaded/${skillName}`);\n return result.isLoaded;\n },\n};\n\n// ============================================\n// Terminal Queries\n// ============================================\n\nexport const remoteTerminalQueries = {\n create(data: { sessionId: string; command: string; cwd: string; name?: string }): Promise<Terminal> {\n return api<Terminal>('/terminals', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<Terminal | undefined> {\n return api<Terminal | undefined>(`/terminals/${id}`).catch(() => undefined);\n },\n\n getBySession(sessionId: string): Promise<Terminal[]> {\n return api<Terminal[]>(`/terminals/session/${sessionId}`);\n },\n\n getRunning(sessionId: string): Promise<Terminal[]> {\n return api<Terminal[]>(`/terminals/session/${sessionId}/running`);\n },\n\n updateStatus(id: string, status: Terminal['status'], exitCode?: number, error?: string): Promise<Terminal | undefined> {\n return api<Terminal | undefined>(`/terminals/${id}`, { method: 'PATCH', body: { status, exitCode, error } });\n },\n\n updatePid(id: string, pid: number): Promise<Terminal | undefined> {\n return api<Terminal | undefined>(`/terminals/${id}`, { method: 
'PATCH', body: { pid } });\n },\n\n async delete(id: string): Promise<boolean> {\n const result = await api<{ success: boolean }>(`/terminals/${id}`, { method: 'DELETE' });\n return result?.success ?? false;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/terminals/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Active Stream Queries\n// ============================================\n\nexport const remoteActiveStreamQueries = {\n create(sessionId: string, streamId: string): Promise<ActiveStream> {\n return api<ActiveStream>('/streams', { method: 'POST', body: { sessionId, streamId } });\n },\n\n getBySessionId(sessionId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | null>(`/streams/session/${sessionId}`).then(r => r ?? undefined);\n },\n\n getByStreamId(streamId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | undefined>(`/streams/by-stream-id/${streamId}`).catch(() => undefined);\n },\n\n finish(streamId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | undefined>(`/streams/by-stream-id/${streamId}`, { method: 'PATCH', body: { status: 'finished' } });\n },\n\n markError(streamId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | undefined>(`/streams/by-stream-id/${streamId}`, { method: 'PATCH', body: { status: 'error' } });\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/streams/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Checkpoint Queries\n// ============================================\n\nexport const remoteCheckpointQueries = {\n create(data: { sessionId: string; messageSequence: number; gitHead?: string }): Promise<Checkpoint> {\n return 
api<Checkpoint>('/checkpoints', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<Checkpoint | undefined> {\n return api<Checkpoint | undefined>(`/checkpoints/${id}`).catch(() => undefined);\n },\n\n getBySession(sessionId: string): Promise<Checkpoint[]> {\n return api<Checkpoint[]>(`/checkpoints/session/${sessionId}`);\n },\n\n getByMessageSequence(sessionId: string, messageSequence: number): Promise<Checkpoint | undefined> {\n return api<Checkpoint | null>(`/checkpoints/session/${sessionId}/by-sequence/${messageSequence}`).then(r => r ?? undefined);\n },\n\n getLatest(sessionId: string): Promise<Checkpoint | undefined> {\n return api<Checkpoint | null>(`/checkpoints/session/${sessionId}/latest`).then(r => r ?? undefined);\n },\n\n async deleteAfterSequence(sessionId: string, messageSequence: number): Promise<number> {\n const result = await api<{ deleted: number }>(\n `/checkpoints/session/${sessionId}/after-sequence/${messageSequence}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/checkpoints/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// File Backup Queries\n// ============================================\n\nexport const remoteFileBackupQueries = {\n create(data: {\n checkpointId: string;\n sessionId: string;\n filePath: string;\n originalContent: string | null;\n existed: boolean;\n }): Promise<FileBackup> {\n return api<FileBackup>('/file-backups', { method: 'POST', body: data });\n },\n\n getByCheckpoint(checkpointId: string): Promise<FileBackup[]> {\n return api<FileBackup[]>(`/file-backups/checkpoint/${checkpointId}`);\n },\n\n getBySession(sessionId: string): Promise<FileBackup[]> {\n return api<FileBackup[]>(`/file-backups/session/${sessionId}`);\n },\n\n getFromSequence(sessionId: string, messageSequence: number): 
Promise<FileBackup[]> {\n return api<FileBackup[]>(`/file-backups/session/${sessionId}/from-sequence/${messageSequence}`);\n },\n\n async hasBackup(checkpointId: string, filePath: string): Promise<boolean> {\n const result = await api<{ hasBackup: boolean }>(\n `/file-backups/checkpoint/${checkpointId}/has-backup/${encodeURIComponent(filePath)}`\n );\n return result.hasBackup;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/file-backups/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Subagent Queries\n// ============================================\n\nexport const remoteSubagentQueries = {\n create(data: {\n sessionId: string;\n toolCallId: string;\n subagentType: string;\n task: string;\n model: string;\n }): Promise<SubagentExecution> {\n return api<SubagentExecution>('/subagents', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`).catch(() => undefined);\n },\n\n getByToolCallId(toolCallId: string): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/by-tool-call-id/${toolCallId}`).catch(() => undefined);\n },\n\n getBySession(sessionId: string): Promise<SubagentExecution[]> {\n return api<SubagentExecution[]>(`/subagents/session/${sessionId}`);\n },\n\n addStep(id: string, step: SubagentStep): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}/add-step`, { method: 'POST', body: { step } }).catch(() => undefined);\n },\n\n complete(id: string, result: unknown): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`, { method: 'PATCH', body: { status: 'completed', result } }).catch(() => undefined);\n },\n\n markError(id: string, error: string): 
Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`, { method: 'PATCH', body: { status: 'error', error } }).catch(() => undefined);\n },\n\n cancel(id: string): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`, { method: 'PATCH', body: { status: 'cancelled' } }).catch(() => undefined);\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/subagents/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Indexed Chunk Queries\n// ============================================\n\nexport const remoteIndexedChunkQueries = {\n upsert(\n _db: any, // Ignored - for API compatibility\n data: {\n id: string;\n contentHash: string;\n filePath: string;\n repoNamespace: string;\n startLine?: number;\n endLine?: number;\n language?: string;\n }\n ): Promise<IndexedChunk> {\n return api<IndexedChunk>('/indexed-chunks', { method: 'POST', body: data });\n },\n\n batchUpsert(\n _db: any,\n chunks: Array<{\n id: string;\n contentHash: string;\n filePath: string;\n repoNamespace: string;\n startLine?: number;\n endLine?: number;\n language?: string;\n }>\n ): Promise<{ created: number; updated: number }> {\n return api<{ created: number; updated: number }>('/indexed-chunks/batch', { \n method: 'POST', \n body: { chunks } \n });\n },\n\n getById(_db: any, id: string): Promise<IndexedChunk | undefined> {\n return api<IndexedChunk | undefined>(`/indexed-chunks/${id}`).catch(() => undefined);\n },\n\n getByNamespace(_db: any, namespace: string): Promise<IndexedChunk[]> {\n return api<IndexedChunk[]>(`/indexed-chunks/namespace/${namespace}`);\n },\n\n getByFilePath(_db: any, namespace: string, filePath: string): Promise<IndexedChunk[]> {\n return 
api<IndexedChunk[]>(`/indexed-chunks/namespace/${namespace}/file/${encodeURIComponent(filePath)}`);\n },\n\n async deleteByNamespace(_db: any, namespace: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/indexed-chunks/namespace/${namespace}`, { method: 'DELETE' });\n return result.deleted;\n },\n\n async deleteByFilePath(_db: any, namespace: string, filePath: string): Promise<number> {\n const result = await api<{ deleted: number }>(\n `/indexed-chunks/namespace/${namespace}/file/${encodeURIComponent(filePath)}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n\n async countByNamespace(_db: any, namespace: string): Promise<number> {\n const result = await api<{ count: number }>(`/indexed-chunks/namespace/${namespace}/count`);\n return result.count;\n },\n};\n\n// ============================================\n// Index Status Queries\n// ============================================\n\nexport const remoteIndexStatusQueries = {\n upsert(\n _db: any, // Ignored\n data: {\n id: string;\n repoNamespace: string;\n totalChunks?: number;\n lastFullIndex?: Date;\n lastIncrementalIndex?: Date;\n }\n ): Promise<IndexStatusRecord> {\n return api<IndexStatusRecord>('/index-status', {\n method: 'POST',\n body: {\n ...data,\n lastFullIndex: data.lastFullIndex?.toISOString(),\n lastIncrementalIndex: data.lastIncrementalIndex?.toISOString(),\n },\n });\n },\n\n get(_db: any, namespace: string): Promise<IndexStatusRecord | undefined> {\n return api<IndexStatusRecord | null>(`/index-status/namespace/${namespace}`).then(r => r ?? undefined);\n },\n\n async delete(_db: any, namespace: string): Promise<boolean> {\n const result = await api<{ success: boolean }>(`/index-status/namespace/${namespace}`, { method: 'DELETE' });\n return result?.success ?? 
false;\n },\n\n list(_db: any): Promise<IndexStatusRecord[]> {\n return api<IndexStatusRecord[]>('/index-status');\n },\n};\n\n// ============================================\n// Storage (GCS) — calls /storage/* endpoints\n// ============================================\n\nexport interface SessionFile {\n id: string;\n fileName: string;\n contentType: string;\n sizeBytes: number | null;\n category: string;\n createdAt: string;\n downloadUrl: string | null;\n downloadUrlExpiresAt: string | null;\n}\n\nexport interface UploadUrlResponse {\n fileId: string;\n uploadUrl: string;\n gcsPath: string;\n expiresAt: string;\n}\n\nasync function storageApi<T>(\n path: string,\n options: { method?: string; body?: unknown } = {}\n): Promise<T> {\n if (!remoteServerUrl || !authKey) {\n throw new Error('Remote database not initialized');\n }\n\n const url = `${remoteServerUrl}/storage${path}`;\n const init: RequestInit = {\n method: options.method || 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${authKey}`,\n },\n };\n\n if (options.body) {\n init.body = JSON.stringify(options.body);\n }\n\n const response = await fetch(url, init);\n if (!response.ok) {\n const errorText = await response.text().catch(() => 'Unknown error');\n throw new Error(`Storage API error ${response.status}: ${errorText}`);\n }\n return response.json() as Promise<T>;\n}\n\nexport const storageQueries = {\n async getUploadUrl(\n sessionId: string,\n fileName: string,\n contentType: string,\n category?: string\n ): Promise<UploadUrlResponse> {\n return storageApi<UploadUrlResponse>('/upload-url', {\n method: 'POST',\n body: { sessionId, fileName, contentType, category },\n });\n },\n\n async getSessionFiles(sessionId: string): Promise<SessionFile[]> {\n const result = await storageApi<{ files: SessionFile[] }>(`/files/${sessionId}`);\n return result.files;\n },\n\n async getDownloadUrl(fileId: string): Promise<{ downloadUrl: string; expiresAt: string }> {\n return 
storageApi<{ downloadUrl: string; expiresAt: string }>(`/download/${fileId}`);\n },\n\n async deleteFile(fileId: string): Promise<void> {\n await storageApi(`/files/${fileId}`, { method: 'DELETE' });\n },\n\n async updateFile(fileId: string, data: { sizeBytes?: number }): Promise<void> {\n await storageApi(`/files/${fileId}`, { method: 'PATCH', body: data });\n },\n};\n","/**\n * Database layer - Remote MongoDB only\n * \n * All data is stored on the remote server at agent.sparkecode.com\n */\n\nimport {\n initRemoteDatabase,\n closeRemoteDatabase,\n remoteSessionQueries,\n remoteMessageQueries,\n remoteToolExecutionQueries,\n remoteTodoQueries,\n remoteSkillQueries,\n remoteTerminalQueries,\n remoteActiveStreamQueries,\n remoteCheckpointQueries,\n remoteFileBackupQueries,\n remoteSubagentQueries,\n remoteIndexedChunkQueries,\n remoteIndexStatusQueries,\n} from './remote.js';\n\n// Re-export types from schema\nexport type {\n Session,\n NewSession,\n Message,\n NewMessage,\n ToolExecution,\n NewToolExecution,\n TodoItem,\n NewTodoItem,\n SessionConfig,\n ModelMessage,\n UserModelMessage,\n UserContentPart,\n UserTextPart,\n UserImagePart,\n UserFilePart,\n Terminal,\n NewTerminal,\n ActiveStream,\n NewActiveStream,\n Checkpoint,\n NewCheckpoint,\n FileBackup,\n NewFileBackup,\n SubagentExecution,\n NewSubagentExecution,\n SubagentStep,\n IndexedChunk,\n NewIndexedChunk,\n IndexStatusRecord,\n NewIndexStatusRecord,\n LoadedSkill,\n TaskConfig,\n} from './schema.js';\n\nlet initialized = false;\n\n/**\n * Initialize the database with remote server config\n * @param config - Remote server configuration { url, authKey }\n */\nexport function initDatabase(config: { url: string; authKey: string }) {\n initRemoteDatabase(config.url, config.authKey);\n initialized = true;\n}\n\n/**\n * Get a stub database object for API compatibility\n * Functions that take a db parameter will ignore it for remote operations\n */\nexport function getDb() {\n if (!initialized) {\n throw new 
Error('Database not initialized. Call initDatabase first.');\n }\n // Return a stub - the actual queries use remote API calls\n return {} as any;\n}\n\n/**\n * Check if using remote database (always true now)\n */\nexport function isUsingRemote(): boolean {\n return true;\n}\n\n/**\n * Close the database connection\n */\nexport function closeDatabase() {\n closeRemoteDatabase();\n initialized = false;\n}\n\n// Re-export query objects with cleaner names\nexport const sessionQueries = remoteSessionQueries;\nexport const messageQueries = remoteMessageQueries;\nexport const toolExecutionQueries = remoteToolExecutionQueries;\nexport const todoQueries = remoteTodoQueries;\nexport const skillQueries = remoteSkillQueries;\nexport const terminalQueries = remoteTerminalQueries;\nexport const activeStreamQueries = remoteActiveStreamQueries;\nexport const checkpointQueries = remoteCheckpointQueries;\nexport const fileBackupQueries = remoteFileBackupQueries;\nexport const subagentQueries = remoteSubagentQueries;\nexport const indexedChunkQueries = remoteIndexedChunkQueries;\nexport const indexStatusQueries = remoteIndexStatusQueries;\n","import { z } from 'zod';\n\n// Tool approval configuration\nexport const ToolApprovalConfigSchema = z.object({\n bash: z.boolean().optional().default(true),\n write_file: z.boolean().optional().default(false),\n read_file: z.boolean().optional().default(false),\n load_skill: z.boolean().optional().default(false),\n todo: z.boolean().optional().default(false),\n});\n\n// Skill definition (from frontmatter)\nexport const SkillMetadataSchema = z.object({\n name: z.string(),\n description: z.string(),\n // Whether to always inject this skill into context (vs on-demand loading)\n alwaysApply: z.boolean().optional().default(false),\n // Glob patterns - auto-inject when working with matching files\n globs: z.array(z.string()).optional().default([]),\n});\n\n// Skill loading type\nexport type SkillLoadType = 'always' | 'on_demand' | 
'glob_matched';\n\n// Task mode configuration (stored inside SessionConfig)\nexport const TaskConfigSchema = z.object({\n enabled: z.boolean(),\n outputSchema: z.record(z.string(), z.unknown()),\n webhookUrl: z.string().url().optional(),\n maxIterations: z.number().optional(),\n status: z.enum(['running', 'completed', 'failed']),\n result: z.unknown().optional(),\n error: z.string().optional(),\n iterations: z.number().optional(),\n});\n\n// Session-specific config (stored in DB)\nexport const SessionConfigSchema = z.object({\n toolApprovals: z.record(z.string(), z.boolean()).optional(),\n approvalWebhook: z.string().url().optional(),\n skillsDirectory: z.string().optional(),\n maxContextChars: z.number().optional().default(200_000),\n task: TaskConfigSchema.optional(),\n});\n\n// Vector Gateway configuration for semantic search\nexport const VectorGatewayConfigSchema = z\n .object({\n // Redis cluster nodes URL for Vector Gateway (or use REDIS_CLUSTER_NODES env var)\n redisUrl: z.string().optional(),\n // HTTP URL for database operations (or use VECTOR_HTTP_URL env var)\n httpUrl: z.string().optional(),\n // Embedding model to use (default: text-embedding-3-small)\n embeddingModel: z.string().default('gemini-embedding-001'),\n // Custom namespace override (auto-generated from git remote if not set)\n namespace: z.string().optional(),\n // File patterns to include in indexing\n include: z\n .array(z.string())\n .optional()\n .default([\n '**/*.ts',\n '**/*.tsx',\n '**/*.js',\n '**/*.jsx',\n '**/*.py',\n '**/*.go',\n '**/*.rs',\n '**/*.java',\n '**/*.md',\n '**/*.mdx',\n '**/*.txt',\n ]),\n // File patterns to exclude from indexing\n exclude: z\n .array(z.string())\n .optional()\n .default([\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/.next/**',\n '**/*.min.js',\n '**/*.bundle.js',\n '**/pnpm-lock.yaml',\n '**/package-lock.json',\n '**/yarn.lock',\n '**/.test-workspace/**',\n '**/.semantic-test-workspace/**',\n 
'**/.semantic-integration-test/**',\n ]),\n })\n .optional();\n\n// Remote server configuration\nexport const RemoteServerConfigSchema = z\n .object({\n // URL of the remote server (e.g., https://agent.sparkecode.com)\n url: z.string().url().optional(),\n // Auth key for the remote server (auto-generated on first use if not set)\n // Can also be set via SPARKECODER_AUTH_KEY env var\n authKey: z.string().optional(),\n })\n .optional();\n\n// Main sparkecoder config file schema\nexport const SparkcoderConfigSchema = z.object({\n // Default model to use (Vercel AI Gateway format)\n defaultModel: z.string().default('anthropic/claude-opus-4-6'),\n\n // Working directory for file operations\n workingDirectory: z.string().optional(),\n\n // Tool approval settings\n toolApprovals: ToolApprovalConfigSchema.optional().default({}),\n\n // Approval webhook URL (called when approval is needed)\n approvalWebhook: z.string().url().optional(),\n\n // Skills configuration\n skills: z\n .object({\n // Directory containing skill files\n directory: z.string().optional().default('./skills'),\n // Additional skill directories to include\n additionalDirectories: z.array(z.string()).optional().default([]),\n })\n .optional()\n .default({}),\n\n // Context management\n context: z\n .object({\n // Maximum context size before summarization (in characters)\n maxChars: z.number().optional().default(200_000),\n // Enable automatic summarization\n autoSummarize: z.boolean().optional().default(true),\n // Number of recent messages to keep after summarization\n keepRecentMessages: z.number().optional().default(10),\n })\n .optional()\n .default({}),\n\n // Server configuration\n server: z\n .object({\n port: z.number().default(3141),\n host: z.string().default('127.0.0.1'),\n // Public URL for web UI to connect to API (for Docker/remote access)\n // If not set, defaults to http://{host}:{port}\n publicUrl: z.string().url().optional(),\n })\n .default({ port: 3141, host: '127.0.0.1' }),\n\n // 
Database path (used for local SQLite - ignored if remoteServer is configured)\n databasePath: z.string().optional().default('./sparkecoder.db'),\n\n // Remote server configuration (for centralized storage)\n // If configured, uses remote MongoDB instead of local SQLite\n remoteServer: RemoteServerConfigSchema,\n\n // Vector Gateway configuration for semantic search\n vectorGateway: VectorGatewayConfigSchema,\n});\n\nexport type ToolApprovalConfig = z.infer<typeof ToolApprovalConfigSchema>;\nexport type SkillMetadata = z.infer<typeof SkillMetadataSchema>;\nexport type SessionConfig = z.infer<typeof SessionConfigSchema>;\nexport type VectorGatewayConfig = z.infer<typeof VectorGatewayConfigSchema>;\nexport type RemoteServerConfig = z.infer<typeof RemoteServerConfigSchema>;\nexport type SparkcoderConfig = z.infer<typeof SparkcoderConfigSchema>;\n\n// Discovered skill sources\nexport interface DiscoveredSkills {\n // Directories where all skills are always loaded\n alwaysLoadedDirs: Array<{ path: string; priority: number }>;\n // Directories where skills are on-demand (frontmatter can override)\n onDemandDirs: Array<{ path: string; priority: number }>;\n // Path to AGENTS.md if it exists (always loaded)\n agentsMdPath: string | null;\n // All directories in priority order (for deduplication)\n allDirectories: string[];\n}\n\n// Resolved vector gateway config with env var overrides applied\nexport interface ResolvedVectorGatewayConfig {\n redisUrl: string | null;\n httpUrl: string | null;\n embeddingModel: string;\n namespace: string | null;\n include: string[];\n exclude: string[];\n}\n\n// Resolved remote server config\nexport interface ResolvedRemoteServerConfig {\n url: string | null;\n authKey: string | null;\n isConfigured: boolean;\n}\n\n// Runtime config with resolved paths\nexport interface ResolvedConfig extends Omit<SparkcoderConfig, 'server'> {\n server: {\n port: number;\n host: string;\n publicUrl?: string;\n };\n resolvedWorkingDirectory: string;\n 
resolvedSkillsDirectories: string[];\n resolvedDatabasePath: string;\n // Enhanced skill discovery\n discoveredSkills: DiscoveredSkills;\n // Resolved vector gateway config (with env var overrides)\n resolvedVectorGateway: ResolvedVectorGatewayConfig;\n // Resolved remote server config (with env var overrides)\n resolvedRemoteServer: ResolvedRemoteServerConfig;\n}\n","import { existsSync, readFileSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { resolve, dirname, join } from 'node:path';\nimport { homedir, platform } from 'node:os';\nimport {\n SparkcoderConfig,\n SparkcoderConfigSchema,\n ResolvedConfig,\n DiscoveredSkills,\n ResolvedVectorGatewayConfig,\n ResolvedRemoteServerConfig,\n} from './types.js';\n\nconst CONFIG_FILE_NAMES = [\n 'sparkecoder.config.json',\n 'sparkecoder.json',\n '.sparkecoder.json',\n];\n\n/**\n * Discover all skill directories in the working directory\n * Searches for:\n * - .sparkecoder/rules/ (always loaded, priority 1)\n * - .sparkecoder/skills/ (on-demand, priority 2)\n * - .cursor/rules/ (parse frontmatter, priority 3)\n * - .claude/skills/ (on-demand, priority 4)\n * - skills/ (legacy, on-demand, priority 5)\n * - AGENTS.md (always loaded)\n */\nexport function discoverSkillDirectories(workingDir: string): DiscoveredSkills {\n const alwaysLoadedDirs: Array<{ path: string; priority: number }> = [];\n const onDemandDirs: Array<{ path: string; priority: number }> = [];\n const allDirectories: string[] = [];\n let agentsMdPath: string | null = null;\n\n // Priority 1: .sparkecoder/rules/ (always loaded)\n const sparkRulesDir = join(workingDir, '.sparkecoder', 'rules');\n if (existsSync(sparkRulesDir)) {\n alwaysLoadedDirs.push({ path: sparkRulesDir, priority: 1 });\n allDirectories.push(sparkRulesDir);\n }\n\n // Priority 2: .sparkecoder/skills/ (on-demand)\n const sparkSkillsDir = join(workingDir, '.sparkecoder', 'skills');\n if (existsSync(sparkSkillsDir)) {\n onDemandDirs.push({ path: sparkSkillsDir, priority: 2 });\n 
allDirectories.push(sparkSkillsDir);\n }\n\n // Priority 3: .cursor/rules/ (parse frontmatter for alwaysApply)\n const cursorRulesDir = join(workingDir, '.cursor', 'rules');\n if (existsSync(cursorRulesDir)) {\n // Cursor rules can be either - will be determined by frontmatter\n onDemandDirs.push({ path: cursorRulesDir, priority: 3 });\n allDirectories.push(cursorRulesDir);\n }\n\n // Priority 4: .claude/skills/ (on-demand)\n const claudeSkillsDir = join(workingDir, '.claude', 'skills');\n if (existsSync(claudeSkillsDir)) {\n onDemandDirs.push({ path: claudeSkillsDir, priority: 4 });\n allDirectories.push(claudeSkillsDir);\n }\n\n // Priority 5: skills/ (legacy, on-demand)\n const legacySkillsDir = join(workingDir, 'skills');\n if (existsSync(legacySkillsDir)) {\n onDemandDirs.push({ path: legacySkillsDir, priority: 5 });\n allDirectories.push(legacySkillsDir);\n }\n\n // Check for AGENTS.md (always loaded)\n const agentsMd = join(workingDir, 'AGENTS.md');\n if (existsSync(agentsMd)) {\n agentsMdPath = agentsMd;\n }\n\n // Also add built-in skills directory\n // Try multiple paths: dev mode (tsx) resolves from src/config/, production resolves from dist/\n const baseDir = dirname(import.meta.url.replace('file://', ''));\n const builtInCandidates = [\n resolve(baseDir, '../skills/default'), // dev: src/config → src/skills/default\n resolve(baseDir, './skills/default'), // prod: dist/ → dist/skills/default\n ];\n const builtInSkillsDir = builtInCandidates.find(p => existsSync(p));\n if (builtInSkillsDir) {\n onDemandDirs.push({ path: builtInSkillsDir, priority: 100 }); // Lowest priority\n allDirectories.push(builtInSkillsDir);\n }\n\n return {\n alwaysLoadedDirs,\n onDemandDirs,\n agentsMdPath,\n allDirectories,\n };\n}\n\n/**\n * Get the standard application data directory for the current OS\n * - macOS: ~/Library/Application Support/sparkecoder\n * - Windows: %APPDATA%/sparkecoder\n * - Linux: ~/.local/share/sparkecoder\n */\nexport function getAppDataDirectory(): 
string {\n const appName = 'sparkecoder';\n \n switch (platform()) {\n case 'darwin':\n return join(homedir(), 'Library', 'Application Support', appName);\n case 'win32':\n return join(process.env.APPDATA || join(homedir(), 'AppData', 'Roaming'), appName);\n default:\n // Linux and other Unix-like systems\n return join(process.env.XDG_DATA_HOME || join(homedir(), '.local', 'share'), appName);\n }\n}\n\n/**\n * Ensure the app data directory exists\n */\nexport function ensureAppDataDirectory(): string {\n const dir = getAppDataDirectory();\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\nlet cachedConfig: ResolvedConfig | null = null;\n\n/**\n * Find the config file by searching:\n * 1. Up the directory tree from startDir (project-specific config)\n * 2. In the app data directory (global config)\n */\nfunction findConfigFile(startDir: string): string | null {\n // First, search up the directory tree\n let currentDir = startDir;\n\n while (currentDir !== dirname(currentDir)) {\n for (const fileName of CONFIG_FILE_NAMES) {\n const configPath = resolve(currentDir, fileName);\n if (existsSync(configPath)) {\n return configPath;\n }\n }\n currentDir = dirname(currentDir);\n }\n\n // If not found, check the app data directory for a global config\n const appDataDir = getAppDataDirectory();\n for (const fileName of CONFIG_FILE_NAMES) {\n const configPath = join(appDataDir, fileName);\n if (existsSync(configPath)) {\n return configPath;\n }\n }\n\n return null;\n}\n\n/**\n * Load and parse the config file\n */\nexport function loadConfig(\n configPath?: string,\n workingDirectory?: string\n): ResolvedConfig {\n const cwd = workingDirectory || process.cwd();\n\n // Try to find config file\n let rawConfig: Partial<SparkcoderConfig> = {};\n let configDir = cwd;\n\n if (configPath) {\n if (!existsSync(configPath)) {\n throw new Error(`Config file not found: ${configPath}`);\n }\n const content = readFileSync(configPath, 'utf-8');\n 
rawConfig = JSON.parse(content);\n configDir = dirname(resolve(configPath));\n } else {\n const foundPath = findConfigFile(cwd);\n if (foundPath) {\n const content = readFileSync(foundPath, 'utf-8');\n rawConfig = JSON.parse(content);\n configDir = dirname(foundPath);\n }\n }\n\n // Override with environment variables\n if (process.env.SPARKECODER_MODEL) {\n rawConfig.defaultModel = process.env.SPARKECODER_MODEL;\n }\n if (process.env.SPARKECODER_PORT) {\n rawConfig.server = {\n port: parseInt(process.env.SPARKECODER_PORT, 10),\n host: rawConfig.server?.host ?? '127.0.0.1',\n };\n }\n if (process.env.DATABASE_PATH) {\n rawConfig.databasePath = process.env.DATABASE_PATH;\n }\n\n // Parse and validate\n const config = SparkcoderConfigSchema.parse(rawConfig);\n\n // Resolve working directory\n // Priority: CLI argument > absolute path in config > current working directory\n // Note: workingDirectory in config is only used if it's an absolute path,\n // otherwise we default to where the CLI was run from\n let resolvedWorkingDirectory: string;\n if (workingDirectory) {\n // Explicitly passed via CLI\n resolvedWorkingDirectory = workingDirectory;\n } else if (config.workingDirectory && config.workingDirectory !== '.' 
&& config.workingDirectory.startsWith('/')) {\n // Absolute path in config\n resolvedWorkingDirectory = config.workingDirectory;\n } else {\n // Default to current working directory (where CLI was run)\n resolvedWorkingDirectory = process.cwd();\n }\n\n // Discover skill directories from standard locations\n const discovered = discoverSkillDirectories(resolvedWorkingDirectory);\n\n // Combine discovered directories with any additional configured directories\n const additionalDirs = (config.skills?.additionalDirectories || [])\n .map((dir) => resolve(configDir, dir))\n .filter((dir) => existsSync(dir));\n\n const resolvedSkillsDirectories = [\n ...discovered.allDirectories,\n ...additionalDirs,\n ];\n\n // Use app data directory for database by default, unless explicitly configured\n let resolvedDatabasePath: string;\n if (config.databasePath && config.databasePath !== './sparkecoder.db') {\n // User explicitly set a custom path\n resolvedDatabasePath = resolve(configDir, config.databasePath);\n } else {\n // Use standard OS app data directory\n const appDataDir = ensureAppDataDirectory();\n resolvedDatabasePath = join(appDataDir, 'sparkecoder.db');\n }\n\n // Resolve vector gateway config with env var overrides\n const resolvedVectorGateway: ResolvedVectorGatewayConfig = {\n redisUrl: process.env.REDIS_CLUSTER_NODES || config.vectorGateway?.redisUrl || null,\n httpUrl: process.env.VECTOR_HTTP_URL || config.vectorGateway?.httpUrl || null,\n embeddingModel:\n process.env.VECTOR_EMBEDDING_MODEL ||\n config.vectorGateway?.embeddingModel ||\n 'gemini-embedding-001',\n namespace: config.vectorGateway?.namespace || null,\n include: config.vectorGateway?.include || [\n '**/*.ts',\n '**/*.tsx',\n '**/*.js',\n '**/*.jsx',\n '**/*.py',\n '**/*.go',\n '**/*.rs',\n '**/*.java',\n '**/*.md',\n '**/*.mdx',\n '**/*.txt',\n ],\n exclude: config.vectorGateway?.exclude || [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/.next/**',\n '**/*.min.js',\n 
'**/*.bundle.js',\n '**/pnpm-lock.yaml',\n '**/package-lock.json',\n '**/yarn.lock',\n '**/.test-workspace/**',\n '**/.semantic-test-workspace/**',\n '**/.semantic-integration-test/**',\n ],\n };\n\n // Resolve remote server config with env var overrides\n // Default to production server when not explicitly configured\n const DEFAULT_REMOTE_URL = 'https://agent-remote-server.sparkecode.com';\n const remoteUrl = process.env.SPARKECODER_REMOTE_URL || config.remoteServer?.url || DEFAULT_REMOTE_URL;\n const remoteAuthKey = process.env.SPARKECODER_AUTH_KEY || config.remoteServer?.authKey || loadStoredAuthKey();\n \n const resolvedRemoteServer: ResolvedRemoteServerConfig = {\n url: remoteUrl,\n authKey: remoteAuthKey,\n isConfigured: !!remoteUrl && !!remoteAuthKey,\n };\n\n const resolved: ResolvedConfig = {\n ...config,\n server: {\n port: config.server.port,\n host: config.server.host ?? '127.0.0.1',\n publicUrl: config.server.publicUrl,\n },\n resolvedWorkingDirectory,\n resolvedSkillsDirectories,\n resolvedDatabasePath,\n discoveredSkills: discovered,\n resolvedVectorGateway,\n resolvedRemoteServer,\n };\n\n cachedConfig = resolved;\n return resolved;\n}\n\n/**\n * Get the cached config (must call loadConfig first)\n */\nexport function getConfig(): ResolvedConfig {\n if (!cachedConfig) {\n throw new Error('Config not loaded. 
Call loadConfig first.');\n }\n return cachedConfig;\n}\n\n/**\n * Check if a tool requires approval\n */\nexport function requiresApproval(\n toolName: string,\n sessionConfig?: { toolApprovals?: Record<string, boolean> }\n): boolean {\n const config = getConfig();\n\n // Session-level wildcard \"*\" overrides everything (used by --dangerously-skip-approvals)\n if (sessionConfig?.toolApprovals?.['*'] !== undefined) {\n return sessionConfig.toolApprovals['*'];\n }\n\n // Session-level per-tool override takes precedence\n if (sessionConfig?.toolApprovals?.[toolName] !== undefined) {\n return sessionConfig.toolApprovals[toolName];\n }\n\n // Check global config\n const globalApprovals = config.toolApprovals as Record<string, boolean>;\n if (globalApprovals[toolName] !== undefined) {\n return globalApprovals[toolName];\n }\n\n // Default: bash requires approval, others don't\n if (toolName === 'bash') {\n return true;\n }\n\n return false;\n}\n\n/**\n * Create a default config file\n */\nexport function createDefaultConfig(): SparkcoderConfig {\n return {\n defaultModel: 'anthropic/claude-opus-4-6',\n // workingDirectory is intentionally not set - defaults to where CLI is run\n toolApprovals: {\n bash: true,\n write_file: false,\n read_file: false,\n load_skill: false,\n todo: false,\n },\n skills: {\n directory: './skills',\n additionalDirectories: [],\n },\n context: {\n maxChars: 200_000,\n autoSummarize: true,\n keepRecentMessages: 10,\n },\n server: {\n port: 3141,\n host: '127.0.0.1',\n },\n databasePath: './sparkecoder.db',\n };\n}\n\n// ============================================\n// Auth Key Management (for remote server)\n// ============================================\n\nconst AUTH_KEY_FILE = 'auth-key.json';\n\ninterface StoredAuthKey {\n authKey: string;\n createdAt: string;\n userId?: string;\n}\n\n/**\n * Load stored auth key from app data directory\n */\nfunction loadStoredAuthKey(): string | null {\n const keysPath = join(getAppDataDirectory(), 
AUTH_KEY_FILE);\n if (!existsSync(keysPath)) {\n return null;\n }\n try {\n const content = readFileSync(keysPath, 'utf-8');\n const data = JSON.parse(content) as StoredAuthKey;\n return data.authKey || null;\n } catch {\n return null;\n }\n}\n\n/**\n * Save auth key to app data directory\n */\nexport function saveAuthKey(authKey: string, userId?: string): void {\n const appDir = ensureAppDataDirectory();\n const keysPath = join(appDir, AUTH_KEY_FILE);\n const data: StoredAuthKey = {\n authKey,\n createdAt: new Date().toISOString(),\n userId,\n };\n writeFileSync(keysPath, JSON.stringify(data, null, 2), { mode: 0o600 });\n}\n\n/**\n * Get stored auth key info\n */\nexport function getStoredAuthKeyInfo(): StoredAuthKey | null {\n const keysPath = join(getAppDataDirectory(), AUTH_KEY_FILE);\n if (!existsSync(keysPath)) {\n return null;\n }\n try {\n const content = readFileSync(keysPath, 'utf-8');\n return JSON.parse(content) as StoredAuthKey;\n } catch {\n return null;\n }\n}\n\n/**\n * Register with remote server and get new auth key\n */\nexport async function registerWithRemoteServer(\n serverUrl: string,\n name?: string\n): Promise<{ authKey: string; userId: string }> {\n const response = await fetch(`${serverUrl}/auth/register`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ name: name || `CLI ${new Date().toISOString()}` }),\n });\n \n if (!response.ok) {\n const error = await response.json().catch(() => ({})) as { error?: string };\n throw new Error(error.error || `Failed to register: HTTP ${response.status}`);\n }\n \n const data = await response.json() as { authKey: string; userId: string };\n \n // Save the auth key\n saveAuthKey(data.authKey, data.userId);\n \n return data;\n}\n\n/**\n * Ensure we have a valid auth key for the remote server\n * If not configured, registers with the remote server to get one\n */\nexport async function ensureRemoteAuthKey(serverUrl: string): Promise<string> {\n // Check env 
var first\n if (process.env.SPARKECODER_AUTH_KEY) {\n return process.env.SPARKECODER_AUTH_KEY;\n }\n \n // Check stored key\n const storedKey = loadStoredAuthKey();\n if (storedKey) {\n return storedKey;\n }\n \n // Register with remote server\n const { authKey } = await registerWithRemoteServer(serverUrl);\n return authKey;\n}\n\n// ============================================\n// API Key Management\n// ============================================\n\nconst API_KEYS_FILE = 'api-keys.json';\n\n// Provider to environment variable mapping\nconst PROVIDER_ENV_MAP: Record<string, string> = {\n anthropic: 'ANTHROPIC_API_KEY',\n openai: 'OPENAI_API_KEY',\n google: 'GOOGLE_GENERATIVE_AI_API_KEY',\n xai: 'XAI_API_KEY',\n 'ai-gateway': 'AI_GATEWAY_API_KEY',\n};\n\n// All supported providers\nexport const SUPPORTED_PROVIDERS = Object.keys(PROVIDER_ENV_MAP);\n\ninterface StoredApiKeys {\n [provider: string]: string;\n}\n\n/**\n * Get the path to the API keys file\n */\nfunction getApiKeysPath(): string {\n const appDir = ensureAppDataDirectory();\n return join(appDir, API_KEYS_FILE);\n}\n\n/**\n * Load stored API keys from file\n */\nfunction loadStoredApiKeys(): StoredApiKeys {\n const keysPath = getApiKeysPath();\n if (!existsSync(keysPath)) {\n return {};\n }\n try {\n const content = readFileSync(keysPath, 'utf-8');\n return JSON.parse(content);\n } catch {\n return {};\n }\n}\n\n/**\n * Save API keys to file\n */\nfunction saveStoredApiKeys(keys: StoredApiKeys): void {\n const keysPath = getApiKeysPath();\n writeFileSync(keysPath, JSON.stringify(keys, null, 2), { mode: 0o600 }); // Secure permissions\n}\n\n/**\n * Load API keys from storage into environment variables.\n * Called on startup.\n *\n * When a remote server is configured for inference, local API keys are not\n * required -- all LLM calls are proxied through the remote server which\n * holds the keys. 
This function still runs so that local-only fallback\n * mode keeps working.\n */\nexport function loadApiKeysIntoEnv(): void {\n const storedKeys = loadStoredApiKeys();\n \n for (const [provider, envVar] of Object.entries(PROVIDER_ENV_MAP)) {\n if (!process.env[envVar] && storedKeys[provider]) {\n process.env[envVar] = storedKeys[provider];\n }\n }\n}\n\n/**\n * Check whether AI inference is handled by the remote server.\n * When true, local API keys are not needed.\n */\nexport function isRemoteInferenceConfigured(): boolean {\n try {\n const config = getConfig();\n return config.resolvedRemoteServer.isConfigured;\n } catch {\n return false;\n }\n}\n\n/**\n * Set an API key for a provider\n * Saves to storage and sets in current process env\n */\nexport function setApiKey(provider: string, apiKey: string): void {\n const normalizedProvider = provider.toLowerCase();\n const envVar = PROVIDER_ENV_MAP[normalizedProvider];\n \n if (!envVar) {\n throw new Error(`Unknown provider: ${provider}. Supported: ${SUPPORTED_PROVIDERS.join(', ')}`);\n }\n \n // Save to storage\n const storedKeys = loadStoredApiKeys();\n storedKeys[normalizedProvider] = apiKey;\n saveStoredApiKeys(storedKeys);\n \n // Set in current process\n process.env[envVar] = apiKey;\n}\n\n/**\n * Remove an API key for a provider\n */\nexport function removeApiKey(provider: string): void {\n const normalizedProvider = provider.toLowerCase();\n const envVar = PROVIDER_ENV_MAP[normalizedProvider];\n \n if (!envVar) {\n throw new Error(`Unknown provider: ${provider}. 
Supported: ${SUPPORTED_PROVIDERS.join(', ')}`);\n }\n \n // Remove from storage\n const storedKeys = loadStoredApiKeys();\n delete storedKeys[normalizedProvider];\n saveStoredApiKeys(storedKeys);\n \n // Remove from current process (if it was from storage)\n // Note: We can't know if it was from env or storage, so we don't remove from env\n}\n\n/**\n * Get API key status for all providers\n * Returns masked keys (first 4 and last 4 chars) and source (env/storage/none)\n */\nexport function getApiKeyStatus(): Array<{\n provider: string;\n envVar: string;\n configured: boolean;\n source: 'env' | 'storage' | 'none';\n maskedKey: string | null;\n}> {\n const storedKeys = loadStoredApiKeys();\n \n return SUPPORTED_PROVIDERS.map((provider) => {\n const envVar = PROVIDER_ENV_MAP[provider];\n const envValue = process.env[envVar];\n const storedValue = storedKeys[provider];\n \n let source: 'env' | 'storage' | 'none' = 'none';\n let value: string | undefined;\n \n if (envValue) {\n // Check if it came from storage (by comparing)\n if (storedValue && envValue === storedValue) {\n source = 'storage';\n } else {\n source = 'env';\n }\n value = envValue;\n } else if (storedValue) {\n source = 'storage';\n value = storedValue;\n }\n \n return {\n provider,\n envVar,\n configured: !!value,\n source,\n maskedKey: value ? maskApiKey(value) : null,\n };\n });\n}\n\n/**\n * Mask an API key for display (show first 4 and last 4 chars)\n */\nfunction maskApiKey(key: string): string {\n if (key.length <= 12) {\n return '****' + key.slice(-4);\n }\n return key.slice(0, 4) + '...' 
+ key.slice(-4);\n}\n\nexport * from './types.js';\n","import { readFile, readdir } from 'node:fs/promises';\nimport { resolve, basename, extname, relative } from 'node:path';\nimport { existsSync, readFileSync } from 'node:fs';\nimport { minimatch } from 'minimatch';\nimport { SkillMetadata, SkillMetadataSchema, SkillLoadType, DiscoveredSkills } from '../config/types.js';\n\nexport interface Skill {\n name: string;\n description: string;\n filePath: string;\n content?: string; // Only loaded when explicitly requested\n // Enhanced properties\n alwaysApply: boolean;\n globs: string[];\n loadType: SkillLoadType;\n priority: number; // Lower = higher priority for deduplication\n sourceDir: string; // Which directory this skill came from\n}\n\nexport interface SkillWithContent extends Skill {\n content: string;\n}\n\n/**\n * Parse skill metadata from frontmatter\n * Handles YAML-like format including arrays for globs\n */\nfunction parseSkillFrontmatter(content: string): { metadata: SkillMetadata; body: string } | null {\n const frontmatterMatch = content.match(/^---\\n([\\s\\S]*?)\\n---\\n([\\s\\S]*)$/);\n \n if (!frontmatterMatch) {\n return null;\n }\n\n const [, frontmatter, body] = frontmatterMatch;\n \n try {\n // Parse YAML-like frontmatter\n const lines = frontmatter.split('\\n');\n const data: Record<string, unknown> = {};\n let currentArray: string[] | null = null;\n let currentArrayKey: string | null = null;\n \n for (const line of lines) {\n // Check if this is an array item (starts with -)\n if (currentArrayKey && line.trim().startsWith('-')) {\n let value = line.trim().slice(1).trim();\n // Remove quotes if present\n if ((value.startsWith('\"') && value.endsWith('\"')) ||\n (value.startsWith(\"'\") && value.endsWith(\"'\"))) {\n value = value.slice(1, -1);\n }\n currentArray?.push(value);\n continue;\n }\n \n // Close any open array when we hit a non-array line\n if (currentArrayKey && currentArray) {\n data[currentArrayKey] = currentArray;\n 
currentArray = null;\n currentArrayKey = null;\n }\n \n const colonIndex = line.indexOf(':');\n if (colonIndex > 0) {\n const key = line.slice(0, colonIndex).trim();\n let value = line.slice(colonIndex + 1).trim();\n \n // Check if this starts an array (empty value followed by - items)\n if (value === '' || value === '[]') {\n currentArrayKey = key;\n currentArray = [];\n continue;\n }\n \n // Handle inline arrays like globs: [\"*.tsx\", \"*.jsx\"]\n if (value.startsWith('[') && value.endsWith(']')) {\n const arrayContent = value.slice(1, -1);\n const items = arrayContent.split(',').map(item => {\n let trimmed = item.trim();\n if ((trimmed.startsWith('\"') && trimmed.endsWith('\"')) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))) {\n trimmed = trimmed.slice(1, -1);\n }\n return trimmed;\n }).filter(item => item.length > 0);\n data[key] = items;\n continue;\n }\n \n // Remove quotes if present\n if ((value.startsWith('\"') && value.endsWith('\"')) ||\n (value.startsWith(\"'\") && value.endsWith(\"'\"))) {\n value = value.slice(1, -1);\n }\n \n // Handle boolean values\n if (value === 'true') {\n data[key] = true;\n } else if (value === 'false') {\n data[key] = false;\n } else {\n data[key] = value;\n }\n }\n }\n \n // Close any remaining open array\n if (currentArrayKey && currentArray) {\n data[currentArrayKey] = currentArray;\n }\n\n const metadata = SkillMetadataSchema.parse(data);\n return { metadata, body: body.trim() };\n } catch {\n return null;\n }\n}\n\n/**\n * Get skill name from filename if no frontmatter\n */\nfunction getSkillNameFromPath(filePath: string): string {\n return basename(filePath, extname(filePath))\n .replace(/[-_]/g, ' ')\n .replace(/\\b\\w/g, (c) => c.toUpperCase());\n}\n\n/**\n * Options for loading skills from a directory\n */\ninterface LoadSkillsOptions {\n // Priority for deduplication (lower = higher priority)\n priority?: number;\n // Default load type if not specified in frontmatter\n defaultLoadType?: 
SkillLoadType;\n // Force alwaysApply for all skills in this directory\n forceAlwaysApply?: boolean;\n}\n\n/**\n * Load all skills from a directory (metadata only)\n */\nexport async function loadSkillsFromDirectory(\n directory: string,\n options: LoadSkillsOptions = {}\n): Promise<Skill[]> {\n const {\n priority = 50,\n defaultLoadType = 'on_demand',\n forceAlwaysApply = false,\n } = options;\n\n if (!existsSync(directory)) {\n return [];\n }\n\n const skills: Skill[] = [];\n const entries = await readdir(directory, { withFileTypes: true });\n\n for (const entry of entries) {\n // Handle both files and directories (for Claude-style SKILL.md in subdirs)\n let filePath: string;\n let fileName: string;\n\n if (entry.isDirectory()) {\n // Check for SKILL.md inside the directory (Claude format)\n const skillMdPath = resolve(directory, entry.name, 'SKILL.md');\n if (existsSync(skillMdPath)) {\n filePath = skillMdPath;\n fileName = entry.name;\n } else {\n continue;\n }\n } else if (entry.name.endsWith('.md') || entry.name.endsWith('.mdc')) {\n filePath = resolve(directory, entry.name);\n fileName = entry.name;\n } else {\n continue;\n }\n\n const content = await readFile(filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n\n if (parsed) {\n const alwaysApply = forceAlwaysApply || parsed.metadata.alwaysApply;\n const loadType: SkillLoadType = alwaysApply ? 
'always' : defaultLoadType;\n\n skills.push({\n name: parsed.metadata.name,\n description: parsed.metadata.description,\n filePath,\n alwaysApply,\n globs: parsed.metadata.globs,\n loadType,\n priority,\n sourceDir: directory,\n });\n } else {\n // Use filename as name, first paragraph as description\n const name = getSkillNameFromPath(filePath);\n const firstParagraph = content.split('\\n\\n')[0]?.slice(0, 200) || 'No description';\n \n skills.push({\n name,\n description: firstParagraph.replace(/^#\\s*/, '').trim(),\n filePath,\n alwaysApply: forceAlwaysApply,\n globs: [],\n loadType: forceAlwaysApply ? 'always' : defaultLoadType,\n priority,\n sourceDir: directory,\n });\n }\n }\n\n return skills;\n}\n\n/**\n * Load all skills from multiple directories (legacy function for backwards compatibility)\n */\nexport async function loadAllSkills(directories: string[]): Promise<Skill[]> {\n const allSkills: Skill[] = [];\n const seenNames = new Set<string>();\n\n for (const dir of directories) {\n const skills = await loadSkillsFromDirectory(dir);\n for (const skill of skills) {\n // Avoid duplicates (first one wins)\n if (!seenNames.has(skill.name.toLowerCase())) {\n seenNames.add(skill.name.toLowerCase());\n allSkills.push(skill);\n }\n }\n }\n\n return allSkills;\n}\n\n/**\n * Load all skills from discovered directories with proper priority and typing\n */\nexport async function loadAllSkillsFromDiscovered(\n discovered: DiscoveredSkills\n): Promise<{ always: SkillWithContent[]; onDemand: Skill[]; all: Skill[] }> {\n const allSkills: Skill[] = [];\n const seenNames = new Set<string>();\n\n // Load from always-loaded directories (force alwaysApply = true)\n for (const { path, priority } of discovered.alwaysLoadedDirs) {\n const skills = await loadSkillsFromDirectory(path, {\n priority,\n defaultLoadType: 'always',\n forceAlwaysApply: true,\n });\n for (const skill of skills) {\n if (!seenNames.has(skill.name.toLowerCase())) {\n 
seenNames.add(skill.name.toLowerCase());\n allSkills.push(skill);\n }\n }\n }\n\n // Load from on-demand directories (respect frontmatter)\n for (const { path, priority } of discovered.onDemandDirs) {\n const skills = await loadSkillsFromDirectory(path, {\n priority,\n defaultLoadType: 'on_demand',\n forceAlwaysApply: false,\n });\n for (const skill of skills) {\n if (!seenNames.has(skill.name.toLowerCase())) {\n seenNames.add(skill.name.toLowerCase());\n allSkills.push(skill);\n }\n }\n }\n\n // Separate into always-loaded (with content) and on-demand\n const alwaysSkills = allSkills.filter(s => s.alwaysApply || s.loadType === 'always');\n const onDemandSkills = allSkills.filter(s => !s.alwaysApply && s.loadType !== 'always');\n\n // Load content for always-applied skills\n const alwaysWithContent: SkillWithContent[] = await Promise.all(\n alwaysSkills.map(async (skill) => {\n const content = await readFile(skill.filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n return {\n ...skill,\n content: parsed ? 
parsed.body : content,\n };\n })\n );\n\n return {\n always: alwaysWithContent,\n onDemand: onDemandSkills,\n all: allSkills,\n };\n}\n\n/**\n * Get skills that should be auto-injected based on glob patterns matching active files\n */\nexport async function getGlobMatchedSkills(\n skills: Skill[],\n activeFiles: string[],\n workingDirectory: string\n): Promise<SkillWithContent[]> {\n if (activeFiles.length === 0) {\n return [];\n }\n\n // Normalize active files to relative paths\n const relativeFiles = activeFiles.map(f => {\n if (f.startsWith(workingDirectory)) {\n return relative(workingDirectory, f);\n }\n return f;\n });\n\n // Find skills with matching globs that aren't already always-applied\n const matchedSkills = skills.filter(skill => {\n // Skip if already always applied (those are loaded separately)\n if (skill.alwaysApply || skill.loadType === 'always') {\n return false;\n }\n\n // Skip if no globs defined\n if (!skill.globs || skill.globs.length === 0) {\n return false;\n }\n\n // Check if any active file matches any glob\n return relativeFiles.some(file =>\n skill.globs.some(pattern => minimatch(file, pattern, { matchBase: true }))\n );\n });\n\n // Load content for matched skills\n const matchedWithContent: SkillWithContent[] = await Promise.all(\n matchedSkills.map(async (skill) => {\n const content = await readFile(skill.filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n return {\n ...skill,\n content: parsed ? 
parsed.body : content,\n loadType: 'glob_matched' as SkillLoadType,\n };\n })\n );\n\n return matchedWithContent;\n}\n\n/**\n * Load AGENTS.md content if it exists\n */\nexport async function loadAgentsMd(agentsMdPath: string | null): Promise<string | null> {\n if (!agentsMdPath || !existsSync(agentsMdPath)) {\n return null;\n }\n\n const content = await readFile(agentsMdPath, 'utf-8');\n return content;\n}\n\n/**\n * Load a skill's full content by name\n */\nexport async function loadSkillContent(\n skillName: string,\n directories: string[]\n): Promise<SkillWithContent | null> {\n const allSkills = await loadAllSkills(directories);\n const skill = allSkills.find(\n (s) => s.name.toLowerCase() === skillName.toLowerCase()\n );\n\n if (!skill) {\n return null;\n }\n\n const content = await readFile(skill.filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n\n return {\n ...skill,\n content: parsed ? parsed.body : content,\n };\n}\n\n/**\n * Format on-demand skills list for context (shows as available to load)\n */\nexport function formatSkillsForContext(skills: Skill[]): string {\n // Filter to only on-demand skills\n const onDemandSkills = skills.filter(s => !s.alwaysApply && s.loadType !== 'always');\n\n if (onDemandSkills.length === 0) {\n return 'No on-demand skills available.';\n }\n\n const lines = ['Available skills (use load_skill tool to load into context):'];\n for (const skill of onDemandSkills) {\n const globInfo = skill.globs?.length ? 
` [auto-loads for: ${skill.globs.join(', ')}]` : '';\n lines.push(`- ${skill.name}: ${skill.description}${globInfo}`);\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Format always-loaded skills content for injection into system prompt\n */\nexport function formatAlwaysLoadedSkills(skills: SkillWithContent[]): string {\n if (skills.length === 0) {\n return '';\n }\n\n const sections: string[] = [];\n \n for (const skill of skills) {\n sections.push(`### ${skill.name}\\n\\n${skill.content}`);\n }\n\n return `## Active Rules & Skills (Always Loaded)\\n\\n${sections.join('\\n\\n---\\n\\n')}`;\n}\n\n/**\n * Format glob-matched skills content for injection into system prompt\n */\nexport function formatGlobMatchedSkills(skills: SkillWithContent[]): string {\n if (skills.length === 0) {\n return '';\n }\n\n const sections: string[] = [];\n \n for (const skill of skills) {\n sections.push(`### ${skill.name}\\n\\n${skill.content}`);\n }\n\n return `## Context-Relevant Skills (Auto-loaded based on active files)\\n\\n${sections.join('\\n\\n---\\n\\n')}`;\n}\n\n/**\n * Format AGENTS.md content for injection\n */\nexport function formatAgentsMdContent(content: string | null): string {\n if (!content) {\n return '';\n }\n\n return `## Project Instructions (AGENTS.md)\\n\\n${content}`;\n}\n","/**\n * Types for semantic search and indexing\n */\n\n// Chunk types for different code structures\nexport type ChunkType = 'function' | 'class' | 'block' | 'sliding';\n\n// A chunk of code/text ready for embedding\nexport interface Chunk {\n // Unique ID: {contentHash}_{chunkIndex}\n id: string;\n // The text content to embed\n text: string;\n // SHA-256 hash of the chunk content\n contentHash: string;\n // Chunk index within the file\n chunkIndex: number;\n // Metadata for filtering and display\n metadata: ChunkMetadata;\n}\n\nexport interface ChunkMetadata {\n // Relative file path from repo root\n filePath: string;\n // Line range in the source file\n startLine: number;\n endLine: 
number;\n // Detected language\n language: string;\n // Type of chunk (function, class, sliding window, etc.)\n chunkType: ChunkType;\n // Optional: function/class name if semantic chunk\n symbolName?: string;\n}\n\n// Options for the indexing operation\nexport interface IndexOptions {\n // Working directory (repo root)\n workingDirectory: string;\n // Force full re-index (ignore existing hashes)\n force?: boolean;\n // Verbose logging\n verbose?: boolean;\n // Progress callback\n onProgress?: (progress: IndexProgress) => void;\n}\n\n// Progress during indexing\nexport interface IndexProgress {\n phase: 'scanning' | 'chunking' | 'checking' | 'embedding' | 'done';\n totalFiles: number;\n processedFiles: number;\n totalChunks: number;\n newChunks: number;\n skippedChunks: number;\n currentFile?: string;\n}\n\n// Result of indexing operation\nexport interface IndexResult {\n success: boolean;\n namespace: string;\n totalFiles: number;\n totalChunks: number;\n newChunks: number;\n skippedChunks: number;\n failedChunks: number;\n duration: number; // milliseconds\n errors: Array<{ file: string; error: string }>;\n}\n\n// Index status for a repository\nexport interface IndexStatus {\n namespace: string;\n totalChunks: number;\n lastFullIndex: Date | null;\n lastIncrementalIndex: Date | null;\n isConfigured: boolean;\n}\n\n// Semantic search match result\nexport interface SemanticMatch {\n // File path relative to repo root\n filePath: string;\n // Line range\n startLine: number;\n endLine: number;\n // Similarity score (0-1)\n score: number;\n // Snippet of matching text\n snippet: string;\n // Symbol name if available\n symbolName?: string;\n // Language\n language: string;\n}\n\n// Options for semantic search\nexport interface SemanticSearchOptions {\n // Number of results to return\n topK?: number;\n // Filter by file glob pattern\n filePattern?: string;\n // Filter by language\n language?: string;\n // Minimum score threshold (0-1)\n minScore?: number;\n}\n\n// 
Result of semantic search\nexport interface SemanticSearchResult {\n success: boolean;\n query: string;\n matches: SemanticMatch[];\n duration: number; // milliseconds\n error?: string;\n}\n","/**\n * Git remote to namespace resolution\n * Converts git remote URLs to TurboPuffer namespaces\n */\n\nimport { execSync } from 'node:child_process';\n\n/**\n * Get the git remote URL for a repository\n * Returns null if not a git repo or no remote configured\n */\nexport function getGitRemoteUrl(workingDirectory: string): string | null {\n try {\n const result = execSync('git remote get-url origin', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n return result.trim();\n } catch {\n return null;\n }\n}\n\n/**\n * Parse a git remote URL to extract org and repo name\n * Supports:\n * - https://github.com/org/repo.git\n * - git@github.com:org/repo.git\n * - https://gitlab.com/org/repo\n * - ssh://git@bitbucket.org/org/repo.git\n */\nexport function parseGitRemoteUrl(url: string): { org: string; repo: string } | null {\n // Remove .git suffix if present\n const cleanUrl = url.replace(/\\.git$/, '');\n\n // Try SSH format: git@github.com:org/repo\n const sshMatch = cleanUrl.match(/git@[^:]+:([^/]+)\\/(.+)$/);\n if (sshMatch) {\n return { org: sshMatch[1], repo: sshMatch[2] };\n }\n\n // Try HTTPS format: https://github.com/org/repo\n const httpsMatch = cleanUrl.match(/https?:\\/\\/[^/]+\\/([^/]+)\\/(.+)$/);\n if (httpsMatch) {\n return { org: httpsMatch[1], repo: httpsMatch[2] };\n }\n\n // Try SSH with protocol: ssh://git@github.com/org/repo\n const sshProtoMatch = cleanUrl.match(/ssh:\\/\\/[^/]+\\/([^/]+)\\/(.+)$/);\n if (sshProtoMatch) {\n return { org: sshProtoMatch[1], repo: sshProtoMatch[2] };\n }\n\n return null;\n}\n\n/**\n * Sanitize a string for use in a namespace\n * - Lowercase\n * - Replace non-alphanumeric with underscores\n * - Remove leading/trailing underscores\n * - Collapse multiple underscores\n */\nfunction 
sanitizeForNamespace(str: string): string {\n return str\n .toLowerCase()\n .replace(/[^a-z0-9]/g, '_')\n .replace(/^_+|_+$/g, '')\n .replace(/_+/g, '_');\n}\n\n/**\n * Get the namespace for a repository\n * Format: sparkecoder_{org}_{repo}\n * \n * @param workingDirectory - The repo working directory\n * @param configuredNamespace - Optional namespace override from config\n * @returns The namespace string, or null if not a git repo\n */\nexport async function getRepoNamespace(\n workingDirectory: string,\n configuredNamespace?: string | null\n): Promise<string | null> {\n // Use configured namespace if provided\n if (configuredNamespace) {\n return configuredNamespace;\n }\n\n // Get git remote URL\n const remoteUrl = getGitRemoteUrl(workingDirectory);\n if (!remoteUrl) {\n return null;\n }\n\n // Parse org and repo\n const parsed = parseGitRemoteUrl(remoteUrl);\n if (!parsed) {\n return null;\n }\n\n // Build namespace\n const org = sanitizeForNamespace(parsed.org);\n const repo = sanitizeForNamespace(parsed.repo);\n return `sparkecoder_${org}_${repo}`;\n}\n\n/**\n * Check if the working directory is a git repository\n */\nexport function isGitRepository(workingDirectory: string): boolean {\n try {\n execSync('git rev-parse --git-dir', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Get the current git branch name\n */\nexport function getCurrentBranch(workingDirectory: string): string | null {\n try {\n const result = execSync('git rev-parse --abbrev-ref HEAD', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n return result.trim();\n } catch {\n return null;\n }\n}\n\n/**\n * Get the current git commit hash (short form)\n */\nexport function getCurrentCommit(workingDirectory: string): string | null {\n try {\n const result = execSync('git rev-parse --short HEAD', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: 
['pipe', 'pipe', 'pipe'],\n });\n return result.trim();\n } catch {\n return null;\n }\n}\n","/**\n * Content hashing utilities for deduplication\n * Uses SHA-256 for deterministic content hashing\n */\n\nimport { createHash } from 'node:crypto';\n\n/**\n * Compute SHA-256 hash of content\n * Returns first 16 characters of hex digest for a reasonable ID length\n */\nexport function computeContentHash(content: string): string {\n const hash = createHash('sha256');\n hash.update(content, 'utf-8');\n return hash.digest('hex').slice(0, 16);\n}\n\n/**\n * Compute a full SHA-256 hash (64 chars)\n */\nexport function computeFullHash(content: string): string {\n const hash = createHash('sha256');\n hash.update(content, 'utf-8');\n return hash.digest('hex');\n}\n\n/**\n * Generate a chunk ID from content hash and chunk index\n * Format: {contentHash}_{chunkIndex}\n */\nexport function generateChunkId(contentHash: string, chunkIndex: number): string {\n return `${contentHash}_${chunkIndex}`;\n}\n\n/**\n * Parse a chunk ID to extract content hash and chunk index\n */\nexport function parseChunkId(chunkId: string): { contentHash: string; chunkIndex: number } | null {\n const match = chunkId.match(/^([a-f0-9]+)_(\\d+)$/);\n if (!match) {\n return null;\n }\n return {\n contentHash: match[1],\n chunkIndex: parseInt(match[2], 10),\n };\n}\n\n/**\n * Compute hash for a file's content\n * Normalizes line endings for consistent hashing across platforms\n */\nexport function computeFileHash(content: string): string {\n // Normalize line endings to LF\n const normalized = content.replace(/\\r\\n/g, '\\n');\n return computeContentHash(normalized);\n}\n","/**\n * Hybrid code chunking\n * - Semantic chunking for code files (by function/class)\n * - Sliding window for documentation and text files\n */\n\nimport { extname, basename } from 'node:path';\nimport { Chunk, ChunkMetadata, ChunkType } from './types.js';\nimport { computeContentHash, generateChunkId } from './hasher.js';\n\n// 
Language detection by file extension\nconst LANGUAGE_MAP: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.mjs': 'javascript',\n '.cjs': 'javascript',\n '.py': 'python',\n '.go': 'go',\n '.rs': 'rust',\n '.java': 'java',\n '.kt': 'kotlin',\n '.swift': 'swift',\n '.rb': 'ruby',\n '.php': 'php',\n '.c': 'c',\n '.cpp': 'cpp',\n '.h': 'c',\n '.hpp': 'cpp',\n '.cs': 'csharp',\n '.md': 'markdown',\n '.mdx': 'markdown',\n '.txt': 'text',\n '.json': 'json',\n '.yaml': 'yaml',\n '.yml': 'yaml',\n '.toml': 'toml',\n '.xml': 'xml',\n '.html': 'html',\n '.css': 'css',\n '.scss': 'scss',\n '.less': 'less',\n '.sql': 'sql',\n '.sh': 'shell',\n '.bash': 'shell',\n '.zsh': 'shell',\n};\n\n// Languages that support semantic chunking\nconst SEMANTIC_LANGUAGES = new Set([\n 'typescript',\n 'javascript',\n 'python',\n 'go',\n 'rust',\n 'java',\n 'kotlin',\n 'swift',\n 'ruby',\n 'php',\n 'c',\n 'cpp',\n 'csharp',\n]);\n\n// Sliding window config\nconst SLIDING_WINDOW_SIZE = 1500; // ~500 tokens\nconst SLIDING_WINDOW_OVERLAP = 300; // ~100 tokens\n\n// Max chunk size (to avoid very long embeddings)\nconst MAX_CHUNK_SIZE = 4000; // ~1300 tokens\n\n/**\n * Detect language from file path\n */\nexport function detectLanguage(filePath: string): string {\n const ext = extname(filePath).toLowerCase();\n return LANGUAGE_MAP[ext] || 'unknown';\n}\n\n/**\n * Check if a language supports semantic chunking\n */\nexport function supportsSemanticChunking(language: string): boolean {\n return SEMANTIC_LANGUAGES.has(language);\n}\n\n/**\n * Chunk a file into embedding-ready chunks\n */\nexport function chunkFile(filePath: string, content: string): Chunk[] {\n const language = detectLanguage(filePath);\n \n // Skip empty files\n if (!content.trim()) {\n return [];\n }\n\n // Use semantic chunking for supported languages, sliding window otherwise\n if (supportsSemanticChunking(language)) {\n return chunkCodeSemantic(filePath, 
content, language);\n } else {\n return chunkSlidingWindow(filePath, content, language);\n }\n}\n\n/**\n * Semantic chunking for code files\n * Extracts functions, classes, and significant blocks\n */\nfunction chunkCodeSemantic(filePath: string, content: string, language: string): Chunk[] {\n const chunks: Chunk[] = [];\n const lines = content.split('\\n');\n \n // Simple regex-based extraction (tree-sitter would be more accurate)\n // This is a pragmatic approach that works for most cases\n \n const blocks = extractCodeBlocks(lines, language);\n \n if (blocks.length === 0) {\n // Fall back to sliding window if no blocks found\n return chunkSlidingWindow(filePath, content, language);\n }\n\n for (let i = 0; i < blocks.length; i++) {\n const block = blocks[i];\n const blockContent = lines.slice(block.startLine, block.endLine + 1).join('\\n');\n \n // Skip very small blocks\n if (blockContent.trim().length < 50) {\n continue;\n }\n\n // If block is too large, split it\n if (blockContent.length > MAX_CHUNK_SIZE) {\n const subChunks = splitLargeBlock(filePath, blockContent, block.startLine, language, block.type, block.name);\n chunks.push(...subChunks);\n } else {\n const contentHash = computeContentHash(blockContent);\n const chunkId = generateChunkId(contentHash, i);\n \n chunks.push({\n id: chunkId,\n text: buildChunkText(filePath, blockContent, block.name),\n contentHash,\n chunkIndex: i,\n metadata: {\n filePath,\n startLine: block.startLine + 1, // 1-indexed\n endLine: block.endLine + 1,\n language,\n chunkType: block.type as ChunkType,\n symbolName: block.name,\n },\n });\n }\n }\n\n // If no meaningful chunks, fall back to sliding window\n if (chunks.length === 0) {\n return chunkSlidingWindow(filePath, content, language);\n }\n\n return reindexChunks(chunks);\n}\n\n/**\n * Extract code blocks (functions, classes) from source code\n */\nfunction extractCodeBlocks(\n lines: string[],\n language: string\n): Array<{ startLine: number; endLine: number; type: 
string; name?: string }> {\n const blocks: Array<{ startLine: number; endLine: number; type: string; name?: string }> = [];\n \n // Language-specific patterns\n const patterns = getLanguagePatterns(language);\n \n let i = 0;\n while (i < lines.length) {\n const line = lines[i];\n \n // Check for function/class definitions\n for (const pattern of patterns) {\n const match = line.match(pattern.regex);\n if (match) {\n const name = match[1];\n const endLine = findBlockEnd(lines, i, language);\n \n blocks.push({\n startLine: i,\n endLine,\n type: pattern.type,\n name,\n });\n \n i = endLine + 1;\n break;\n }\n }\n \n i++;\n }\n\n // Merge adjacent small blocks\n return mergeSmallBlocks(blocks, lines);\n}\n\n/**\n * Get regex patterns for extracting code blocks\n */\nfunction getLanguagePatterns(language: string): Array<{ regex: RegExp; type: string }> {\n switch (language) {\n case 'typescript':\n case 'javascript':\n return [\n { regex: /^\\s*(?:export\\s+)?(?:async\\s+)?function\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*(?:export\\s+)?(?:const|let|var)\\s+(\\w+)\\s*=\\s*(?:async\\s+)?(?:\\([^)]*\\)|[^=])\\s*=>/, type: 'function' },\n { regex: /^\\s*(?:export\\s+)?class\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:export\\s+)?interface\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:export\\s+)?type\\s+(\\w+)/, type: 'class' },\n ];\n case 'python':\n return [\n { regex: /^\\s*(?:async\\s+)?def\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*class\\s+(\\w+)/, type: 'class' },\n ];\n case 'go':\n return [\n { regex: /^\\s*func\\s+(?:\\([^)]+\\)\\s+)?(\\w+)/, type: 'function' },\n { regex: /^\\s*type\\s+(\\w+)\\s+struct/, type: 'class' },\n { regex: /^\\s*type\\s+(\\w+)\\s+interface/, type: 'class' },\n ];\n case 'rust':\n return [\n { regex: /^\\s*(?:pub\\s+)?(?:async\\s+)?fn\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*(?:pub\\s+)?struct\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:pub\\s+)?impl\\s+(?:<[^>]+>\\s+)?(\\w+)/, type: 'class' },\n { 
regex: /^\\s*(?:pub\\s+)?trait\\s+(\\w+)/, type: 'class' },\n ];\n case 'java':\n case 'kotlin':\n return [\n { regex: /^\\s*(?:public|private|protected)?\\s*(?:static\\s+)?(?:\\w+\\s+)?(\\w+)\\s*\\(/, type: 'function' },\n { regex: /^\\s*(?:public|private|protected)?\\s*(?:abstract\\s+)?class\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:public|private|protected)?\\s*interface\\s+(\\w+)/, type: 'class' },\n ];\n default:\n return [\n { regex: /^\\s*(?:function|def|fn|func)\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*class\\s+(\\w+)/, type: 'class' },\n ];\n }\n}\n\n/**\n * Find the end of a code block (matching braces/indentation)\n */\nfunction findBlockEnd(lines: string[], startLine: number, language: string): number {\n // Python uses indentation\n if (language === 'python') {\n return findPythonBlockEnd(lines, startLine);\n }\n \n // Most languages use braces\n return findBraceBlockEnd(lines, startLine);\n}\n\n/**\n * Find block end for brace-based languages\n */\nfunction findBraceBlockEnd(lines: string[], startLine: number): number {\n let braceCount = 0;\n let foundOpen = false;\n \n for (let i = startLine; i < lines.length; i++) {\n const line = lines[i];\n \n for (const char of line) {\n if (char === '{') {\n braceCount++;\n foundOpen = true;\n } else if (char === '}') {\n braceCount--;\n }\n }\n \n if (foundOpen && braceCount === 0) {\n return i;\n }\n }\n \n // If no matching brace found, return a reasonable chunk\n return Math.min(startLine + 50, lines.length - 1);\n}\n\n/**\n * Find block end for Python (indentation-based)\n */\nfunction findPythonBlockEnd(lines: string[], startLine: number): number {\n const startIndent = getIndentLevel(lines[startLine]);\n \n for (let i = startLine + 1; i < lines.length; i++) {\n const line = lines[i];\n \n // Skip empty lines\n if (!line.trim()) {\n continue;\n }\n \n const indent = getIndentLevel(line);\n \n // Block ends when we return to same or lower indentation\n if (indent <= startIndent && 
line.trim()) {\n return i - 1;\n }\n }\n \n return lines.length - 1;\n}\n\n/**\n * Get indentation level of a line\n */\nfunction getIndentLevel(line: string): number {\n const match = line.match(/^(\\s*)/);\n return match ? match[1].length : 0;\n}\n\n/**\n * Merge small adjacent blocks\n */\nfunction mergeSmallBlocks(\n blocks: Array<{ startLine: number; endLine: number; type: string; name?: string }>,\n lines: string[]\n): Array<{ startLine: number; endLine: number; type: string; name?: string }> {\n if (blocks.length === 0) {\n return blocks;\n }\n\n const merged: typeof blocks = [];\n let current = blocks[0];\n\n for (let i = 1; i < blocks.length; i++) {\n const next = blocks[i];\n const currentContent = lines.slice(current.startLine, current.endLine + 1).join('\\n');\n const gap = next.startLine - current.endLine;\n \n // Merge if current block is small and gap is small\n if (currentContent.length < 500 && gap <= 3) {\n current = {\n startLine: current.startLine,\n endLine: next.endLine,\n type: 'block',\n name: current.name,\n };\n } else {\n merged.push(current);\n current = next;\n }\n }\n \n merged.push(current);\n return merged;\n}\n\n/**\n * Split a large block into smaller chunks\n */\nfunction splitLargeBlock(\n filePath: string,\n content: string,\n startLine: number,\n language: string,\n type: string,\n name?: string\n): Chunk[] {\n const chunks: Chunk[] = [];\n const lines = content.split('\\n');\n \n let currentStart = 0;\n let currentChunk = '';\n \n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n const newChunk = currentChunk + (currentChunk ? 
'\\n' : '') + line;\n \n if (newChunk.length > MAX_CHUNK_SIZE && currentChunk) {\n const contentHash = computeContentHash(currentChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk, name),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: startLine + currentStart + 1,\n endLine: startLine + i,\n language,\n chunkType: type as ChunkType,\n symbolName: name,\n },\n });\n \n currentStart = i;\n currentChunk = line;\n } else {\n currentChunk = newChunk;\n }\n }\n \n // Add remaining content\n if (currentChunk.trim()) {\n const contentHash = computeContentHash(currentChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk, name),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: startLine + currentStart + 1,\n endLine: startLine + lines.length,\n language,\n chunkType: type as ChunkType,\n symbolName: name,\n },\n });\n }\n\n return chunks;\n}\n\n/**\n * Sliding window chunking for non-code files\n */\nfunction chunkSlidingWindow(filePath: string, content: string, language: string): Chunk[] {\n const chunks: Chunk[] = [];\n \n // If content is small enough, return as single chunk\n if (content.length <= MAX_CHUNK_SIZE) {\n const contentHash = computeContentHash(content);\n chunks.push({\n id: generateChunkId(contentHash, 0),\n text: buildChunkText(filePath, content),\n contentHash,\n chunkIndex: 0,\n metadata: {\n filePath,\n startLine: 1,\n endLine: content.split('\\n').length,\n language,\n chunkType: 'sliding',\n },\n });\n return chunks;\n }\n\n // Split by lines to preserve line boundaries\n const lines = content.split('\\n');\n let currentStart = 0;\n let currentChunk = '';\n let currentLineStart = 0;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n const newChunk = currentChunk + (currentChunk ? 
'\\n' : '') + line;\n \n if (newChunk.length >= SLIDING_WINDOW_SIZE) {\n const contentHash = computeContentHash(currentChunk || newChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk || newChunk),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: currentLineStart + 1,\n endLine: i + 1,\n language,\n chunkType: 'sliding',\n },\n });\n \n // Move back for overlap\n const overlapLines = Math.floor(SLIDING_WINDOW_OVERLAP / 50); // rough estimate\n currentLineStart = Math.max(currentStart, i - overlapLines);\n currentChunk = lines.slice(currentLineStart, i + 1).join('\\n');\n currentStart = currentLineStart;\n } else {\n currentChunk = newChunk;\n }\n }\n\n // Add remaining content\n if (currentChunk.trim() && currentChunk.length > 50) {\n const contentHash = computeContentHash(currentChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: currentLineStart + 1,\n endLine: lines.length,\n language,\n chunkType: 'sliding',\n },\n });\n }\n\n return reindexChunks(chunks);\n}\n\n/**\n * Build the text to embed, including file context\n */\nfunction buildChunkText(filePath: string, content: string, symbolName?: string): string {\n const fileName = basename(filePath);\n let text = `File: ${filePath}\\n`;\n \n if (symbolName) {\n text += `Symbol: ${symbolName}\\n`;\n }\n \n text += `\\n${content}`;\n return text;\n}\n\n/**\n * Re-index chunks to ensure sequential chunk indices\n */\nfunction reindexChunks(chunks: Chunk[]): Chunk[] {\n return chunks.map((chunk, index) => ({\n ...chunk,\n chunkIndex: index,\n id: generateChunkId(chunk.contentHash, index),\n }));\n}\n","/**\n * Vector client - uses remote server API for vector operations\n * This removes the need for the private vector SDK in the client\n */\n\nimport { getConfig } 
from '../config/index.js';\n\n// Types for vector operations (matching remote server API)\nexport interface EmbeddingRequest {\n texts: Array<{ id: string; text: string; document: Record<string, unknown> }>;\n namespace: string;\n embeddingModel?: string;\n}\n\nexport interface EmbeddingError {\n id?: string;\n error: string;\n}\n\nexport interface EmbeddingResult {\n processedCount: number;\n failedCount: number;\n errors?: EmbeddingError[];\n}\n\nexport interface SearchRequest {\n query: string;\n namespace: string;\n topK?: number;\n embeddingModel?: string;\n}\n\nexport interface SearchMatch {\n id: string;\n score: number;\n metadata?: Record<string, unknown>;\n}\n\nexport interface SearchResult {\n matches: SearchMatch[];\n}\n\n// Remote vector client state\nlet remoteServerUrl: string | null = null;\nlet authKey: string | null = null;\n\n/**\n * Initialize the vector client with remote server config\n */\nexport function initVectorClient(serverUrl: string, key: string) {\n remoteServerUrl = serverUrl.replace(/\\/$/, '');\n authKey = key;\n}\n\n/**\n * Check if vector client is configured\n */\nexport function isVectorClientConfigured(): boolean {\n return !!remoteServerUrl && !!authKey;\n}\n\n/**\n * HTTP helper for remote vector API calls\n */\nasync function vectorApi<T>(\n path: string,\n options: { method?: string; body?: unknown } = {}\n): Promise<T> {\n if (!remoteServerUrl || !authKey) {\n throw new Error('Vector client not initialized - remote server not configured');\n }\n \n const url = `${remoteServerUrl}/vectors${path}`;\n const init: RequestInit = {\n method: options.method || 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${authKey}`,\n },\n };\n \n if (options.body) {\n init.body = JSON.stringify(options.body);\n }\n \n const response = await fetch(url, init);\n \n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new 
Error(error.error || `HTTP ${response.status}`);\n }\n \n return response.json() as Promise<T>;\n}\n\n/**\n * Remote vector client that calls the remote server\n * Implements same interface as the old VectorClient SDK\n */\nexport const remoteVectorClient = {\n embeddings: {\n /**\n * Create embeddings and store in vector DB\n */\n async createAndWait(\n texts: Array<{ id: string; text: string; document: Record<string, unknown> }>,\n options: {\n namespace: string;\n embeddingModel?: string;\n }\n ): Promise<EmbeddingResult> {\n return vectorApi<EmbeddingResult>('/embed', {\n method: 'POST',\n body: {\n texts,\n namespace: options.namespace,\n embeddingModel: options.embeddingModel,\n },\n });\n },\n },\n \n search: {\n /**\n * Query vectors using semantic search\n */\n async queryAndWait(\n query: string,\n options: {\n namespace: string;\n topK?: number;\n includeMetadata?: boolean;\n embeddingModel?: string;\n }\n ): Promise<SearchResult> {\n return vectorApi<SearchResult>('/search', {\n method: 'POST',\n body: {\n query,\n namespace: options.namespace,\n topK: options.topK || 10,\n embeddingModel: options.embeddingModel,\n },\n });\n },\n },\n \n /**\n * Delete a namespace (if supported)\n */\n async deleteNamespace(namespace: string): Promise<void> {\n await vectorApi(`/namespace/${encodeURIComponent(namespace)}`, {\n method: 'DELETE',\n });\n },\n \n /**\n * Close client (no-op for HTTP client)\n */\n async close(): Promise<void> {\n // No-op - HTTP connections don't need cleanup\n },\n};\n\n// Type alias for the vector client\nexport type VectorClient = typeof remoteVectorClient;\n\n/**\n * Get the Vector client\n * Returns null if remote server is not configured\n */\nexport function getVectorClient(): VectorClient | null {\n if (!isVectorClientConfigured()) {\n // Try to initialize from config\n try {\n const config = getConfig();\n if (config.resolvedRemoteServer.url && config.resolvedRemoteServer.authKey) {\n 
initVectorClient(config.resolvedRemoteServer.url, config.resolvedRemoteServer.authKey);\n } else {\n return null;\n }\n } catch {\n return null;\n }\n }\n \n return remoteVectorClient;\n}\n\n/**\n * Close the vector client (no-op for HTTP client)\n */\nexport async function closeVectorClient(): Promise<void> {\n // No-op - HTTP connections don't need cleanup\n}\n\n/**\n * Check if Vector Gateway is configured (via remote server)\n */\nexport function isVectorGatewayConfigured(): boolean {\n try {\n const config = getConfig();\n return !!(config.resolvedRemoteServer.url && config.resolvedRemoteServer.authKey);\n } catch {\n return false;\n }\n}\n\n/**\n * Get the configured embedding model\n */\nexport function getEmbeddingModel(): string {\n try {\n const config = getConfig();\n return config.resolvedVectorGateway.embeddingModel;\n } catch {\n return 'gemini-embedding-001';\n }\n}\n","/**\n * Repository indexing pipeline\n * Walks the repo, chunks files, and sends to Vector Gateway for embedding\n */\n\nimport { readFileSync, statSync } from 'node:fs';\nimport { join, relative } from 'node:path';\nimport { minimatch } from 'minimatch';\nimport { getConfig } from '../config/index.js';\nimport { getDb, indexedChunkQueries, indexStatusQueries } from '../db/index.js';\nimport { Chunk, IndexOptions, IndexProgress, IndexResult, IndexStatus } from './types.js';\nimport { getRepoNamespace, isGitRepository } from './namespace.js';\nimport { chunkFile } from './chunker.js';\nimport { getVectorClient, closeVectorClient, getEmbeddingModel } from './client.js';\n\n// Max file size to index (1MB)\nconst MAX_FILE_SIZE = 1024 * 1024;\n\n// Batch size and concurrency for embedding requests\nconst EMBEDDING_BATCH_SIZE = 50;\nconst EMBEDDING_CONCURRENCY = 5;\nconst EMBEDDING_RETRIES = 2;\n\nfunction parsePositiveInt(value: string | undefined, fallback: number): number {\n const parsed = Number(value);\n if (!Number.isFinite(parsed) || parsed <= 0) {\n return fallback;\n }\n return 
Math.floor(parsed);\n}\n\nfunction formatError(error: unknown): string {\n if (error instanceof Error) {\n return error.message || 'Unknown error';\n }\n if (typeof error === 'string') {\n return error;\n }\n try {\n return JSON.stringify(error);\n } catch {\n return 'Unknown error';\n }\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\n/**\n * Check if a path matches any exclude pattern\n */\nfunction isPathExcluded(relativePath: string, exclude: string[]): boolean {\n return exclude.some((pattern) => {\n // Direct match\n if (minimatch(relativePath, pattern, { dot: true })) {\n return true;\n }\n // For directory patterns like \"node_modules/**\", also check if the path\n // starts with the directory name (for skipping entire directories)\n if (pattern.endsWith('/**')) {\n const dirPattern = pattern.slice(0, -3); // Remove \"/**\"\n if (relativePath === dirPattern || relativePath.startsWith(dirPattern + '/')) {\n return true;\n }\n }\n return false;\n });\n}\n\n/**\n * Walk directory and collect files matching patterns\n */\nasync function walkDirectory(\n dir: string,\n include: string[],\n exclude: string[],\n baseDir: string\n): Promise<string[]> {\n const { readdirSync } = await import('node:fs');\n const { join, relative } = await import('node:path');\n \n const files: string[] = [];\n \n function walk(currentDir: string) {\n let entries;\n try {\n entries = readdirSync(currentDir, { withFileTypes: true });\n } catch {\n // Skip directories we can't read\n return;\n }\n \n for (const entry of entries) {\n const fullPath = join(currentDir, entry.name);\n const relativePath = relative(baseDir, fullPath);\n \n // Check exclusions first\n if (isPathExcluded(relativePath, exclude)) {\n continue;\n }\n \n if (entry.isDirectory()) {\n walk(fullPath);\n } else if (entry.isFile()) {\n // Check inclusions\n const isIncluded = include.some((pattern) => {\n return minimatch(relativePath, pattern, { dot: 
true });\n });\n \n if (isIncluded) {\n files.push(fullPath);\n }\n }\n }\n }\n \n walk(dir);\n return files;\n}\n\n/**\n * Check if a file should be skipped (binary, too large, etc.)\n */\nfunction shouldSkipFile(filePath: string): { skip: boolean; reason?: string } {\n try {\n const stats = statSync(filePath);\n \n if (stats.size > MAX_FILE_SIZE) {\n return { skip: true, reason: 'File too large (>1MB)' };\n }\n \n if (stats.size === 0) {\n return { skip: true, reason: 'Empty file' };\n }\n \n // For text detection, just try to read the file as UTF-8\n // If it fails or has null bytes, it's likely binary\n try {\n const content = readFileSync(filePath, 'utf-8');\n // Check for null bytes in first 1000 chars (binary indicator)\n const sample = content.slice(0, 1000);\n if (sample.includes('\\0')) {\n return { skip: true, reason: 'Binary file' };\n }\n } catch {\n return { skip: true, reason: 'Cannot read as text' };\n }\n \n return { skip: false };\n } catch (error) {\n return { skip: true, reason: `Error reading file: ${error}` };\n }\n}\n\n/**\n * Index a repository for semantic search\n */\nexport async function indexRepository(options: IndexOptions): Promise<IndexResult> {\n const startTime = Date.now();\n const errors: Array<{ file: string; error: string }> = [];\n \n const progress: IndexProgress = {\n phase: 'scanning',\n totalFiles: 0,\n processedFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n };\n \n const reportProgress = () => {\n if (options.onProgress) {\n options.onProgress({ ...progress });\n }\n };\n\n // Check if git repository\n if (!isGitRepository(options.workingDirectory)) {\n return {\n success: false,\n namespace: '',\n totalFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n failedChunks: 0,\n duration: Date.now() - startTime,\n errors: [{ file: '', error: 'Not a git repository' }],\n };\n }\n\n // Get config\n const config = getConfig();\n const { include, exclude, namespace: configNamespace } = 
config.resolvedVectorGateway;\n\n // Get namespace\n const namespace = await getRepoNamespace(options.workingDirectory, configNamespace);\n if (!namespace) {\n return {\n success: false,\n namespace: '',\n totalFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n failedChunks: 0,\n duration: Date.now() - startTime,\n errors: [{ file: '', error: 'Could not determine repository namespace. Ensure git remote is configured.' }],\n };\n }\n\n // Get vector client\n const client = getVectorClient();\n if (!client) {\n return {\n success: false,\n namespace,\n totalFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n failedChunks: 0,\n duration: Date.now() - startTime,\n errors: [{ file: '', error: 'Remote server not configured. Set SPARKECODER_REMOTE_URL/SPARKECODER_AUTH_KEY or remoteServer in sparkecoder.config.json' }],\n };\n }\n\n try {\n // Phase 1: Scan files\n progress.phase = 'scanning';\n reportProgress();\n \n const files = await walkDirectory(\n options.workingDirectory,\n include,\n exclude,\n options.workingDirectory\n );\n \n progress.totalFiles = files.length;\n reportProgress();\n\n // Phase 2: Chunk files\n progress.phase = 'chunking';\n reportProgress();\n \n const allChunks: Chunk[] = [];\n \n for (const filePath of files) {\n const relativePath = relative(options.workingDirectory, filePath);\n progress.currentFile = relativePath;\n \n const skipCheck = shouldSkipFile(filePath);\n if (skipCheck.skip) {\n if (options.verbose) {\n console.log(`Skipping ${relativePath}: ${skipCheck.reason}`);\n }\n progress.processedFiles++;\n reportProgress();\n continue;\n }\n \n try {\n const content = readFileSync(filePath, 'utf-8');\n const chunks = chunkFile(relativePath, content);\n allChunks.push(...chunks);\n progress.totalChunks += chunks.length;\n } catch (error) {\n errors.push({ file: relativePath, error: String(error) });\n }\n \n progress.processedFiles++;\n reportProgress();\n }\n\n // Phase 3: Check existing hashes\n 
progress.phase = 'checking';\n reportProgress();\n \n const db = getDb();\n const existingHashes = new Set<string>();\n \n if (!options.force) {\n // Get all existing chunk IDs for this namespace\n const existingChunks = await indexedChunkQueries.getByNamespace(db, namespace);\n for (const chunk of existingChunks) {\n existingHashes.add(chunk.id);\n }\n }\n \n // Filter to new chunks only\n const newChunks = allChunks.filter((chunk) => !existingHashes.has(chunk.id));\n progress.newChunks = newChunks.length;\n progress.skippedChunks = allChunks.length - newChunks.length;\n reportProgress();\n\n // Phase 4: Embed new chunks\n progress.phase = 'embedding';\n reportProgress();\n \n const embeddingModel = getEmbeddingModel();\n let failedChunks = 0;\n \n // Process in batches (parallelized with a worker pool)\n const batchSize = parsePositiveInt(process.env.SPARKECODER_INDEX_BATCH_SIZE, EMBEDDING_BATCH_SIZE);\n const concurrency = parsePositiveInt(process.env.SPARKECODER_INDEX_CONCURRENCY, EMBEDDING_CONCURRENCY);\n const maxRetries = parsePositiveInt(process.env.SPARKECODER_INDEX_RETRIES, EMBEDDING_RETRIES);\n const totalBatches = Math.ceil(newChunks.length / batchSize);\n console.log(\n `[indexer] Starting embedding: ${newChunks.length} chunks in ${totalBatches} batches (batchSize=${batchSize}, concurrency=${concurrency}, retries=${maxRetries})`\n );\n \n const batches = newChunks.reduce<Array<{ batchNum: number; batch: Chunk[] }>>((acc, chunk, index) => {\n if (index % batchSize === 0) {\n acc.push({\n batchNum: Math.floor(index / batchSize) + 1,\n batch: newChunks.slice(index, index + batchSize),\n });\n }\n return acc;\n }, []);\n\n const processBatch = async (batchNum: number, batch: Chunk[]) => {\n console.log(`[indexer] Batch ${batchNum}/${totalBatches}: embedding ${batch.length} chunks...`);\n const texts = batch.map((chunk) => ({\n id: chunk.id,\n text: chunk.text,\n document: {\n filePath: chunk.metadata.filePath,\n startLine: chunk.metadata.startLine,\n 
endLine: chunk.metadata.endLine,\n language: chunk.metadata.language,\n chunkType: chunk.metadata.chunkType,\n symbolName: chunk.metadata.symbolName,\n contentHash: chunk.contentHash,\n },\n }));\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n const batchStartTime = Date.now();\n try {\n const result = await client.embeddings.createAndWait(texts, {\n namespace,\n embeddingModel,\n });\n \n const embedTime = Date.now() - batchStartTime;\n console.log(`[indexer] Batch ${batchNum}: embed completed in ${embedTime}ms - processed: ${result.processedCount}, failed: ${result.failedCount}`);\n \n failedChunks += result.failedCount;\n \n // Record successful chunks in local DB (batch upsert)\n const successfulChunks = batch.filter(\n (chunk) => !result.errors?.find((e) => e.id === chunk.id)\n );\n \n if (successfulChunks.length > 0) {\n console.log(`[indexer] Batch ${batchNum}: recording ${successfulChunks.length} chunks to DB...`);\n const dbStartTime = Date.now();\n \n await indexedChunkQueries.batchUpsert(db, successfulChunks.map((chunk) => ({\n id: chunk.id,\n contentHash: chunk.contentHash,\n filePath: chunk.metadata.filePath,\n repoNamespace: namespace,\n startLine: chunk.metadata.startLine,\n endLine: chunk.metadata.endLine,\n language: chunk.metadata.language,\n })));\n \n const dbTime = Date.now() - dbStartTime;\n console.log(`[indexer] Batch ${batchNum}: DB batch upsert completed in ${dbTime}ms`);\n }\n \n if (result.errors?.length) {\n for (const err of result.errors) {\n const chunk = batch.find((c) => c.id === err.id);\n if (chunk) {\n errors.push({ file: chunk.metadata.filePath, error: err.error });\n }\n }\n } else if (result.failedCount > 0) {\n errors.push({ file: `batch ${batchNum}`, error: `Embedding failed for ${result.failedCount} chunks (no error details returned)` });\n }\n \n return;\n } catch (error) {\n const errorMsg = formatError(error);\n console.error(`[indexer] Batch ${batchNum}: ERROR (attempt ${attempt + 1}/${maxRetries + 1}) 
- ${errorMsg}`);\n if (attempt >= maxRetries) {\n failedChunks += batch.length;\n errors.push({ file: `batch ${batchNum}`, error: errorMsg });\n return;\n }\n await sleep(500 * (attempt + 1));\n } finally {\n reportProgress();\n }\n }\n };\n\n let nextBatchIndex = 0;\n const workerCount = Math.min(concurrency, batches.length);\n const workers = Array.from({ length: workerCount }, async () => {\n while (nextBatchIndex < batches.length) {\n const currentIndex = nextBatchIndex;\n nextBatchIndex += 1;\n const { batchNum, batch } = batches[currentIndex];\n await processBatch(batchNum, batch);\n }\n });\n\n await Promise.all(workers);\n \n console.log(`[indexer] Embedding complete. Updating index status...`);\n\n // Update index status\n try {\n console.log(`[indexer] Calling indexStatusQueries.upsert with totalChunks: ${allChunks.length}`);\n await indexStatusQueries.upsert(db, {\n id: namespace,\n repoNamespace: namespace,\n totalChunks: allChunks.length,\n lastFullIndex: options.force ? new Date() : undefined,\n lastIncrementalIndex: new Date(),\n });\n console.log(`[indexer] Index status updated successfully`);\n } catch (statusError) {\n console.error(`[indexer] Failed to update index status:`, statusError);\n throw statusError;\n }\n\n // Phase 5: Done\n progress.phase = 'done';\n reportProgress();\n\n return {\n success: true,\n namespace,\n totalFiles: files.length,\n totalChunks: allChunks.length,\n newChunks: newChunks.length - failedChunks,\n skippedChunks: progress.skippedChunks,\n failedChunks,\n duration: Date.now() - startTime,\n errors,\n };\n } finally {\n await closeVectorClient();\n }\n}\n\n/**\n * Get the index status for a repository\n */\nexport async function getIndexStatus(workingDirectory: string): Promise<IndexStatus> {\n const config = getConfig();\n const namespace = await getRepoNamespace(\n workingDirectory,\n config.resolvedVectorGateway.namespace\n );\n \n const isConfigured = config.resolvedRemoteServer.isConfigured;\n \n if (!namespace) 
{\n return {\n namespace: '',\n totalChunks: 0,\n lastFullIndex: null,\n lastIncrementalIndex: null,\n isConfigured,\n };\n }\n \n try {\n const db = getDb();\n const status = await indexStatusQueries.get(db, namespace);\n \n if (!status) {\n return {\n namespace,\n totalChunks: 0,\n lastFullIndex: null,\n lastIncrementalIndex: null,\n isConfigured,\n };\n }\n \n return {\n namespace,\n totalChunks: status.totalChunks ?? 0,\n lastFullIndex: status.lastFullIndex ?? null,\n lastIncrementalIndex: status.lastIncrementalIndex ?? null,\n isConfigured,\n };\n } catch {\n return {\n namespace,\n totalChunks: 0,\n lastFullIndex: null,\n lastIncrementalIndex: null,\n isConfigured,\n };\n }\n}\n\n/**\n * Check if an index exists for a repository\n */\nexport async function checkIndexExists(workingDirectory: string): Promise<boolean> {\n const status = await getIndexStatus(workingDirectory);\n return status.totalChunks > 0;\n}\n","/**\n * Semantic search module\n * Provides indexing and semantic search capabilities using Vector Gateway\n */\n\n// Types\nexport * from './types.js';\n\n// Namespace resolution\nexport {\n getRepoNamespace,\n isGitRepository,\n getCurrentBranch,\n getCurrentCommit,\n getGitRemoteUrl,\n parseGitRemoteUrl,\n} from './namespace.js';\n\n// Content hashing\nexport {\n computeContentHash,\n computeFullHash,\n computeFileHash,\n generateChunkId,\n parseChunkId,\n} from './hasher.js';\n\n// File chunking\nexport {\n chunkFile,\n detectLanguage,\n supportsSemanticChunking,\n} from './chunker.js';\n\n// Vector client\nexport {\n getVectorClient,\n closeVectorClient,\n isVectorGatewayConfigured,\n getEmbeddingModel,\n} from './client.js';\n\n// Indexing\nexport {\n indexRepository,\n getIndexStatus,\n checkIndexExists,\n} from './indexer.js';\n","/**\n * Semantic Search Tool\n * Uses Vector Gateway to perform semantic similarity search on indexed codebase\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { existsSync, readFileSync } from 
'node:fs';\nimport { join } from 'node:path';\nimport { minimatch } from 'minimatch';\nimport {\n getVectorClient,\n closeVectorClient,\n getRepoNamespace,\n getEmbeddingModel,\n} from '../semantic/index.js';\nimport { getConfig } from '../config/index.js';\nimport { SemanticMatch } from '../semantic/types.js';\n\nexport interface SemanticSearchToolOptions {\n workingDirectory: string;\n}\n\nexport interface SemanticSearchResult {\n success: boolean;\n query?: string;\n matches?: SemanticMatch[];\n totalMatches?: number;\n duration?: number;\n error?: string;\n}\n\nconst semanticSearchInputSchema = z.object({\n query: z\n .string()\n .describe('Natural language search query describing what you want to find'),\n topK: z\n .number()\n .optional()\n .default(10)\n .describe('Number of results to return (default: 10, max: 50)'),\n filePattern: z\n .string()\n .optional()\n .describe('Filter results by file glob pattern (e.g., \"*.ts\", \"src/**/*.py\")'),\n language: z\n .string()\n .optional()\n .describe('Filter by programming language (e.g., \"typescript\", \"python\")'),\n});\n\n/**\n * Create the semantic_search tool\n */\nexport function createSemanticSearchTool(options: SemanticSearchToolOptions) {\n return tool({\n description: `Search the codebase using semantic similarity. 
This tool finds code by understanding its meaning, not just matching text.\n\nUse this tool when:\n- You need to understand how something works in the codebase\n- You're looking for code related to a concept (e.g., \"authentication\", \"database queries\")\n- You want to find implementations of features\n- The user asks \"where is X?\" or \"how does Y work?\"\n\nThis tool requires the repository to be indexed first with 'sparkecoder index'.\n\nReturns matching code snippets with file paths, line numbers, and relevance scores.`,\n\n inputSchema: semanticSearchInputSchema,\n\n execute: async ({\n query,\n topK,\n filePattern,\n language,\n }: z.infer<typeof semanticSearchInputSchema>): Promise<SemanticSearchResult> => {\n const startTime = Date.now();\n\n try {\n const config = getConfig();\n\n const namespace = await getRepoNamespace(\n options.workingDirectory,\n config.resolvedVectorGateway.namespace\n );\n\n if (!namespace) {\n return {\n success: false,\n error: 'Repository namespace not found. Ensure this is a git repository with a remote configured.',\n };\n }\n\n const client = getVectorClient();\n if (!client) {\n return {\n success: false,\n error: 'Remote server not configured. 
Set SPARKECODER_REMOTE_URL/SPARKECODER_AUTH_KEY or run sparkecoder to register.',\n };\n }\n\n try {\n const limitedTopK = Math.min(Math.max(1, topK), 50);\n\n const embeddingModel = getEmbeddingModel();\n const result = await client.search.queryAndWait(query, {\n namespace,\n topK: limitedTopK * 2,\n includeMetadata: true,\n embeddingModel,\n });\n\n const matches: SemanticMatch[] = [];\n\n for (const match of result.matches) {\n const metadata = match.metadata as Record<string, unknown> | undefined;\n if (!metadata) continue;\n\n const filePath = metadata.filePath as string;\n const startLine = metadata.startLine as number;\n const endLine = metadata.endLine as number;\n const matchLanguage = metadata.language as string;\n const symbolName = metadata.symbolName as string | undefined;\n\n if (filePattern) {\n const matchesPattern = minimatch(filePath, filePattern, { dot: true });\n if (!matchesPattern) continue;\n }\n\n if (language && matchLanguage !== language.toLowerCase()) {\n continue;\n }\n\n const fullPath = join(options.workingDirectory, filePath);\n if (!existsSync(fullPath)) {\n continue;\n }\n\n let snippet = '';\n try {\n const content = readFileSync(fullPath, 'utf-8');\n const lines = content.split('\\n');\n const snippetLines = lines.slice(\n Math.max(0, startLine - 1),\n Math.min(lines.length, endLine)\n );\n snippet = snippetLines.join('\\n');\n\n if (snippet.length > 500) {\n snippet = snippet.slice(0, 500) + '\\n... (truncated)';\n }\n } catch {\n // Ignore read errors\n }\n\n matches.push({\n filePath,\n startLine,\n endLine,\n score: match.score,\n snippet,\n symbolName,\n language: matchLanguage,\n });\n\n if (matches.length >= limitedTopK) {\n break;\n }\n }\n\n return {\n success: true,\n query,\n matches,\n totalMatches: matches.length,\n duration: Date.now() - startTime,\n };\n } finally {\n await closeVectorClient();\n }\n } catch (error) {\n return {\n success: false,\n error: `Semantic search failed: ${error instanceof Error ? 
error.message : String(error)}`,\n };\n }\n },\n });\n}\n","export type WebhookEventType =\n | 'task.started'\n | 'task.message'\n | 'task.tool_call'\n | 'task.tool_result'\n | 'task.step_finished'\n | 'task.completed'\n | 'task.failed';\n\nexport interface WebhookEvent {\n type: WebhookEventType;\n taskId: string;\n sessionId: string;\n timestamp: string;\n data: unknown;\n}\n\n/**\n * Fire-and-forget POST to a webhook URL.\n * Silently ignores network/timeout errors so the agent loop is never blocked.\n */\nexport async function sendWebhook(url: string, event: WebhookEvent): Promise<void> {\n try {\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), 5000);\n\n await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-SparkECoder-Event': event.type,\n },\n body: JSON.stringify(event),\n signal: controller.signal,\n });\n\n clearTimeout(timeout);\n } catch {\n // Fire-and-forget: silently ignore errors\n }\n}\n","import WebSocket from 'ws';\nimport { EventEmitter } from 'node:events';\n\nexport interface BrowserFrameMetadata {\n deviceWidth: number;\n deviceHeight: number;\n pageScaleFactor: number;\n offsetTop: number;\n scrollOffsetX: number;\n scrollOffsetY: number;\n}\n\nexport interface BrowserFrame {\n data: string; // base64-encoded JPEG\n metadata: BrowserFrameMetadata;\n timestamp: number;\n}\n\nexport interface BrowserStatus {\n connected: boolean;\n screencasting: boolean;\n viewportWidth?: number;\n viewportHeight?: number;\n}\n\nexport interface BrowserMouseEvent {\n type: 'input_mouse';\n eventType: 'mousePressed' | 'mouseReleased' | 'mouseMoved' | 'mouseWheel';\n x: number;\n y: number;\n button?: 'left' | 'right' | 'middle';\n clickCount?: number;\n deltaX?: number;\n deltaY?: number;\n}\n\nexport interface BrowserKeyboardEvent {\n type: 'input_keyboard';\n eventType: 'keyDown' | 'keyUp' | 'char';\n key: string;\n code?: string;\n text?: string;\n modifiers?: 
number;\n}\n\nexport interface BrowserTouchEvent {\n type: 'input_touch';\n eventType: 'touchStart' | 'touchMove' | 'touchEnd';\n touchPoints: Array<{ x: number; y: number; id?: number }>;\n}\n\nexport type BrowserInputEvent = BrowserMouseEvent | BrowserKeyboardEvent | BrowserTouchEvent;\n\nexport interface BrowserStreamProxyEvents {\n frame: (frame: BrowserFrame) => void;\n status: (status: BrowserStatus) => void;\n close: () => void;\n error: (error: Error) => void;\n}\n\nconst RECONNECT_DELAY_MS = 1000;\nconst MAX_RECONNECT_ATTEMPTS = 20;\nconst FRAME_THROTTLE_MS = 100; // ~10 fps max\n\n/**\n * Proxy that connects to an agent-browser WebSocket stream server\n * and forwards frames/status to consumers via EventEmitter.\n */\nexport class BrowserStreamProxy extends EventEmitter {\n private ws: WebSocket | null = null;\n private port: number;\n private reconnectAttempts = 0;\n private reconnectTimer: ReturnType<typeof setTimeout> | null = null;\n private destroyed = false;\n private lastFrameTime = 0;\n private _latestFrame: BrowserFrame | null = null;\n private _connected = false;\n\n constructor(port: number) {\n super();\n this.port = port;\n }\n\n get connected(): boolean {\n return this._connected;\n }\n\n get latestFrame(): BrowserFrame | null {\n return this._latestFrame;\n }\n\n connect(): void {\n if (this.destroyed) return;\n console.log(`[BROWSER-WS] connect() called for port ${this.port}`);\n this.doConnect();\n }\n\n private doConnect(): void {\n if (this.destroyed) return;\n\n const url = `ws://localhost:${this.port}`;\n console.log(`[BROWSER-WS] Attempting WebSocket connection to ${url} (attempt ${this.reconnectAttempts + 1}/${MAX_RECONNECT_ATTEMPTS})`);\n try {\n this.ws = new WebSocket(url);\n } catch (err) {\n console.warn(`[BROWSER-WS] WebSocket constructor threw for ${url}:`, err);\n this.scheduleReconnect();\n return;\n }\n\n this.ws.on('open', () => {\n console.log(`[BROWSER-WS] Connected to ${url} (after ${this.reconnectAttempts} 
retries)`);\n this.reconnectAttempts = 0;\n this._connected = true;\n // Don't emit screencasting: true here — wait for the real status\n // from the StreamServer. It sends status immediately on connection\n // with the actual isScreencasting state.\n });\n\n this.ws.on('message', (raw: Buffer | string) => {\n try {\n const msg = JSON.parse(typeof raw === 'string' ? raw : raw.toString('utf8'));\n this.handleMessage(msg);\n } catch (err) {\n console.warn(`[BROWSER-WS] Malformed message from ${url}:`, err);\n }\n });\n\n this.ws.on('close', (code, reason) => {\n const wasConnected = this._connected;\n this._connected = false;\n console.log(`[BROWSER-WS] Connection closed: code=${code} reason=\"${reason?.toString() || ''}\" wasConnected=${wasConnected} destroyed=${this.destroyed}`);\n if (wasConnected) {\n this.emit('status', { connected: false, screencasting: false } satisfies BrowserStatus);\n }\n if (!this.destroyed) {\n this.scheduleReconnect();\n }\n });\n\n this.ws.on('error', (err) => {\n console.warn(`[BROWSER-WS] WebSocket error on port ${this.port}:`, err.message);\n });\n }\n\n private frameCount = 0;\n private throttledCount = 0;\n private lastFrameLogTime = 0;\n\n private handleMessage(msg: any): void {\n if (msg.type === 'frame') {\n const now = Date.now();\n if (now - this.lastFrameTime < FRAME_THROTTLE_MS) {\n this.throttledCount++;\n return;\n }\n this.lastFrameTime = now;\n this.frameCount++;\n\n // Log frame stats every 5 seconds\n if (now - this.lastFrameLogTime > 5000) {\n console.log(`[BROWSER-WS] Frame stats: emitted=${this.frameCount} throttled=${this.throttledCount} listeners=${this.listenerCount('frame')} dataSize=${msg.data?.length ?? 0}`);\n this.lastFrameLogTime = now;\n }\n\n const frame: BrowserFrame = {\n data: msg.data,\n metadata: msg.metadata ?? 
{\n deviceWidth: 1280,\n deviceHeight: 720,\n pageScaleFactor: 1,\n offsetTop: 0,\n scrollOffsetX: 0,\n scrollOffsetY: 0,\n },\n timestamp: now,\n };\n this._latestFrame = frame;\n this.emit('frame', frame);\n } else if (msg.type === 'status') {\n console.log(`[BROWSER-WS] Status message received:`, JSON.stringify(msg));\n this.emit('status', {\n connected: msg.connected ?? true,\n screencasting: msg.screencasting ?? true,\n viewportWidth: msg.viewportWidth,\n viewportHeight: msg.viewportHeight,\n } satisfies BrowserStatus);\n } else {\n console.log(`[BROWSER-WS] Unknown message type: ${msg.type}`);\n }\n }\n\n private scheduleReconnect(): void {\n if (this.destroyed || this.reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {\n console.log(`[BROWSER-WS] Giving up reconnection: destroyed=${this.destroyed} attempts=${this.reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS}`);\n this.emit('close');\n return;\n }\n this.reconnectAttempts++;\n // Fast retries for the first 5 attempts (browser is starting up),\n // then back off for later retries (browser may have closed)\n const delay = this.reconnectAttempts <= 5\n ? RECONNECT_DELAY_MS\n : RECONNECT_DELAY_MS * (this.reconnectAttempts - 4);\n console.log(`[BROWSER-WS] Scheduling reconnect in ${delay}ms (attempt ${this.reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS})`);\n this.reconnectTimer = setTimeout(() => this.doConnect(), delay);\n }\n\n /**\n * Send an input event to the browser for pair-browsing.\n */\n injectInput(event: BrowserInputEvent): void {\n if (this.ws?.readyState === WebSocket.OPEN) {\n this.ws.send(JSON.stringify(event));\n }\n }\n\n /**\n * Ask the StreamServer to send its current status (triggers sendStatus and\n * re-evaluates screencasting). 
Useful when listeners are replaced on a new\n * stream and we want a fresh status event.\n */\n requestStatus(): void {\n if (this.ws?.readyState === WebSocket.OPEN) {\n console.log(`[BROWSER-WS] Requesting fresh status from StreamServer`);\n this.ws.send(JSON.stringify({ type: 'status' }));\n }\n }\n\n destroy(): void {\n console.log(`[BROWSER-WS] Destroying proxy for port ${this.port} (emitted ${this.frameCount} frames, throttled ${this.throttledCount})`);\n this.destroyed = true;\n if (this.reconnectTimer) {\n clearTimeout(this.reconnectTimer);\n this.reconnectTimer = null;\n }\n if (this.ws) {\n this.ws.removeAllListeners();\n this.ws.close();\n this.ws = null;\n }\n this._connected = false;\n this.removeAllListeners();\n }\n}\n\n// Registry of active proxies per session\nconst activeProxies = new Map<string, BrowserStreamProxy>();\n\nexport function getOrCreateProxy(sessionId: string, port: number): BrowserStreamProxy {\n const existing = activeProxies.get(sessionId);\n if (existing) {\n console.log(`[BROWSER-WS] Reusing existing proxy for session ${sessionId} (connected=${existing.connected})`);\n return existing;\n }\n\n console.log(`[BROWSER-WS] Creating new proxy for session ${sessionId} on port ${port} (active proxies: ${activeProxies.size})`);\n const proxy = new BrowserStreamProxy(port);\n activeProxies.set(sessionId, proxy);\n proxy.on('close', () => {\n console.log(`[BROWSER-WS] Proxy closed for session ${sessionId}, removing from registry`);\n activeProxies.delete(sessionId);\n });\n proxy.connect();\n return proxy;\n}\n\nexport function getProxy(sessionId: string): BrowserStreamProxy | undefined {\n return activeProxies.get(sessionId);\n}\n\nexport function destroyProxy(sessionId: string): void {\n const proxy = activeProxies.get(sessionId);\n if (proxy) {\n console.log(`[BROWSER-WS] destroyProxy() called for session ${sessionId}`);\n proxy.destroy();\n activeProxies.delete(sessionId);\n } else {\n console.log(`[BROWSER-WS] destroyProxy() called but 
no proxy exists for session ${sessionId}`);\n }\n}\n","import { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { writeFile, mkdir, readFile, unlink, readdir, rm } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport { tmpdir } from 'node:os';\nimport { nanoid } from 'nanoid';\nimport type { BrowserFrame } from './stream-proxy.js';\n\nconst execAsync = promisify(exec);\n\ninterface RecordedFrame {\n data: Buffer;\n timestamp: number;\n}\n\nexport class FrameRecorder {\n private frames: RecordedFrame[] = [];\n private startTime: number | null = null;\n private recording = false;\n private sessionId: string;\n\n constructor(sessionId: string) {\n this.sessionId = sessionId;\n }\n\n get isRecording(): boolean {\n return this.recording;\n }\n\n get frameCount(): number {\n return this.frames.length;\n }\n\n start(): void {\n this.frames = [];\n this.startTime = Date.now();\n this.recording = true;\n }\n\n addFrame(frame: BrowserFrame): void {\n if (!this.recording) return;\n this.frames.push({\n data: Buffer.from(frame.data, 'base64'),\n timestamp: frame.timestamp,\n });\n }\n\n stop(): void {\n this.recording = false;\n }\n\n /**\n * Encode recorded frames into an MP4 using ffmpeg.\n * Returns the file path to the generated MP4, or null if encoding fails.\n */\n async encode(): Promise<{ path: string; sizeBytes: number } | null> {\n if (this.frames.length === 0) return null;\n\n const workDir = join(tmpdir(), `sparkecoder-recording-${nanoid(8)}`);\n await mkdir(workDir, { recursive: true });\n\n try {\n // Write frames as numbered JPEG files\n for (let i = 0; i < this.frames.length; i++) {\n const framePath = join(workDir, `frame_${String(i).padStart(6, '0')}.jpg`);\n await writeFile(framePath, this.frames[i].data);\n }\n\n // Calculate average FPS from timestamps\n const duration = (this.frames[this.frames.length - 1].timestamp - this.frames[0].timestamp) / 1000;\n const fps = duration > 0 ? 
Math.round(this.frames.length / duration) : 10;\n const clampedFps = Math.max(1, Math.min(fps, 30));\n\n const outputPath = join(workDir, `recording_${this.sessionId}.mp4`);\n\n // Try ffmpeg\n const hasFfmpeg = await checkFfmpeg();\n if (hasFfmpeg) {\n await execAsync(\n `ffmpeg -y -framerate ${clampedFps} -i \"${join(workDir, 'frame_%06d.jpg')}\" ` +\n `-c:v libx264 -pix_fmt yuv420p -preset fast -crf 23 ` +\n `\"${outputPath}\"`,\n { timeout: 120_000 }\n );\n } else {\n // Fallback: create MJPEG-in-MP4 using raw ffmpeg with mjpeg codec\n // If ffmpeg isn't available at all, return null\n console.warn('[RECORDER] ffmpeg not available, cannot encode recording');\n await cleanup(workDir);\n return null;\n }\n\n const outputBuf = await readFile(outputPath);\n\n // Clean up frame files but keep the output\n const files = await readdir(workDir);\n for (const f of files) {\n if (f.startsWith('frame_')) {\n await unlink(join(workDir, f)).catch(() => {});\n }\n }\n\n return { path: outputPath, sizeBytes: outputBuf.length };\n } catch (error) {\n console.error('[RECORDER] Failed to encode recording:', error);\n await cleanup(workDir);\n return null;\n }\n }\n\n /** Discard all frames and free memory */\n clear(): void {\n this.frames = [];\n this.startTime = null;\n this.recording = false;\n }\n}\n\nasync function checkFfmpeg(): Promise<boolean> {\n try {\n await execAsync('ffmpeg -version', { timeout: 5000 });\n return true;\n } catch {\n return false;\n }\n}\n\nasync function cleanup(dir: string): Promise<void> {\n try {\n await rm(dir, { recursive: true, force: true });\n } catch {\n // Best effort\n }\n}\n","import 'dotenv/config'; // Load .env file early\nimport { Hono } from 'hono';\nimport { serve, type ServerType } from '@hono/node-server';\nimport { cors } from 'hono/cors';\nimport { logger } from 'hono/logger';\nimport { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { resolve, dirname, join } from 'node:path';\nimport { spawn, type ChildProcess } 
from 'node:child_process';\nimport { createServer as createNetServer } from 'node:net';\nimport { fileURLToPath } from 'node:url';\nimport { sessions } from './routes/sessions.js';\nimport { agents } from './routes/agents.js';\nimport { health } from './routes/health.js';\nimport { terminals } from './routes/terminals.js';\nimport tasks from './routes/tasks.js';\nimport * as tmux from '../terminal/tmux.js';\nimport { loadConfig, getConfig, loadApiKeysIntoEnv, ensureRemoteAuthKey, saveAuthKey } from '../config/index.js';\nimport { initDatabase, closeDatabase, isUsingRemote } from '../db/index.js';\nimport { checkDependencies } from '../utils/dependencies.js';\n\nlet serverInstance: ServerType | null = null;\nlet webUIProcess: ChildProcess | null = null;\n\n// Default web UI port\nconst DEFAULT_WEB_PORT = 6969;\nconst WEB_PORT_SEQUENCE = [6969, 6970, 6971, 6972, 6973, 6974, 6975, 6976, 6977, 6978];\n\nexport interface ServerOptions {\n port?: number;\n host?: string;\n workingDirectory?: string;\n configPath?: string;\n quiet?: boolean; // Disable HTTP request logging\n webUI?: boolean; // Start web UI (default: true)\n webPort?: number; // Web UI port (default: 6969)\n publicUrl?: string; // Public URL for web UI to connect to API (for Docker/remote access)\n}\n\n// Get the web directory path\nfunction getWebDirectory(): string | null {\n try {\n const currentDir = dirname(fileURLToPath(import.meta.url));\n const webDir = resolve(currentDir, '..', 'web');\n \n if (existsSync(webDir) && existsSync(join(webDir, 'package.json'))) {\n return webDir;\n }\n \n const altWebDir = resolve(currentDir, '..', '..', 'web');\n if (existsSync(altWebDir) && existsSync(join(altWebDir, 'package.json'))) {\n return altWebDir;\n }\n \n return null;\n } catch {\n return null;\n }\n}\n\n// Check if a SparkECoder web UI is running on a port\nasync function isSparkcoderWebRunning(port: number): Promise<boolean> {\n try {\n const response = await fetch(`http://localhost:${port}/api/health`, 
{ \n signal: AbortSignal.timeout(1000) \n });\n if (response.ok) {\n const data = await response.json() as { name?: string };\n return data.name === 'sparkecoder-web';\n }\n return false;\n } catch {\n return false;\n }\n}\n\n// Check if a port is in use\nfunction isPortInUse(port: number): Promise<boolean> {\n return new Promise((resolve) => {\n const server = createNetServer();\n \n server.once('error', (err: NodeJS.ErrnoException) => {\n if (err.code === 'EADDRINUSE') {\n resolve(true);\n } else {\n resolve(false);\n }\n });\n \n server.once('listening', () => {\n server.close();\n resolve(false);\n });\n \n server.listen(port, '0.0.0.0');\n });\n}\n\n// Find an available port for the web UI\nasync function findWebPort(preferredPort: number): Promise<{ port: number; alreadyRunning: boolean }> {\n if (await isSparkcoderWebRunning(preferredPort)) {\n return { port: preferredPort, alreadyRunning: true };\n }\n \n if (!(await isPortInUse(preferredPort))) {\n return { port: preferredPort, alreadyRunning: false };\n }\n \n for (const port of WEB_PORT_SEQUENCE) {\n if (port === preferredPort) continue;\n \n if (await isSparkcoderWebRunning(port)) {\n return { port, alreadyRunning: true };\n }\n \n if (!(await isPortInUse(port))) {\n return { port, alreadyRunning: false };\n }\n }\n \n return { port: preferredPort, alreadyRunning: false };\n}\n\n// Check if production build exists\nfunction hasProductionBuild(webDir: string): boolean {\n const buildIdPath = join(webDir, '.next', 'BUILD_ID');\n return existsSync(buildIdPath);\n}\n\n// Check if source files exist (for dev mode)\nfunction hasSourceFiles(webDir: string): boolean {\n // Check for app directory (Next.js App Router)\n const appDir = join(webDir, 'src', 'app');\n const pagesDir = join(webDir, 'src', 'pages');\n const rootAppDir = join(webDir, 'app');\n const rootPagesDir = join(webDir, 'pages');\n \n return existsSync(appDir) || existsSync(pagesDir) || existsSync(rootAppDir) || 
existsSync(rootPagesDir);\n}\n\n// Check if standalone build exists and find the server.js path\nfunction getStandaloneServerPath(webDir: string): string | null {\n // Check for standalone server - may be at different paths depending on project structure\n const possiblePaths = [\n join(webDir, '.next', 'standalone', 'server.js'),\n join(webDir, '.next', 'standalone', 'web', 'server.js'),\n ];\n \n for (const serverPath of possiblePaths) {\n if (existsSync(serverPath)) {\n return serverPath;\n }\n }\n \n return null;\n}\n\n// Run a command and wait for it to complete\nfunction runCommand(command: string, args: string[], cwd: string, env: NodeJS.ProcessEnv): Promise<{ success: boolean; output: string }> {\n return new Promise((resolve) => {\n const child = spawn(command, args, {\n cwd,\n stdio: ['ignore', 'pipe', 'pipe'],\n env,\n shell: true,\n });\n \n let output = '';\n child.stdout?.on('data', (data: Buffer) => { output += data.toString(); });\n child.stderr?.on('data', (data: Buffer) => { output += data.toString(); });\n \n child.on('close', (code) => {\n resolve({ success: code === 0, output });\n });\n \n child.on('error', (err) => {\n resolve({ success: false, output: err.message });\n });\n });\n}\n\n// Start the web UI\nasync function startWebUI(\n apiPort: number,\n webPort: number = DEFAULT_WEB_PORT,\n quiet: boolean = false,\n publicUrl?: string\n): Promise<{ process: ChildProcess | null; port: number; started?: boolean }> {\n const webDir = getWebDirectory();\n \n if (!webDir) {\n if (!quiet) console.log(' ⚠ Web UI not found, skipping...');\n return { process: null, port: webPort };\n }\n \n const { port: actualPort, alreadyRunning } = await findWebPort(webPort);\n \n if (alreadyRunning) {\n if (!quiet) console.log(` ✓ Web UI already running at http://localhost:${actualPort}`);\n return { process: null, port: actualPort };\n }\n \n // Determine which package manager to use (prefer pnpm if available)\n const usePnpm = existsSync(join(webDir, 
'pnpm-lock.yaml'));\n const useNpm = !usePnpm && existsSync(join(webDir, 'package-lock.json'));\n \n const pkgManager = usePnpm ? 'pnpm' : useNpm ? 'npm' : 'npx';\n \n // Create a clean environment without tsx's module resolution pollution\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const { NODE_OPTIONS, TSX_TSCONFIG_PATH, ...cleanEnv } = process.env;\n \n // Use publicUrl if provided, otherwise default to localhost\n const apiUrl = publicUrl || `http://127.0.0.1:${apiPort}`;\n \n // Write runtime config to a JSON file that the web app can read\n // This avoids NEXT_PUBLIC_* env var build-time issues\n const runtimeConfig = { apiBaseUrl: apiUrl };\n const runtimeConfigPath = join(webDir, 'runtime-config.json');\n try {\n writeFileSync(runtimeConfigPath, JSON.stringify(runtimeConfig, null, 2));\n if (!quiet) console.log(` 📝 Runtime config written to ${runtimeConfigPath}`);\n } catch (err) {\n if (!quiet) console.warn(` ⚠ Could not write runtime config: ${err}`);\n }\n \n const webEnv: NodeJS.ProcessEnv & { PORT: string; HOSTNAME?: string } = {\n ...cleanEnv,\n PORT: String(actualPort), // Next.js respects PORT env var\n };\n \n // Determine which mode to run in based on available files\n // Priority:\n // 1. Standalone build (for npm package distribution)\n // 2. Production build with next start\n // 3. 
Development mode with next dev (only if source files exist)\n const hasSource = hasSourceFiles(webDir);\n const standaloneServerPath = getStandaloneServerPath(webDir);\n const hasBuild = hasProductionBuild(webDir);\n const isProduction = process.env.NODE_ENV === 'production';\n \n let command: string;\n let args: string[];\n let cwd = webDir; // Default cwd\n \n if (standaloneServerPath) {\n // Standalone build available - use Node to run the standalone server\n // This is the preferred mode for npm package distribution\n command = 'node';\n args = ['server.js'];\n \n // Standalone server must run from its own directory\n cwd = dirname(standaloneServerPath);\n \n // Standalone server uses PORT env var\n webEnv.PORT = String(actualPort);\n webEnv.HOSTNAME = '0.0.0.0';\n \n if (!quiet) console.log(' 📦 Starting Web UI from standalone build...');\n } else if (hasBuild && (isProduction || !hasSource)) {\n // Production build exists and either:\n // - Running in production mode, OR\n // - No source files (npm package without source)\n command = pkgManager;\n // Use PORT env var for npm/pnpm (already set in webEnv), only pass -p for npx\n args = pkgManager === 'npx' \n ? ['next', 'start', '-p', String(actualPort)]\n : ['run', 'start'];\n } else if (hasSource) {\n // Development mode: run next dev (source files required)\n if (isProduction && !hasBuild) {\n // Production mode but no build - need to build first\n if (!quiet) console.log(' 📦 Building Web UI for production...');\n \n const buildArgs = pkgManager === 'npx' \n ? 
['next', 'build'] \n : ['run', 'build'];\n \n const buildResult = await runCommand(pkgManager, buildArgs, webDir, webEnv);\n \n if (!buildResult.success) {\n if (!quiet) console.error(' ❌ Web UI build failed');\n return { process: null, port: actualPort };\n }\n \n if (!quiet) console.log(' ✓ Web UI build complete');\n \n command = pkgManager;\n // Use PORT env var for npm/pnpm (already set in webEnv), only pass -p for npx\n args = pkgManager === 'npx' \n ? ['next', 'start', '-p', String(actualPort)]\n : ['run', 'start'];\n } else {\n // Development mode with source files\n command = pkgManager;\n // Use PORT env var for npm/pnpm (already set in webEnv), only pass -p for npx\n args = pkgManager === 'npx'\n ? ['next', 'dev', '-p', String(actualPort)]\n : ['run', 'dev'];\n }\n } else {\n // No standalone, no build, no source files - can't start web UI\n if (!quiet) {\n console.error(' ❌ Web UI cannot start: no build or source files found');\n console.error(' This may be a packaging issue. 
Try reinstalling sparkecoder.');\n }\n return { process: null, port: actualPort };\n }\n \n const child = spawn(command, args, {\n cwd,\n stdio: ['ignore', 'pipe', 'pipe'],\n env: webEnv,\n detached: false,\n shell: true,\n });\n \n // Wait for the web UI to actually start (with timeout)\n const startupTimeout = 30000; // 30 seconds\n let started = false;\n let exited = false;\n let exitCode: number | null = null;\n \n const startedPromise = new Promise<boolean>((resolve) => {\n const timeout = setTimeout(() => {\n if (!started && !exited) {\n resolve(false);\n }\n }, startupTimeout);\n \n child.stdout?.on('data', (data: Buffer) => {\n const output = data.toString();\n if (!quiet) {\n // Show all stdout in verbose mode\n const lines = output.trim().split('\\n').filter(l => l.trim());\n for (const line of lines) {\n console.log(` Web UI: ${line}`);\n }\n }\n if (!started && (output.includes('Ready') || output.includes('started') || output.includes('localhost'))) {\n started = true;\n clearTimeout(timeout);\n resolve(true);\n }\n });\n \n child.stderr?.on('data', (data: Buffer) => {\n const output = data.toString().trim();\n if (!quiet && output) {\n console.error(` Web UI: ${output.slice(0, 500)}`);\n }\n });\n \n child.on('error', (err) => {\n if (!quiet) console.error(` ❌ Web UI spawn error: ${err.message}`);\n clearTimeout(timeout);\n resolve(false);\n });\n \n child.on('exit', (code) => {\n exited = true;\n exitCode = code;\n if (!started) {\n clearTimeout(timeout);\n resolve(false);\n }\n webUIProcess = null;\n });\n });\n \n webUIProcess = child;\n \n // Wait for startup (but don't block indefinitely)\n const didStart = await startedPromise;\n \n if (!didStart) {\n if (exited && exitCode !== 0) {\n if (!quiet) console.error(` ❌ Web UI failed to start (exit code: ${exitCode})`);\n } else if (!exited) {\n if (!quiet) console.log(` ⚠ Web UI startup timed out, continuing anyway...`);\n }\n // Don't kill the process, it might still be starting\n }\n \n return { 
process: child, port: actualPort, started: didStart };\n}\n\n// Stop the web UI\nexport function stopWebUI(): void {\n if (webUIProcess) {\n webUIProcess.kill('SIGTERM');\n webUIProcess = null;\n }\n}\n\nexport async function createApp(options: { quiet?: boolean } = {}) {\n const app = new Hono();\n\n // Middleware - CORS for cross-origin requests from web UI\n app.use('*', cors({\n origin: '*', // Allow all origins\n allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],\n allowHeaders: ['Content-Type', 'Authorization', 'X-Requested-With'],\n exposeHeaders: ['X-Stream-Id', 'x-stream-id'],\n maxAge: 86400, // 24 hours\n }));\n \n // Only enable HTTP logging if not in quiet mode\n if (!options.quiet) {\n app.use('*', logger());\n }\n\n // Health checks\n app.route('/health', health);\n\n // API routes\n app.route('/sessions', sessions);\n app.route('/agents', agents);\n app.route('/sessions', terminals); // Terminal routes are nested under /sessions/:sessionId/terminals\n app.route('/terminals', terminals); // Also mount at /terminals for simpler direct access (e.g., /terminals/stream/:id)\n app.route('/tasks', tasks);\n\n // OpenAPI spec (manual, simplified)\n app.get('/openapi.json', async (c) => {\n return c.json(generateOpenAPISpec());\n });\n\n // Swagger UI\n app.get('/swagger', (c) => {\n const html = `<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <title>SparkECoder API - Swagger UI</title>\n <link rel=\"stylesheet\" href=\"https://unpkg.com/swagger-ui-dist@5/swagger-ui.css\">\n</head>\n<body>\n <div id=\"swagger-ui\"></div>\n <script src=\"https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js\"></script>\n <script>\n SwaggerUIBundle({\n url: '/openapi.json',\n dom_id: '#swagger-ui',\n presets: [SwaggerUIBundle.presets.apis, SwaggerUIBundle.SwaggerUIStandalonePreset],\n layout: \"BaseLayout\"\n });\n </script>\n</body>\n</html>`;\n return c.html(html);\n });\n\n // Root endpoint\n app.get('/', (c) => {\n return 
c.json({\n name: 'SparkECoder API',\n version: '0.1.0',\n description: 'A powerful coding agent CLI with HTTP API',\n docs: '/openapi.json',\n endpoints: {\n health: '/health',\n sessions: '/sessions',\n agents: '/agents',\n terminals: '/sessions/:sessionId/terminals',\n },\n });\n });\n\n return app;\n}\n\nexport async function startServer(options: ServerOptions = {}) {\n // Check for required dependencies (tmux) before starting\n const depsOk = await checkDependencies({ quiet: options.quiet, exitOnFailure: false });\n if (!depsOk) {\n throw new Error('Missing required dependency: tmux. See above for installation instructions.');\n }\n\n // Load config\n const config = await loadConfig(options.configPath, options.workingDirectory);\n\n // Load API keys from storage into environment (before making any API calls)\n loadApiKeysIntoEnv();\n\n // Override working directory if provided\n if (options.workingDirectory) {\n config.resolvedWorkingDirectory = options.workingDirectory;\n }\n\n // Ensure working directory exists (create agent-workspace if needed)\n if (!existsSync(config.resolvedWorkingDirectory)) {\n mkdirSync(config.resolvedWorkingDirectory, { recursive: true });\n if (!options.quiet) console.log(`📁 Created agent workspace: ${config.resolvedWorkingDirectory}`);\n }\n\n // Initialize database (remote MongoDB)\n if (!config.resolvedRemoteServer.url) {\n throw new Error('Remote server not configured. 
Set REMOTE_SERVER_URL environment variable or remoteServer.url in config');\n }\n \n let authKey = config.resolvedRemoteServer.authKey;\n if (!authKey) {\n if (!options.quiet) console.log('📡 Registering with remote server...');\n authKey = await ensureRemoteAuthKey(config.resolvedRemoteServer.url);\n if (!options.quiet) console.log('✓ Registered with remote server');\n }\n initDatabase({ url: config.resolvedRemoteServer.url, authKey });\n if (!options.quiet) console.log(`📡 Using remote database: ${config.resolvedRemoteServer.url}`);\n\n const port = options.port || config.server.port;\n const host = options.host || config.server.host || '0.0.0.0';\n // Public URL for web UI: CLI option > config > auto-detect\n const publicUrl = options.publicUrl || config.server.publicUrl;\n\n const app = await createApp({ quiet: options.quiet });\n\n if (!options.quiet) {\n console.log(`\\n🚀 SparkECoder API Server`);\n console.log(` → Running at http://${host}:${port}`);\n if (publicUrl) {\n console.log(` → Public URL: ${publicUrl}`);\n }\n console.log(` → Working directory: ${config.resolvedWorkingDirectory}`);\n console.log(` → Default model: ${config.defaultModel}`);\n console.log(` → OpenAPI spec: http://${host}:${port}/openapi.json\\n`);\n }\n\n serverInstance = serve({\n fetch: app.fetch,\n port,\n hostname: host,\n });\n\n // Start Web UI if enabled (default: true)\n let webPort: number | undefined;\n let webStarted: boolean | undefined;\n if (options.webUI !== false) {\n const result = await startWebUI(port, options.webPort || DEFAULT_WEB_PORT, options.quiet, publicUrl);\n webPort = result.port;\n webStarted = result.started;\n }\n\n return { app, port, host, webPort, webStarted };\n}\n\nexport function stopServer() {\n // Stop web UI first\n stopWebUI();\n \n // Kill all sparkecoder tmux sessions (cleanup) - fire and forget\n tmux.listSessions().then(async (sessions) => {\n for (const id of sessions) {\n await tmux.killTerminal(id);\n }\n }).catch(() => {\n // Ignore 
cleanup errors\n });\n \n if (serverInstance) {\n serverInstance.close();\n serverInstance = null;\n }\n closeDatabase();\n}\n\nfunction generateOpenAPISpec() {\n return {\n openapi: '3.1.0',\n info: {\n title: 'SparkECoder API',\n version: '0.1.0',\n description:\n 'A powerful coding agent CLI with HTTP API for development environments. Supports streaming responses following the Vercel AI SDK data stream protocol.',\n },\n servers: [{ url: 'http://localhost:3141', description: 'Local development' }],\n paths: {\n '/': {\n get: {\n summary: 'API Info',\n description: 'Get basic API information and available endpoints',\n responses: {\n 200: {\n description: 'API information',\n content: { 'application/json': {} },\n },\n },\n },\n },\n '/health': {\n get: {\n summary: 'Health Check',\n description: 'Check API health status and configuration',\n responses: {\n 200: {\n description: 'API is healthy',\n content: { 'application/json': {} },\n },\n },\n },\n },\n '/health/ready': {\n get: {\n summary: 'Readiness Check',\n description: 'Check if the API is ready to accept requests',\n responses: {\n 200: { description: 'API is ready' },\n 503: { description: 'API is not ready' },\n },\n },\n },\n '/sessions': {\n get: {\n summary: 'List Sessions',\n description: 'Get a list of all agent sessions',\n parameters: [\n { name: 'limit', in: 'query', schema: { type: 'integer', default: 50 } },\n { name: 'offset', in: 'query', schema: { type: 'integer', default: 0 } },\n ],\n responses: {\n 200: { description: 'List of sessions' },\n },\n },\n post: {\n summary: 'Create Session',\n description: 'Create a new agent session',\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n name: { type: 'string' },\n workingDirectory: { type: 'string' },\n model: { type: 'string' },\n toolApprovals: { type: 'object' },\n },\n },\n },\n },\n },\n responses: {\n 201: { description: 'Session created' },\n },\n },\n },\n '/sessions/{id}': {\n get: 
{\n summary: 'Get Session',\n description: 'Get details of a specific session',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Session details' },\n 404: { description: 'Session not found' },\n },\n },\n delete: {\n summary: 'Delete Session',\n description: 'Delete a session and all its data',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Session deleted' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/sessions/{id}/messages': {\n get: {\n summary: 'Get Messages',\n description: 'Get message history for a session',\n parameters: [\n { name: 'id', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'limit', in: 'query', schema: { type: 'integer', default: 100 } },\n ],\n responses: {\n 200: { description: 'Message history' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/sessions/{id}/clear': {\n post: {\n summary: 'Clear Context',\n description: 'Clear conversation context for a session',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Context cleared' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/run': {\n post: {\n summary: 'Run Agent (Streaming)',\n description:\n 'Run the agent with a prompt and receive streaming response. 
Returns SSE stream following Vercel AI SDK data stream protocol.',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['prompt'],\n properties: {\n prompt: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: {\n description: 'SSE stream of agent output',\n content: { 'text/event-stream': {} },\n },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/generate': {\n post: {\n summary: 'Run Agent (Non-streaming)',\n description: 'Run the agent and receive complete response',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['prompt'],\n properties: {\n prompt: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: { description: 'Agent response' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/approve/{toolCallId}': {\n post: {\n summary: 'Approve Tool',\n description: 'Approve a pending tool execution',\n parameters: [\n { name: 'id', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'toolCallId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n responses: {\n 200: { description: 'Tool approved and executed' },\n 400: { description: 'Approval failed' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/reject/{toolCallId}': {\n post: {\n summary: 'Reject Tool',\n description: 'Reject a pending tool execution',\n parameters: [\n { name: 'id', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'toolCallId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n reason: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: { 
description: 'Tool rejected' },\n 400: { description: 'Rejection failed' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/approvals': {\n get: {\n summary: 'Get Pending Approvals',\n description: 'Get all pending tool approvals for a session',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Pending approvals' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/quick': {\n post: {\n summary: 'Quick Start',\n description: 'Create a session and run agent in one request',\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['prompt'],\n properties: {\n prompt: { type: 'string' },\n name: { type: 'string' },\n workingDirectory: { type: 'string' },\n model: { type: 'string' },\n toolApprovals: { type: 'object' },\n },\n },\n },\n },\n },\n responses: {\n 200: {\n description: 'SSE stream of agent output',\n content: { 'text/event-stream': {} },\n },\n },\n },\n },\n '/sessions/{sessionId}/terminals': {\n get: {\n summary: 'List Terminals',\n description: 'Get all terminals for a session',\n parameters: [{ name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'List of terminals' },\n 404: { description: 'Session not found' },\n },\n },\n post: {\n summary: 'Spawn Terminal',\n description: 'Start a new background process',\n parameters: [{ name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } }],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['command'],\n properties: {\n command: { type: 'string' },\n cwd: { type: 'string' },\n name: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 201: { description: 'Terminal spawned' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}': {\n get: {\n summary: 'Get 
Terminal Status',\n description: 'Get status and details of a terminal',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n responses: {\n 200: { description: 'Terminal status' },\n 404: { description: 'Terminal not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/logs': {\n get: {\n summary: 'Get Terminal Logs',\n description: 'Get output logs from a terminal',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'tail', in: 'query', schema: { type: 'integer' } },\n ],\n responses: {\n 200: { description: 'Terminal logs' },\n 404: { description: 'Terminal not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/kill': {\n post: {\n summary: 'Kill Terminal',\n description: 'Stop a running terminal process',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n signal: { type: 'string', enum: ['SIGTERM', 'SIGKILL'] },\n },\n },\n },\n },\n },\n responses: {\n 200: { description: 'Terminal killed' },\n 400: { description: 'Failed to kill terminal' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/write': {\n post: {\n summary: 'Write to Terminal',\n description: 'Send input to terminal stdin',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['input'],\n properties: {\n input: { type: 
'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: { description: 'Input sent' },\n 400: { description: 'Failed to write' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/stream': {\n get: {\n summary: 'Stream Terminal Output',\n description: 'SSE stream of terminal output',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n responses: {\n 200: { description: 'SSE stream', content: { 'text/event-stream': {} } },\n 404: { description: 'Terminal not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/kill-all': {\n post: {\n summary: 'Kill All Terminals',\n description: 'Stop all running terminals for a session',\n parameters: [{ name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Terminals killed' },\n },\n },\n },\n },\n components: {\n schemas: {\n Session: {\n type: 'object',\n properties: {\n id: { type: 'string' },\n name: { type: 'string' },\n workingDirectory: { type: 'string' },\n model: { type: 'string' },\n status: { type: 'string', enum: ['active', 'waiting', 'completed', 'error'] },\n createdAt: { type: 'string', format: 'date-time' },\n updatedAt: { type: 'string', format: 'date-time' },\n },\n },\n Message: {\n type: 'object',\n properties: {\n id: { type: 'string' },\n role: { type: 'string', enum: ['user', 'assistant', 'system', 'tool'] },\n content: { type: 'object' },\n createdAt: { type: 'string', format: 'date-time' },\n },\n },\n ToolExecution: {\n type: 'object',\n properties: {\n id: { type: 'string' },\n toolCallId: { type: 'string' },\n toolName: { type: 'string' },\n input: { type: 'object' },\n output: { type: 'object' },\n status: { type: 'string', enum: ['pending', 'approved', 'rejected', 'completed', 'error'] },\n requiresApproval: { type: 'boolean' },\n },\n },\n Terminal: {\n type: 'object',\n properties: 
{\n id: { type: 'string' },\n name: { type: 'string' },\n command: { type: 'string' },\n cwd: { type: 'string' },\n pid: { type: 'integer' },\n status: { type: 'string', enum: ['running', 'stopped', 'error'] },\n exitCode: { type: 'integer' },\n error: { type: 'string' },\n createdAt: { type: 'string', format: 'date-time' },\n stoppedAt: { type: 'string', format: 'date-time' },\n },\n },\n },\n },\n };\n}\n\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport { existsSync, mkdirSync, writeFileSync, readdirSync, statSync, unlinkSync } from 'node:fs';\nimport { readdir } from 'node:fs/promises';\nimport { join, basename, extname, relative } from 'node:path';\nimport { nanoid } from 'nanoid';\nimport { sessionQueries, messageQueries, todoQueries, toolExecutionQueries, activeStreamQueries, checkpointQueries, type SessionConfig } from '../../db/index.js';\nimport { Agent } from '../../agent/index.js';\nimport { getConfig, getAppDataDirectory } from '../../config/index.js';\nimport * as tmux from '../../terminal/tmux.js';\nimport { getCheckpoints, revertToCheckpoint, getSessionDiff, clearCheckpointManager } from '../../checkpoints/index.js';\n\nconst sessions = new Hono();\n\nimport { setDevtoolsContext, getDevtoolsContext as getDevtoolsCtx } from '../devtools-store.js';\n\n// In-memory store for pending input (from devtools, etc.)\n// Key: sessionId, Value: { text: string, createdAt: Date }\nconst pendingInputStore = new Map<string, { text: string; createdAt: Date }>();\n\n// Clean up old pending inputs (older than 5 minutes)\nfunction cleanupPendingInputs() {\n const now = Date.now();\n for (const [sessionId, entry] of pendingInputStore) {\n if (now - entry.createdAt.getTime() > 5 * 60 * 1000) {\n pendingInputStore.delete(sessionId);\n }\n }\n}\n\n// Schemas\nconst createSessionSchema = z.object({\n name: z.string().optional(),\n workingDirectory: z.string().optional(),\n model: z.string().optional(),\n 
toolApprovals: z.record(z.string(), z.boolean()).optional(),\n});\n\nconst paginationQuerySchema = z.object({\n limit: z.string().optional(),\n offset: z.string().optional(),\n});\n\nconst messagesQuerySchema = z.object({\n limit: z.string().optional(),\n});\n\n// List all sessions\nsessions.get(\n '/',\n zValidator('query', paginationQuerySchema),\n async (c) => {\n const query = c.req.valid('query');\n const limit = parseInt(query.limit || '50');\n const offset = parseInt(query.offset || '0');\n\n const allSessions = await sessionQueries.list(limit, offset);\n\n // Check for active streams for each session\n const sessionsWithStreamInfo = await Promise.all(allSessions.map(async (s) => {\n const activeStream = await activeStreamQueries.getBySessionId(s.id);\n return {\n id: s.id,\n name: s.name,\n workingDirectory: s.workingDirectory,\n model: s.model,\n status: s.status,\n config: s.config,\n isStreaming: !!activeStream,\n createdAt: s.createdAt.toISOString(),\n updatedAt: s.updatedAt.toISOString(),\n };\n }));\n\n return c.json({\n sessions: sessionsWithStreamInfo,\n count: allSessions.length,\n limit,\n offset,\n });\n }\n);\n\n// Create a new session\nsessions.post(\n '/',\n zValidator('json', createSessionSchema),\n async (c) => {\n const body = c.req.valid('json');\n const config = getConfig();\n\n const agent = await Agent.create({\n name: body.name,\n workingDirectory: body.workingDirectory || config.resolvedWorkingDirectory,\n model: body.model || config.defaultModel,\n sessionConfig: body.toolApprovals ? 
{ toolApprovals: body.toolApprovals } : undefined,\n });\n\n const session = agent.getSession();\n\n return c.json({\n id: session.id,\n name: session.name,\n workingDirectory: session.workingDirectory,\n model: session.model,\n status: session.status,\n createdAt: session.createdAt.toISOString(),\n }, 201);\n }\n);\n\n// Get a specific session\nsessions.get('/:id', async (c) => {\n const id = c.req.param('id');\n const session = await sessionQueries.getById(id);\n\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const contextStats = await (async () => {\n const agent = await Agent.create({ sessionId: id });\n return agent.getContextStats();\n })();\n\n const todos = await todoQueries.getBySession(id);\n const pendingApprovals = await toolExecutionQueries.getPendingApprovals(id);\n\n return c.json({\n id: session.id,\n name: session.name,\n workingDirectory: session.workingDirectory,\n model: session.model,\n status: session.status,\n config: session.config,\n createdAt: session.createdAt.toISOString(),\n updatedAt: session.updatedAt.toISOString(),\n context: contextStats,\n todos: todos.map((t) => ({\n id: t.id,\n content: t.content,\n status: t.status,\n order: t.order,\n })),\n pendingApprovals: pendingApprovals.map((p) => ({\n id: p.id,\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n input: p.input,\n })),\n });\n});\n\n// Get session messages (returns AI SDK ModelMessage format)\nsessions.get(\n '/:id/messages',\n zValidator('query', messagesQuerySchema),\n async (c) => {\n const id = c.req.param('id');\n const query = c.req.valid('query');\n const limit = parseInt(query.limit || '100');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const messages = await messageQueries.getRecentBySession(id, limit);\n\n return c.json({\n sessionId: id,\n messages: messages.map((m) => ({\n id: m.id,\n ...m.modelMessage, // Spread the AI SDK ModelMessage 
(role, content)\n createdAt: m.createdAt.toISOString(),\n })),\n count: messages.length,\n });\n }\n);\n\n// Get session tool executions\nsessions.get('/:id/tools', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const executions = await toolExecutionQueries.getBySession(id);\n\n return c.json({\n sessionId: id,\n executions: executions.map((e) => ({\n id: e.id,\n toolCallId: e.toolCallId,\n toolName: e.toolName,\n input: e.input,\n output: e.output,\n status: e.status,\n requiresApproval: e.requiresApproval,\n error: e.error,\n startedAt: e.startedAt.toISOString(),\n completedAt: e.completedAt?.toISOString(),\n })),\n count: executions.length,\n });\n});\n\n// Update session (e.g., change model)\nconst updateSessionSchema = z.object({\n model: z.string().optional(),\n name: z.string().optional(),\n toolApprovals: z.record(z.string(), z.boolean()).optional(),\n});\n\nsessions.patch(\n '/:id',\n zValidator('json', updateSessionSchema),\n async (c) => {\n const id = c.req.param('id');\n const body = c.req.valid('json');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Build updates object\n const updates: { model?: string; name?: string; config?: SessionConfig } = {};\n if (body.model) updates.model = body.model;\n if (body.name !== undefined) updates.name = body.name;\n \n // Handle toolApprovals update - merge with existing config AND existing toolApprovals\n if (body.toolApprovals !== undefined) {\n const existingConfig = session.config || {};\n const existingToolApprovals = existingConfig.toolApprovals || {};\n updates.config = {\n ...existingConfig,\n toolApprovals: {\n ...existingToolApprovals,\n ...body.toolApprovals,\n },\n };\n }\n\n const updatedSession = Object.keys(updates).length > 0\n ? 
(await sessionQueries.update(id, updates)) || session\n : session;\n\n return c.json({\n id: updatedSession.id,\n name: updatedSession.name,\n model: updatedSession.model,\n status: updatedSession.status,\n workingDirectory: updatedSession.workingDirectory,\n config: updatedSession.config,\n updatedAt: updatedSession.updatedAt.toISOString(),\n });\n }\n);\n\n// Delete a session\nsessions.delete('/:id', async (c) => {\n const id = c.req.param('id');\n\n // Kill all running terminals for this session before deleting\n try {\n const session = await sessionQueries.getById(id);\n if (session) {\n const terminalIds = await tmux.listSessions();\n for (const tid of terminalIds) {\n const meta = await tmux.getMeta(tid, session.workingDirectory);\n if (meta && meta.sessionId === id) {\n await tmux.killTerminal(tid);\n }\n }\n }\n } catch (e) {\n // Ignore cleanup errors\n }\n\n // Clear the checkpoint manager for this session\n clearCheckpointManager(id);\n\n const deleted = await sessionQueries.delete(id);\n if (!deleted) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n return c.json({ success: true, id });\n});\n\n// Clear session context\nsessions.post('/:id/clear', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const agent = await Agent.create({ sessionId: id });\n await agent.clearContext();\n\n return c.json({ success: true, sessionId: id });\n});\n\n// ============================================\n// Pending Input (for devtools integration)\n// ============================================\n\nconst pendingInputSchema = z.object({\n text: z.string(),\n});\n\n// Set pending input for a session (from devtools, CLI, etc.)\nsessions.post(\n '/:id/pending-input',\n zValidator('json', pendingInputSchema),\n async (c) => {\n const id = c.req.param('id');\n const { text } = c.req.valid('json');\n\n // Verify session exists\n const 
session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Store pending input\n pendingInputStore.set(id, { text, createdAt: new Date() });\n\n // Cleanup old entries\n cleanupPendingInputs();\n\n return c.json({ success: true, sessionId: id });\n }\n);\n\n// Get and clear pending input for a session\nsessions.get('/:id/pending-input', async (c) => {\n const id = c.req.param('id');\n\n // Cleanup old entries first\n cleanupPendingInputs();\n\n const pending = pendingInputStore.get(id);\n if (!pending) {\n return c.json({ hasPendingInput: false, text: null });\n }\n\n // Clear after reading\n pendingInputStore.delete(id);\n\n return c.json({\n hasPendingInput: true,\n text: pending.text,\n createdAt: pending.createdAt.toISOString(),\n });\n});\n\n// ============================================\n// Devtools Context (current page user is viewing)\n// ============================================\n\nconst devtoolsContextSchema = z.object({\n url: z.string(),\n path: z.string(),\n pageName: z.string().optional(),\n screenWidth: z.number().optional(),\n screenHeight: z.number().optional(),\n devicePixelRatio: z.number().optional(),\n});\n\n// Update devtools context (heartbeat with current page info)\nsessions.post(\n '/:id/devtools-context',\n zValidator('json', devtoolsContextSchema),\n async (c) => {\n const id = c.req.param('id');\n const body = c.req.valid('json');\n\n // Store context (don't require session to exist - devtools may connect before session is created)\n setDevtoolsContext(id, {\n url: body.url,\n path: body.path,\n pageName: body.pageName || body.path,\n screenWidth: body.screenWidth,\n screenHeight: body.screenHeight,\n devicePixelRatio: body.devicePixelRatio,\n lastHeartbeat: new Date(),\n });\n\n return c.json({ success: true, sessionId: id });\n }\n);\n\n// Get devtools context for a session\nsessions.get('/:id/devtools-context', async (c) => {\n const id = c.req.param('id');\n\n 
const ctx = getDevtoolsCtx(id);\n if (!ctx) {\n return c.json({ connected: false, context: null });\n }\n\n return c.json({\n connected: true,\n context: {\n url: ctx.url,\n path: ctx.path,\n pageName: ctx.pageName,\n screenWidth: ctx.screenWidth,\n screenHeight: ctx.screenHeight,\n devicePixelRatio: ctx.devicePixelRatio,\n lastHeartbeat: ctx.lastHeartbeat.toISOString(),\n },\n });\n});\n\n// Get todos for a session\nsessions.get('/:id/todos', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const todos = await todoQueries.getBySession(id);\n const pending = todos.filter((t: { status: string }) => t.status === 'pending');\n const inProgress = todos.filter((t: { status: string }) => t.status === 'in_progress');\n const completed = todos.filter((t: { status: string }) => t.status === 'completed');\n const cancelled = todos.filter((t: { status: string }) => t.status === 'cancelled');\n\n // Find the \"next\" todo: first in_progress, or first pending\n const nextTodo = inProgress[0] || pending[0] || null;\n\n return c.json({\n todos: todos.map((t) => ({\n id: t.id,\n content: t.content,\n status: t.status,\n order: t.order,\n createdAt: t.createdAt.toISOString(),\n updatedAt: t.updatedAt.toISOString(),\n })),\n stats: {\n total: todos.length,\n pending: pending.length,\n inProgress: inProgress.length,\n completed: completed.length,\n cancelled: cancelled.length,\n },\n nextTodo: nextTodo ? 
{\n id: nextTodo.id,\n content: nextTodo.content,\n status: nextTodo.status,\n } : null,\n });\n});\n\n// Get checkpoints for a session\nsessions.get('/:id/checkpoints', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const checkpoints = await getCheckpoints(id);\n\n return c.json({\n sessionId: id,\n checkpoints: checkpoints.map((cp) => ({\n id: cp.id,\n messageSequence: cp.messageSequence,\n gitHead: cp.gitHead,\n createdAt: cp.createdAt.toISOString(),\n })),\n count: checkpoints.length,\n });\n});\n\n// Revert session to a specific checkpoint\nsessions.post('/:id/revert/:checkpointId', async (c) => {\n const sessionId = c.req.param('id');\n const checkpointId = c.req.param('checkpointId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Check if there's an active stream - can't revert while streaming\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n if (activeStream) {\n return c.json({ \n error: 'Cannot revert while a stream is active. 
Stop the stream first.',\n streamId: activeStream.streamId,\n }, 409);\n }\n\n const result = await revertToCheckpoint(sessionId, checkpointId);\n\n if (!result.success) {\n return c.json({ error: result.error }, 400);\n }\n\n return c.json({\n success: true,\n sessionId,\n checkpointId,\n filesRestored: result.filesRestored,\n filesDeleted: result.filesDeleted,\n messagesDeleted: result.messagesDeleted,\n checkpointsDeleted: result.checkpointsDeleted,\n });\n});\n\n// Get the diff for a session (all file changes since start)\nsessions.get('/:id/diff', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const diff = await getSessionDiff(id);\n\n return c.json({\n sessionId: id,\n files: diff.files.map((f) => ({\n path: f.path,\n status: f.status,\n hasOriginal: f.originalContent !== null,\n hasCurrent: f.currentContent !== null,\n // Optionally include content (can be large)\n // originalContent: f.originalContent,\n // currentContent: f.currentContent,\n })),\n summary: {\n created: diff.files.filter(f => f.status === 'created').length,\n modified: diff.files.filter(f => f.status === 'modified').length,\n deleted: diff.files.filter(f => f.status === 'deleted').length,\n total: diff.files.length,\n },\n });\n});\n\n// Get full diff content for a specific file\nsessions.get('/:id/diff/:filePath', async (c) => {\n const sessionId = c.req.param('id');\n const filePath = decodeURIComponent(c.req.param('filePath'));\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const diff = await getSessionDiff(sessionId);\n const fileDiff = diff.files.find(f => f.path === filePath);\n\n if (!fileDiff) {\n return c.json({ error: 'File not found in diff' }, 404);\n }\n\n return c.json({\n sessionId,\n path: fileDiff.path,\n status: fileDiff.status,\n originalContent: 
fileDiff.originalContent,\n currentContent: fileDiff.currentContent,\n });\n});\n\n// ============================================\n// Attachments API\n// ============================================\n\n/**\n * Get the attachments directory for a session\n */\nfunction getAttachmentsDir(sessionId: string): string {\n const appDataDir = getAppDataDirectory();\n return join(appDataDir, 'attachments', sessionId);\n}\n\n/**\n * Ensure the attachments directory exists\n */\nfunction ensureAttachmentsDir(sessionId: string): string {\n const dir = getAttachmentsDir(sessionId);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\n// List attachments for a session\nsessions.get('/:id/attachments', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const dir = getAttachmentsDir(sessionId);\n if (!existsSync(dir)) {\n return c.json({ sessionId, attachments: [], count: 0 });\n }\n\n const files = readdirSync(dir);\n const attachments = files.map((filename) => {\n const filePath = join(dir, filename);\n const stats = statSync(filePath);\n return {\n id: filename.split('_')[0], // Extract the nanoid prefix\n filename,\n path: filePath,\n size: stats.size,\n createdAt: stats.birthtime.toISOString(),\n };\n });\n\n return c.json({\n sessionId,\n attachments,\n count: attachments.length,\n });\n});\n\n// Upload an attachment\nsessions.post('/:id/attachments', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const contentType = c.req.header('content-type') || '';\n \n // Handle multipart form data\n if (contentType.includes('multipart/form-data')) {\n try {\n const formData = await c.req.formData();\n const file = formData.get('file');\n \n if (!file || !(file 
instanceof File)) {\n return c.json({ error: 'No file provided' }, 400);\n }\n\n const dir = ensureAttachmentsDir(sessionId);\n const id = nanoid(10);\n const ext = extname(file.name) || '';\n const safeFilename = `${id}_${basename(file.name).replace(/[^a-zA-Z0-9._-]/g, '_')}`;\n const filePath = join(dir, safeFilename);\n\n const arrayBuffer = await file.arrayBuffer();\n writeFileSync(filePath, Buffer.from(arrayBuffer));\n\n return c.json({\n id,\n filename: file.name,\n storedAs: safeFilename,\n path: filePath,\n size: file.size,\n mediaType: file.type,\n sessionId,\n }, 201);\n } catch (err) {\n console.error('Failed to upload attachment:', err);\n return c.json({ error: 'Failed to upload file' }, 500);\n }\n }\n\n // Handle base64 JSON upload\n try {\n const body = await c.req.json() as { \n filename: string; \n data: string; \n mediaType?: string;\n };\n \n if (!body.filename || !body.data) {\n return c.json({ error: 'Missing filename or data' }, 400);\n }\n\n const dir = ensureAttachmentsDir(sessionId);\n const id = nanoid(10);\n const ext = extname(body.filename) || '';\n const safeFilename = `${id}_${basename(body.filename).replace(/[^a-zA-Z0-9._-]/g, '_')}`;\n const filePath = join(dir, safeFilename);\n\n // Handle base64 data URL or raw base64\n let base64Data = body.data;\n if (base64Data.includes(',')) {\n base64Data = base64Data.split(',')[1];\n }\n \n const buffer = Buffer.from(base64Data, 'base64');\n writeFileSync(filePath, buffer);\n\n return c.json({\n id,\n filename: body.filename,\n storedAs: safeFilename,\n path: filePath,\n size: buffer.length,\n mediaType: body.mediaType,\n sessionId,\n }, 201);\n } catch (err) {\n console.error('Failed to upload attachment:', err);\n return c.json({ error: 'Failed to upload file' }, 500);\n }\n});\n\n// Delete an attachment\nsessions.delete('/:id/attachments/:attachmentId', async (c) => {\n const sessionId = c.req.param('id');\n const attachmentId = c.req.param('attachmentId');\n\n const session = await 
sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const dir = getAttachmentsDir(sessionId);\n if (!existsSync(dir)) {\n return c.json({ error: 'Attachment not found' }, 404);\n }\n\n // Find the file that starts with the attachment ID\n const files = readdirSync(dir);\n const file = files.find(f => f.startsWith(attachmentId + '_'));\n \n if (!file) {\n return c.json({ error: 'Attachment not found' }, 404);\n }\n\n const filePath = join(dir, file);\n unlinkSync(filePath);\n\n return c.json({ success: true, id: attachmentId });\n});\n\n// ============================================================================\n// Workspace Files API - For @ mentions in chat input\n// ============================================================================\n\nconst filesQuerySchema = z.object({\n query: z.string().optional(), // Filter query (e.g., \"src/com\" to match \"src/components\")\n limit: z.string().optional(), // Max results (default 50)\n});\n\n// Directories to ignore when listing files\nconst IGNORED_DIRECTORIES = new Set([\n 'node_modules',\n '.git',\n '.next',\n 'dist',\n 'build',\n '.turbo',\n '.cache',\n 'coverage',\n '__pycache__',\n '.pytest_cache',\n 'venv',\n '.venv',\n 'target', // Rust\n '.idea',\n '.vscode',\n]);\n\n// File extensions to ignore\nconst IGNORED_EXTENSIONS = new Set([\n '.pyc',\n '.pyo',\n '.class',\n '.o',\n '.obj',\n '.exe',\n '.dll',\n '.so',\n '.dylib',\n]);\n\ninterface WorkspaceFile {\n path: string; // Relative path from working directory\n name: string; // File/folder name\n type: 'file' | 'folder';\n extension?: string; // For files only\n}\n\n// Maximum depth to recurse into directories\nconst MAX_RECURSION_DEPTH = 10;\n\n/**\n * Check if a query matches a file/folder name using fuzzy matching\n * Matches if:\n * - Name contains the query (substring match)\n * - Path contains the query\n * - Query matches start of name segments (e.g., \"comp\" matches 
\"components\")\n */\nfunction matchesQuery(name: string, relativePath: string, query: string): boolean {\n if (!query) return true;\n \n const queryLower = query.toLowerCase();\n const nameLower = name.toLowerCase();\n const pathLower = relativePath.toLowerCase();\n \n // Exact substring match in name or path\n if (nameLower.includes(queryLower) || pathLower.includes(queryLower)) {\n return true;\n }\n \n // Match by path segments (e.g., \"prompting\" matches \"apps/prompting\")\n const pathSegments = pathLower.split('/');\n for (const segment of pathSegments) {\n if (segment.includes(queryLower) || segment.startsWith(queryLower)) {\n return true;\n }\n }\n \n return false;\n}\n\n/**\n * Calculate a relevance score for sorting results\n * Higher scores = more relevant\n */\nfunction getRelevanceScore(name: string, relativePath: string, query: string): number {\n if (!query) return 0;\n \n const queryLower = query.toLowerCase();\n const nameLower = name.toLowerCase();\n const pathLower = relativePath.toLowerCase();\n \n // Exact name match\n if (nameLower === queryLower) return 100;\n \n // Name starts with query\n if (nameLower.startsWith(queryLower)) return 90;\n \n // Name contains query\n if (nameLower.includes(queryLower)) return 80;\n \n // Last path segment matches\n const lastSegment = pathLower.split('/').pop() || '';\n if (lastSegment === queryLower) return 70;\n if (lastSegment.startsWith(queryLower)) return 60;\n if (lastSegment.includes(queryLower)) return 50;\n \n // Path contains query\n if (pathLower.includes(queryLower)) return 30;\n \n // Any segment starts with query\n const segments = pathLower.split('/');\n for (const segment of segments) {\n if (segment.startsWith(queryLower)) return 40;\n }\n \n return 10;\n}\n\n/**\n * Recursively list files and folders in a directory\n * Always recurses into directories (up to MAX_RECURSION_DEPTH) to find deep matches\n */\nasync function listWorkspaceFiles(\n baseDir: string,\n currentDir: string,\n query: 
string,\n limit: number,\n results: WorkspaceFile[] = [],\n depth: number = 0\n): Promise<WorkspaceFile[]> {\n // Stop recursion at max depth or if we have enough results\n if (depth > MAX_RECURSION_DEPTH || results.length >= limit * 2) {\n return results;\n }\n\n try {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n // Stop if we have way more than enough results\n if (results.length >= limit * 2) break;\n\n const fullPath = join(currentDir, entry.name);\n const relativePath = relative(baseDir, fullPath);\n\n // Skip ignored directories\n if (entry.isDirectory() && IGNORED_DIRECTORIES.has(entry.name)) {\n continue;\n }\n\n // Skip hidden files/folders (starting with .)\n if (entry.name.startsWith('.')) {\n continue;\n }\n\n // Skip ignored extensions\n const ext = extname(entry.name).toLowerCase();\n if (IGNORED_EXTENSIONS.has(ext)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n // Add folder if it matches query\n if (matchesQuery(entry.name, relativePath, query)) {\n results.push({\n path: relativePath,\n name: entry.name,\n type: 'folder',\n });\n }\n \n // Always recurse into directories to find deep matches\n await listWorkspaceFiles(baseDir, fullPath, query, limit, results, depth + 1);\n } else if (entry.isFile()) {\n // Add file if it matches query\n if (matchesQuery(entry.name, relativePath, query)) {\n results.push({\n path: relativePath,\n name: entry.name,\n type: 'file',\n extension: ext || undefined,\n });\n }\n }\n }\n } catch {\n // Ignore permission errors etc.\n }\n\n return results;\n}\n\n// List workspace files for a session (for @ mentions)\nsessions.get(\n '/:id/files',\n zValidator('query', filesQuerySchema),\n async (c) => {\n const sessionId = c.req.param('id');\n const { query = '', limit: limitStr = '50' } = c.req.valid('query');\n const limit = Math.min(parseInt(limitStr) || 50, 100); // Cap at 100\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) 
{\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const workingDirectory = session.workingDirectory;\n \n if (!existsSync(workingDirectory)) {\n return c.json({ \n sessionId,\n workingDirectory,\n files: [], \n count: 0,\n error: 'Working directory does not exist' \n });\n }\n\n try {\n const allFiles = await listWorkspaceFiles(\n workingDirectory,\n workingDirectory,\n query,\n limit\n );\n\n // Sort by relevance (best matches first), then folders first, then alphabetically\n allFiles.sort((a, b) => {\n // If there's a query, sort by relevance first\n if (query) {\n const scoreA = getRelevanceScore(a.name, a.path, query);\n const scoreB = getRelevanceScore(b.name, b.path, query);\n if (scoreA !== scoreB) {\n return scoreB - scoreA; // Higher score first\n }\n }\n \n // Then folders first\n if (a.type !== b.type) {\n return a.type === 'folder' ? -1 : 1;\n }\n \n // Then alphabetically by path\n return a.path.localeCompare(b.path);\n });\n\n // Limit results after sorting to get the most relevant ones\n const files = allFiles.slice(0, limit);\n\n return c.json({\n sessionId,\n workingDirectory,\n files,\n count: files.length,\n query,\n });\n } catch (err) {\n console.error('Failed to list workspace files:', err);\n return c.json({ \n error: 'Failed to list files',\n sessionId,\n workingDirectory,\n files: [],\n count: 0,\n }, 500);\n }\n }\n);\n\n// ============================================\n// Session Files (proxied to remote server)\n// ============================================\n\nsessions.get('/:id/session-files', async (c) => {\n const sessionId = c.req.param('id');\n\n try {\n const { isRemoteConfigured } = await import('../../db/remote.js');\n if (!isRemoteConfigured()) {\n return c.json({ files: [] });\n }\n\n const { storageQueries } = await import('../../db/remote.js');\n const files = await storageQueries.getSessionFiles(sessionId);\n return c.json({ sessionId, files });\n } catch (err: any) {\n console.error('Failed to get session 
files:', err.message);\n return c.json({ sessionId, files: [] });\n }\n});\n\nsessions.get('/files/:fileId/download', async (c) => {\n const fileId = c.req.param('fileId');\n\n try {\n const { isRemoteConfigured, storageQueries } = await import('../../db/remote.js');\n if (!isRemoteConfigured()) {\n return c.json({ error: 'Remote server not configured' }, 503);\n }\n\n const result = await storageQueries.getDownloadUrl(fileId);\n return c.json(result);\n } catch (err: any) {\n return c.json({ error: err.message }, 500);\n }\n});\n\n/**\n * GET /sessions/:id/browser-recording\n * Get the browser recording(s) for a session (MP4 download URLs).\n */\nsessions.get('/:id/browser-recording', async (c) => {\n const sessionId = c.req.param('id');\n\n try {\n const { isRemoteConfigured, storageQueries } = await import('../../db/remote.js');\n if (!isRemoteConfigured()) {\n return c.json({ sessionId, recordings: [] });\n }\n\n const files = await storageQueries.getSessionFiles(sessionId);\n const recordings = files.filter((f) => f.category === 'browser-recording');\n\n if (recordings.length === 0) {\n return c.json({ sessionId, recordings: [], message: 'No browser recordings for this session' });\n }\n\n return c.json({\n sessionId,\n recordings: recordings.map((r) => ({\n id: r.id,\n fileName: r.fileName,\n sizeBytes: r.sizeBytes,\n createdAt: r.createdAt,\n downloadUrl: r.downloadUrl,\n expiresAt: r.downloadUrlExpiresAt,\n })),\n });\n } catch (err: any) {\n console.error('Failed to get browser recordings:', err.message);\n return c.json({ sessionId, recordings: [], error: err.message });\n }\n});\n\nexport { sessions };\n","import {\n streamText,\n generateText,\n tool,\n stepCountIs,\n type ToolSet,\n type ModelMessage,\n} from 'ai';\nimport { isAnthropicModel, resolveModel } from './model.js';\nimport { z } from 'zod';\nimport { nanoid } from 'nanoid';\nimport {\n sessionQueries,\n toolExecutionQueries,\n Session,\n ToolExecution,\n} from '../db/index.js';\nimport { 
getConfig, requiresApproval, SessionConfig } from '../config/index.js';\nimport { createTools, BashToolProgress, WriteFileProgress, SearchToolProgress, type TaskCompletionSignal } from '../tools/index.js';\nimport { ContextManager } from './context.js';\nimport { buildSystemPrompt, buildTaskPromptAddendum } from './prompts.js';\nimport { sendWebhook, type WebhookEvent } from '../utils/webhook.js';\nimport { type TaskConfig } from '../db/index.js';\n\nconst MAX_SSE_FIELD_LENGTH = 8 * 1024;\nconst SSE_PREVIEW_LENGTH = 2 * 1024;\n\nfunction truncateWriteFileInput(input: Record<string, unknown>): Record<string, unknown> {\n const out = { ...input };\n for (const key of ['content', 'old_string', 'new_string'] as const) {\n const val = out[key];\n if (typeof val === 'string' && val.length > MAX_SSE_FIELD_LENGTH) {\n out[key] = `${val.slice(0, SSE_PREVIEW_LENGTH)}\\n... (truncated)`;\n out[`${key}Truncated`] = true;\n out[`${key}Length`] = val.length;\n }\n }\n return out;\n}\n\n// Shared store for approval resolvers (needed because approve/reject come from different HTTP requests)\nconst approvalResolvers = new Map<string, { \n resolve: (approved: boolean) => void; \n reason?: string;\n sessionId: string;\n}>();\n\nexport interface AgentOptions {\n sessionId?: string;\n name?: string;\n workingDirectory?: string;\n model?: string;\n sessionConfig?: Partial<SessionConfig>;\n}\n\n/** Attachment for user messages (images, files) */\nexport interface MessageAttachment {\n type: 'image' | 'file';\n data: string; // base64 data URL or raw base64\n mediaType?: string;\n filename?: string; // Original filename for context\n savedPath?: string; // Path where file was saved on disk\n}\n\nexport interface AgentRunOptions {\n prompt: string;\n /** Optional file/image attachments to include in the message */\n attachments?: MessageAttachment[];\n abortSignal?: AbortSignal;\n /** Skip saving user message (if already saved externally) */\n skipSaveUserMessage?: boolean;\n onText?: 
(text: string) => void;\n onToolCall?: (toolCall: { toolCallId: string; toolName: string; input: unknown }) => void;\n onToolResult?: (result: { toolCallId: string; toolName: string; output: unknown }) => void;\n onApprovalRequired?: (execution: ToolExecution) => void;\n onStepFinish?: (step: { text?: string; toolCalls?: unknown[]; usage?: unknown }) => void;\n onAbort?: (info: { steps: unknown[] }) => void;\n /** Called when a tool (like bash, write_file, or explore_agent) has progress to report */\n onToolProgress?: (progress: { toolName: string; data: BashToolProgress | WriteFileProgress | SearchToolProgress }) => void;\n}\n\nexport interface AgentStreamResult {\n sessionId: string;\n stream: ReturnType<typeof streamText>;\n waitForApprovals: () => Promise<ToolExecution[]>;\n /** Call this after stream completes to save response messages */\n saveResponseMessages: () => Promise<void>;\n}\n\n/**\n * The main coding agent that orchestrates LLM interactions\n */\nexport class Agent {\n private session: Session;\n private context: ContextManager;\n private baseTools: ToolSet;\n private pendingApprovals: Map<string, ToolExecution> = new Map();\n\n private constructor(session: Session, context: ContextManager, tools: ToolSet) {\n this.session = session;\n this.context = context;\n this.baseTools = tools;\n }\n\n /**\n * Create tools with optional progress callbacks\n */\n private async createToolsWithCallbacks(options: {\n onToolProgress?: AgentRunOptions['onToolProgress'];\n }): Promise<ToolSet> {\n const config = getConfig();\n return createTools({\n sessionId: this.session.id,\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n onBashProgress: options.onToolProgress\n ? (progress) => options.onToolProgress!({ toolName: 'bash', data: progress })\n : undefined,\n onWriteFileProgress: options.onToolProgress\n ? 
(progress) => options.onToolProgress!({ toolName: 'write_file', data: progress })\n : undefined,\n onSearchProgress: options.onToolProgress\n ? (progress) => options.onToolProgress!({ toolName: 'explore_agent', data: progress })\n : undefined,\n });\n }\n\n /**\n * Create or resume an agent session\n */\n static async create(options: AgentOptions = {}): Promise<Agent> {\n const config = getConfig();\n\n // Get or create session\n let session: Session;\n\n if (options.sessionId) {\n const existing = await sessionQueries.getById(options.sessionId);\n if (!existing) {\n throw new Error(`Session not found: ${options.sessionId}`);\n }\n session = existing;\n } else {\n session = await sessionQueries.create({\n name: options.name,\n workingDirectory: options.workingDirectory || config.resolvedWorkingDirectory,\n model: options.model || config.defaultModel,\n config: options.sessionConfig as SessionConfig,\n });\n }\n\n // Create context manager\n const context = new ContextManager({\n sessionId: session.id,\n modelId: session.model || config.defaultModel,\n maxContextChars: config.context?.maxChars || 200_000,\n keepRecentMessages: config.context?.keepRecentMessages || 10,\n autoSummarize: config.context?.autoSummarize ?? 
true,\n });\n\n // Create tools\n const tools = await createTools({\n sessionId: session.id,\n workingDirectory: session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n });\n\n return new Agent(session, context, tools);\n }\n\n /**\n * Get the session ID\n */\n get sessionId(): string {\n return this.session.id;\n }\n\n /**\n * Get session details\n */\n getSession(): Session {\n return this.session;\n }\n\n /**\n * Build user message content from prompt and attachments\n */\n private buildUserMessageContent(\n prompt: string,\n attachments?: MessageAttachment[]\n ): string | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> {\n if (!attachments || attachments.length === 0) {\n return prompt;\n }\n\n // Build content array with text and file parts\n const contentParts: Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> = [];\n \n // IMPORTANT: Put file location info FIRST so the model knows where files are saved\n // This gives the model context about file paths before it sees the images\n const attachmentDescriptions = attachments\n .map((a, i) => {\n const name = a.filename || `attachment_${i + 1}`;\n const typeLabel = a.type === 'image' ? 'Image' : 'File';\n const location = a.savedPath || '(path unknown)';\n return `${i + 1}. ${typeLabel}: \"${name}\" saved at: ${location}`;\n })\n .join('\\n');\n \n contentParts.push({ \n type: 'text', \n text: `[FILE ATTACHMENTS - The user has attached the following files which are saved on disk]\\n${attachmentDescriptions}\\n\\nYou can reference these files by their paths above. 
The file contents are also shown inline below.` \n });\n \n // Add user's text prompt\n if (prompt) {\n contentParts.push({ type: 'text', text: `\\n[USER MESSAGE]\\n${prompt}` });\n }\n \n // Add file/image parts with filename and path metadata\n for (const attachment of attachments) {\n if (attachment.type === 'image') {\n contentParts.push({\n type: 'image',\n image: attachment.data, // base64 data URL or raw base64\n mediaType: attachment.mediaType,\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n } else {\n contentParts.push({\n type: 'file',\n data: attachment.data,\n mediaType: attachment.mediaType || 'application/octet-stream',\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n }\n }\n \n return contentParts;\n }\n\n /**\n * Run the agent with a prompt (streaming)\n */\n async stream(options: AgentRunOptions): Promise<AgentStreamResult> {\n const config = getConfig();\n\n // Build user message content with attachments\n const userContent = this.buildUserMessageContent(options.prompt, options.attachments);\n\n // Add user message to context (skip if already saved externally)\n if (!options.skipSaveUserMessage) {\n this.context.addUserMessage(userContent);\n }\n\n // Update session status\n await sessionQueries.updateStatus(this.session.id, 'active');\n\n // Build system prompt with enhanced skill discovery\n const systemPrompt = await buildSystemPrompt({\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n sessionId: this.session.id,\n discoveredSkills: config.discoveredSkills,\n // TODO: Pass activeFiles from client for glob matching\n activeFiles: [],\n });\n\n // Get conversation history\n const messages = await this.context.getMessages();\n\n // Create tools with progress callbacks if needed\n const tools = options.onToolProgress\n ? 
await this.createToolsWithCallbacks({ onToolProgress: options.onToolProgress })\n : this.baseTools;\n\n // Wrap tools with approval checking\n const wrappedTools = this.wrapToolsWithApproval(options, tools);\n\n // Create stream with reasoning enabled for supported models\n const useAnthropic = isAnthropicModel(this.session.model);\n const stream = streamText({\n model: resolveModel(this.session.model) as any,\n system: systemPrompt,\n messages: messages as any,\n tools: wrappedTools,\n stopWhen: stepCountIs(500),\n // Forward abort signal if provided\n abortSignal: options.abortSignal,\n // Enable extended thinking/reasoning for models that support it\n providerOptions: useAnthropic\n ? {\n anthropic: {\n toolStreaming: true,\n thinking: {\n type: 'enabled',\n budgetTokens: 10000,\n },\n },\n }\n : undefined,\n onStepFinish: async (step) => {\n options.onStepFinish?.(step as any);\n },\n onAbort: ({ steps }) => {\n options.onAbort?.({ steps });\n },\n });\n\n // Helper to save response messages after stream completes\n const saveResponseMessages = async () => {\n const result = await stream;\n const response = await result.response;\n const responseMessages = response.messages as ModelMessage[];\n this.context.addResponseMessages(responseMessages);\n };\n\n return {\n sessionId: this.session.id,\n stream,\n waitForApprovals: () => this.waitForApprovals(),\n saveResponseMessages,\n };\n }\n\n /**\n * Run the agent with a prompt (non-streaming)\n */\n async run(options: Omit<AgentRunOptions, 'onText'>): Promise<{ text: string; steps: unknown[] }> {\n const config = getConfig();\n\n // Add user message to context\n this.context.addUserMessage(options.prompt);\n\n // Build system prompt with enhanced skill discovery\n const systemPrompt = await buildSystemPrompt({\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n sessionId: this.session.id,\n discoveredSkills: config.discoveredSkills,\n activeFiles: [],\n 
});\n\n // Get conversation history\n const messages = await this.context.getMessages();\n\n // Create tools with progress callbacks if needed\n const tools = options.onToolProgress\n ? await this.createToolsWithCallbacks({ onToolProgress: options.onToolProgress })\n : this.baseTools;\n\n // Wrap tools with approval checking\n const wrappedTools = this.wrapToolsWithApproval(options, tools);\n\n const useAnthropic = isAnthropicModel(this.session.model);\n const result = await generateText({\n model: resolveModel(this.session.model) as any,\n system: systemPrompt,\n messages: messages as any,\n tools: wrappedTools,\n stopWhen: stepCountIs(500),\n // Enable extended thinking/reasoning for models that support it\n providerOptions: useAnthropic\n ? {\n anthropic: {\n thinking: {\n type: 'enabled',\n budgetTokens: 10000,\n },\n },\n }\n : undefined,\n });\n\n // Save response messages using the proper AI SDK format\n const responseMessages = result.response.messages as ModelMessage[];\n this.context.addResponseMessages(responseMessages);\n\n return {\n text: result.text,\n steps: result.steps,\n };\n }\n\n /**\n * Run the agent in task mode — loops autonomously until the agent calls\n * complete_task or task_failed (or hits maxIterations).\n * All tools run without approval. Webhook events are fired throughout.\n */\n async runTask(options: {\n prompt: string;\n taskConfig: TaskConfig;\n abortSignal?: AbortSignal;\n writeSSE?: (data: string) => Promise<void>;\n onText?: (text: string) => void;\n onToolCall?: (toolCall: { toolCallId: string; toolName: string; input: unknown }) => void;\n onToolResult?: (result: { toolCallId: string; toolName: string; output: unknown }) => void;\n onToolProgress?: AgentRunOptions['onToolProgress'];\n onStepFinish?: AgentRunOptions['onStepFinish'];\n }): Promise<{ status: 'completed' | 'failed'; result?: unknown; error?: string; iterations: number }> {\n const config = getConfig();\n const maxIterations = options.taskConfig.maxIterations ?? 
50;\n const webhookUrl = options.taskConfig.webhookUrl;\n\n const fireWebhook = (type: WebhookEvent['type'], data: unknown) => {\n if (!webhookUrl) return;\n sendWebhook(webhookUrl, {\n type,\n taskId: this.session.id,\n sessionId: this.session.id,\n timestamp: new Date().toISOString(),\n data,\n });\n };\n\n // Completion signal shared between tools and the loop.\n const completion: { signal: TaskCompletionSignal | null } = { signal: null };\n const onComplete = (signal: TaskCompletionSignal) => {\n completion.signal = signal;\n };\n\n // Browser recording for task mode: track proxy + recorder per session\n let taskRecorder: import('../browser/recorder.js').FrameRecorder | null = null;\n const sessionId = this.session.id;\n\n const emit = options.writeSSE;\n\n const bashProgressHandler = (progress: BashToolProgress) => {\n options.onToolProgress?.({ toolName: 'bash', data: progress });\n if (emit) emit(JSON.stringify({ type: 'tool-progress', toolName: 'bash', data: progress })).catch(() => {});\n\n // Start browser proxy + recorder when agent-browser opens\n const port = progress.browserStreamPort;\n if (port && progress.status === 'started') {\n import('../browser/stream-proxy.js').then(({ getOrCreateProxy }) => {\n const proxy = getOrCreateProxy(sessionId, port);\n if (!taskRecorder) {\n import('../browser/recorder.js').then(({ FrameRecorder }) => {\n taskRecorder = new FrameRecorder(sessionId);\n taskRecorder.start();\n });\n }\n if (proxy.listenerCount('frame') === 0) {\n proxy.on('frame', (frame) => {\n taskRecorder?.addFrame(frame);\n if (emit) emit(JSON.stringify({ type: 'browser-frame', data: frame.data, metadata: frame.metadata })).catch(() => {});\n });\n proxy.on('status', (s: any) => {\n if (emit) emit(JSON.stringify({ type: 'browser-status', ...s })).catch(() => {});\n });\n }\n });\n }\n };\n\n // Build task-augmented tools (no approval wrapping — all tools auto-approved)\n const taskTools = await createTools({\n sessionId: this.session.id,\n 
workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n onBashProgress: bashProgressHandler,\n onWriteFileProgress: (progress) => {\n options.onToolProgress?.({ toolName: 'write_file', data: progress });\n if (emit) emit(JSON.stringify({ type: 'tool-progress', toolName: 'write_file', data: progress })).catch(() => {});\n },\n onSearchProgress: (progress) => {\n options.onToolProgress?.({ toolName: 'explore_agent', data: progress });\n if (emit) emit(JSON.stringify({ type: 'tool-progress', toolName: 'explore_agent', data: progress })).catch(() => {});\n },\n taskTools: {\n outputSchema: options.taskConfig.outputSchema,\n onComplete,\n },\n });\n\n // Build system prompt with task addendum\n const baseSystemPrompt = await buildSystemPrompt({\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n sessionId: this.session.id,\n discoveredSkills: config.discoveredSkills,\n activeFiles: [],\n });\n const taskAddendum = buildTaskPromptAddendum(options.taskConfig.outputSchema);\n const systemPrompt = `${baseSystemPrompt}\\n\\n${taskAddendum}`;\n\n fireWebhook('task.started', { prompt: options.prompt });\n\n if (emit) {\n await emit(JSON.stringify({ type: 'data-user-message', data: { id: `user_${Date.now()}`, content: options.prompt } }));\n }\n\n // Add the initial user message\n await this.context.addUserMessage(options.prompt);\n\n let iteration = 0;\n\n while (iteration < maxIterations) {\n iteration++;\n\n if (options.abortSignal?.aborted) {\n const cancelError = 'Task was cancelled';\n fireWebhook('task.failed', { status: 'failed', error: cancelError, iterations: iteration });\n return { status: 'failed', error: cancelError, iterations: iteration };\n }\n\n const messages = await this.context.getMessages();\n const useAnthropic = isAnthropicModel(this.session.model);\n\n // Emit start-of-message event for the dashboard\n if (emit) {\n await emit(JSON.stringify({ type: 
'start', messageId: `msg_${Date.now()}` }));\n }\n\n let textStarted = false;\n let textId = `text_${Date.now()}`;\n let reasoningId = `reasoning_${Date.now()}`;\n let reasoningStarted = false;\n const toolCallStarts = new Set<string>();\n\n const iterStream = streamText({\n model: resolveModel(this.session.model) as any,\n system: systemPrompt,\n messages: messages as any,\n tools: taskTools,\n stopWhen: stepCountIs(500),\n abortSignal: options.abortSignal,\n providerOptions: useAnthropic\n ? {\n anthropic: {\n toolStreaming: true,\n thinking: { type: 'enabled', budgetTokens: 10000 },\n },\n }\n : undefined,\n onStepFinish: async (step: any) => {\n options.onStepFinish?.(step);\n fireWebhook('task.step_finished', { iteration, text: step.text });\n if (emit) {\n if (textStarted) {\n await emit(JSON.stringify({ type: 'text-end', id: textId }));\n textStarted = false;\n textId = `text_${Date.now()}`;\n }\n await emit(JSON.stringify({ type: 'finish-step' }));\n }\n },\n });\n\n // Consume the stream, emitting SSE events for live dashboard updates\n for await (const part of iterStream.fullStream) {\n if (part.type === 'text-delta') {\n if (emit) {\n if (!textStarted) {\n await emit(JSON.stringify({ type: 'text-start', id: textId }));\n textStarted = true;\n }\n await emit(JSON.stringify({ type: 'text-delta', id: textId, delta: part.text }));\n }\n } else if (part.type === 'reasoning-start') {\n if (emit) {\n await emit(JSON.stringify({ type: 'reasoning-start', id: reasoningId }));\n reasoningStarted = true;\n }\n } else if (part.type === 'reasoning-delta') {\n if (emit) {\n await emit(JSON.stringify({ type: 'reasoning-delta', id: reasoningId, delta: part.text }));\n }\n } else if (part.type === 'reasoning-end') {\n if (emit && reasoningStarted) {\n await emit(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n reasoningStarted = false;\n reasoningId = `reasoning_${Date.now()}`;\n }\n } else if ((part as any).type === 'tool-call-streaming-start') {\n if 
(emit) {\n const p = part as any;\n await emit(JSON.stringify({ type: 'tool-input-start', toolCallId: p.toolCallId, toolName: p.toolName }));\n toolCallStarts.add(p.toolCallId);\n }\n } else if ((part as any).type === 'tool-call-delta') {\n if (emit) {\n const p = part as any;\n await emit(JSON.stringify({ type: 'tool-input-delta', toolCallId: p.toolCallId, argsTextDelta: p.argsTextDelta }));\n }\n } else if (part.type === 'tool-call') {\n if (emit) {\n if (!toolCallStarts.has(part.toolCallId)) {\n await emit(JSON.stringify({ type: 'tool-input-start', toolCallId: part.toolCallId, toolName: part.toolName }));\n toolCallStarts.add(part.toolCallId);\n }\n const safeInput = part.toolName === 'write_file' && part.input && typeof part.input === 'object'\n ? truncateWriteFileInput(part.input as Record<string, unknown>)\n : part.input;\n await emit(JSON.stringify({ type: 'tool-input-available', toolCallId: part.toolCallId, toolName: part.toolName, input: safeInput }));\n }\n } else if (part.type === 'tool-result') {\n if (emit) {\n await emit(JSON.stringify({ type: 'tool-output-available', toolCallId: part.toolCallId, output: part.output }));\n }\n } else if (part.type === 'error') {\n console.error('Task stream error:', part.error);\n if (emit) {\n await emit(JSON.stringify({ type: 'error', errorText: String(part.error) }));\n }\n }\n }\n\n // End open text/reasoning blocks after stream consumed\n if (emit && textStarted) {\n await emit(JSON.stringify({ type: 'text-end', id: textId }));\n }\n if (emit && reasoningStarted) {\n await emit(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n }\n\n // Get results after stream is fully consumed\n const iterResponse = await iterStream.response;\n const responseMessages = iterResponse.messages as ModelMessage[];\n await this.context.addResponseMessages(responseMessages);\n\n const resultText = await iterStream.text;\n const resultSteps = await iterStream.steps;\n\n // Fire webhook for text output\n if (resultText) {\n 
options.onText?.(resultText);\n fireWebhook('task.message', { iteration, text: resultText });\n }\n\n // Fire webhooks for tool calls in this iteration\n for (const step of resultSteps) {\n if ((step as any).toolCalls) {\n for (const tc of (step as any).toolCalls) {\n options.onToolCall?.({ toolCallId: tc.toolCallId, toolName: tc.toolName, input: tc.args });\n fireWebhook('task.tool_call', { iteration, toolName: tc.toolName, toolCallId: tc.toolCallId, input: tc.args });\n }\n }\n if ((step as any).toolResults) {\n for (const tr of (step as any).toolResults) {\n options.onToolResult?.({ toolCallId: tr.toolCallId, toolName: tr.toolName, output: tr.result });\n fireWebhook('task.tool_result', { iteration, toolName: tr.toolName, toolCallId: tr.toolCallId, output: tr.result });\n }\n }\n }\n\n // Check if a completion tool was called\n if (completion.signal) {\n const sig = completion.signal;\n const finalStatus = sig.status;\n\n // Upload task output files to GCS if configured\n let fileUrls: string[] | undefined;\n if (finalStatus === 'completed' && sig.result && typeof sig.result === 'object') {\n const resultObj = sig.result as Record<string, unknown>;\n const filePaths = Array.isArray(resultObj.files) ? resultObj.files as string[] : [];\n if (filePaths.length > 0) {\n fileUrls = await this.uploadTaskFiles(filePaths);\n }\n }\n\n // Encode and upload browser recording if one was captured\n const recordingUrls = await this.finishTaskRecording(taskRecorder);\n\n const allFileUrls = [...(fileUrls || []), ...recordingUrls];\n\n const eventType = finalStatus === 'completed' ? 'task.completed' : 'task.failed';\n fireWebhook(eventType as WebhookEvent['type'], {\n status: finalStatus,\n result: sig.result,\n error: sig.error,\n iterations: iteration,\n fileUrls: allFileUrls.length > 0 ? allFileUrls : undefined,\n browserRecordingUrls: recordingUrls.length > 0 ? 
recordingUrls : undefined,\n });\n\n // Persist task result in session config\n const updatedTask: TaskConfig = {\n ...options.taskConfig,\n status: finalStatus,\n result: sig.result,\n error: sig.error,\n iterations: iteration,\n };\n await sessionQueries.update(this.session.id, {\n config: { ...this.session.config, task: updatedTask },\n });\n\n return {\n status: finalStatus,\n result: sig.result,\n error: sig.error,\n iterations: iteration,\n };\n }\n\n // No completion tool called — inject continuation and re-run\n const continuationPrompt = 'Continue working on the task. Before calling `complete_task`, VERIFY your work is correct — re-read edited files, run the linter, run tests if applicable, and check the browser/server if you made UI or API changes. Make sure you searched the right directories and found everything relevant. When fully verified, call `complete_task` with the result. If you cannot complete it, call `task_failed` with a reason.';\n if (emit) {\n await emit(JSON.stringify({ type: 'data-user-message', data: { id: `user_${Date.now()}`, content: continuationPrompt } }));\n }\n await this.context.addUserMessage(continuationPrompt);\n }\n\n // Hit max iterations without completion\n const timeoutError = `Task did not complete within ${maxIterations} iterations`;\n const timeoutRecordingUrls = await this.finishTaskRecording(taskRecorder);\n fireWebhook('task.failed', {\n status: 'failed',\n error: timeoutError,\n iterations: iteration,\n browserRecordingUrls: timeoutRecordingUrls.length > 0 ? 
timeoutRecordingUrls : undefined,\n });\n\n const updatedTask: TaskConfig = {\n ...options.taskConfig,\n status: 'failed',\n error: timeoutError,\n iterations: iteration,\n };\n await sessionQueries.update(this.session.id, {\n config: { ...this.session.config, task: updatedTask },\n });\n\n return { status: 'failed', error: timeoutError, iterations: iteration };\n }\n\n /**\n * Stop a task-mode browser recording, encode to MP4, upload to GCS.\n * Returns download URLs for any recordings produced.\n */\n private async finishTaskRecording(\n recorder: import('../browser/recorder.js').FrameRecorder | null\n ): Promise<string[]> {\n // Clean up the browser proxy\n try {\n const { destroyProxy } = await import('../browser/stream-proxy.js');\n destroyProxy(this.session.id);\n } catch {}\n\n if (!recorder || recorder.frameCount === 0) {\n recorder?.clear();\n return [];\n }\n\n recorder.stop();\n try {\n const { isRemoteConfigured, storageQueries } = await import('../db/remote.js');\n if (!isRemoteConfigured()) { recorder.clear(); return []; }\n\n const result = await recorder.encode();\n recorder.clear();\n if (!result) return [];\n\n const { readFile, unlink } = await import('node:fs/promises');\n\n const uploadInfo = await storageQueries.getUploadUrl(\n this.session.id,\n `browser-recording-${Date.now()}.mp4`,\n 'video/mp4',\n 'browser-recording'\n );\n\n const fileData = await readFile(result.path);\n await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': 'video/mp4' },\n body: fileData,\n });\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: result.sizeBytes });\n\n const dlInfo = await storageQueries.getDownloadUrl(uploadInfo.fileId);\n await unlink(result.path).catch(() => {});\n\n console.log(`[TASK] Browser recording uploaded (${result.sizeBytes} bytes)`);\n return [dlInfo.downloadUrl];\n } catch (err: any) {\n console.error('[TASK] Failed to upload browser recording:', err.message);\n recorder.clear();\n return [];\n 
}\n }\n\n /**\n * Upload task output files to GCS via the remote server.\n * Returns an array of download URLs for successfully uploaded files.\n */\n private async uploadTaskFiles(filePaths: string[]): Promise<string[]> {\n try {\n const { isRemoteConfigured, storageQueries } = await import('../db/remote.js');\n if (!isRemoteConfigured()) return [];\n\n const { readFile } = await import('node:fs/promises');\n const { join, basename } = await import('node:path');\n\n const urls: string[] = [];\n\n for (const filePath of filePaths) {\n try {\n const fullPath = filePath.startsWith('/')\n ? filePath\n : join(this.session.workingDirectory, filePath);\n const fileName = basename(fullPath);\n\n // Determine content type\n const ext = fileName.split('.').pop()?.toLowerCase() || '';\n const mimeMap: Record<string, string> = {\n pdf: 'application/pdf', json: 'application/json', csv: 'text/csv',\n txt: 'text/plain', md: 'text/markdown', html: 'text/html',\n png: 'image/png', jpg: 'image/jpeg', jpeg: 'image/jpeg',\n gif: 'image/gif', svg: 'image/svg+xml', mp4: 'video/mp4',\n zip: 'application/zip',\n };\n const contentType = mimeMap[ext] || 'application/octet-stream';\n\n const uploadInfo = await storageQueries.getUploadUrl(\n this.session.id,\n fileName,\n contentType,\n 'task-output'\n );\n\n const fileData = await readFile(fullPath);\n await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': contentType },\n body: fileData,\n });\n\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: fileData.length });\n\n const downloadInfo = await storageQueries.getDownloadUrl(uploadInfo.fileId);\n urls.push(downloadInfo.downloadUrl);\n\n console.log(`[TASK] Uploaded file: ${fileName} (${fileData.length} bytes)`);\n } catch (err: any) {\n console.error(`[TASK] Failed to upload file ${filePath}:`, err.message);\n }\n }\n\n return urls;\n } catch (err: any) {\n console.error('[TASK] File upload failed:', err.message);\n return [];\n }\n }\n\n /**\n * 
Wrap tools to add approval checking\n */\n private wrapToolsWithApproval(options: AgentRunOptions, tools?: ToolSet): ToolSet {\n const sessionConfig = this.session.config;\n const wrappedTools: ToolSet = {};\n const toolsToWrap = tools || this.baseTools;\n\n for (const [name, originalTool] of Object.entries(toolsToWrap)) {\n const needsApproval = requiresApproval(name, sessionConfig ?? undefined);\n\n if (!needsApproval) {\n wrappedTools[name] = originalTool;\n continue;\n }\n\n // Create wrapped tool that checks for approval and waits\n wrappedTools[name] = tool({\n description: originalTool.description || '',\n inputSchema: (originalTool as any).inputSchema || z.object({}),\n execute: async (input: unknown, toolOptions: { toolCallId?: string }) => {\n const toolCallId = toolOptions.toolCallId || nanoid();\n\n // Record the execution\n const execution = toolExecutionQueries.create({\n sessionId: this.session.id,\n toolName: name,\n toolCallId,\n input: input as any,\n requiresApproval: true,\n status: 'pending',\n });\n\n // Store pending approval\n this.pendingApprovals.set(toolCallId, await execution);\n\n // Notify about approval requirement\n options.onApprovalRequired?.(await execution);\n\n // Update session status\n await sessionQueries.updateStatus(this.session.id, 'waiting');\n\n // Wait for approval decision (using shared store for cross-request access)\n const approved = await new Promise<boolean>((resolve) => {\n approvalResolvers.set(toolCallId, { resolve, sessionId: this.session.id });\n });\n\n // Get any rejection reason\n const resolverData = approvalResolvers.get(toolCallId);\n approvalResolvers.delete(toolCallId);\n this.pendingApprovals.delete(toolCallId);\n\n const exec = await execution;\n if (!approved) {\n // Tool was rejected\n const reason = resolverData?.reason || 'User rejected the tool execution';\n await toolExecutionQueries.reject(exec.id);\n await sessionQueries.updateStatus(this.session.id, 'active');\n \n return {\n status: 
'rejected',\n toolCallId,\n rejected: true,\n reason,\n message: `Tool \"${name}\" was rejected by the user. Reason: ${reason}`,\n };\n }\n\n // Tool was approved - execute the original tool\n await toolExecutionQueries.approve(exec.id);\n await sessionQueries.updateStatus(this.session.id, 'active');\n\n try {\n const result = await (originalTool as any).execute(input, toolOptions);\n await toolExecutionQueries.complete(exec.id, result);\n return result;\n } catch (error: any) {\n await toolExecutionQueries.complete(exec.id, null, error.message);\n throw error;\n }\n },\n });\n }\n\n return wrappedTools;\n }\n\n /**\n * Wait for all pending approvals\n */\n async waitForApprovals(): Promise<ToolExecution[]> {\n return Array.from(this.pendingApprovals.values());\n }\n\n /**\n * Approve a pending tool execution\n */\n async approve(toolCallId: string): Promise<{ approved: true }> {\n // Check shared resolver store (the streaming Agent is waiting on this)\n const resolver = approvalResolvers.get(toolCallId);\n if (resolver) {\n resolver.resolve(true);\n return { approved: true };\n }\n\n // Fall back to database lookup\n const pendingFromDb = await toolExecutionQueries.getPendingApprovals(this.session.id);\n const execution = pendingFromDb.find((e: ToolExecution) => e.toolCallId === toolCallId);\n \n if (!execution) {\n throw new Error(`No pending approval for tool call: ${toolCallId}`);\n }\n\n // Mark as approved in DB\n await toolExecutionQueries.approve(execution.id);\n return { approved: true };\n }\n\n /**\n * Reject a pending tool execution\n */\n async reject(toolCallId: string, reason?: string): Promise<{ rejected: true }> {\n // Check shared resolver store (the streaming Agent is waiting on this)\n const resolver = approvalResolvers.get(toolCallId);\n if (resolver) {\n resolver.reason = reason;\n resolver.resolve(false);\n return { rejected: true };\n }\n\n // Fall back to database lookup\n const pendingFromDb = await 
toolExecutionQueries.getPendingApprovals(this.session.id);\n const execution = pendingFromDb.find((e: ToolExecution) => e.toolCallId === toolCallId);\n \n if (!execution) {\n throw new Error(`No pending approval for tool call: ${toolCallId}`);\n }\n\n // Mark as rejected in DB\n await toolExecutionQueries.reject(execution.id);\n return { rejected: true };\n }\n\n /**\n * Get pending approvals\n */\n async getPendingApprovals(): Promise<ToolExecution[]> {\n return toolExecutionQueries.getPendingApprovals(this.session.id);\n }\n\n /**\n * Get context statistics\n */\n getContextStats() {\n return this.context.getStats();\n }\n\n /**\n * Clear conversation context (start fresh)\n */\n clearContext(): void {\n this.context.clear();\n }\n}\n\nexport { ContextManager } from './context.js';\nexport { buildSystemPrompt, buildTaskPromptAddendum } from './prompts.js';\n","import { gateway } from '@ai-sdk/gateway';\nimport type { LanguageModel } from 'ai';\nimport { createRemoteModel } from './remote-model.js';\nimport { getConfig } from '../config/index.js';\n\nconst ANTHROPIC_PREFIX = 'anthropic/';\nconst GOOGLE_PREFIX = 'google/';\n\n/**\n * Check if a model ID is an Anthropic model (for provider-specific options).\n */\nexport function isAnthropicModel(modelId: string): boolean {\n const normalized = modelId.trim().toLowerCase();\n return normalized.startsWith(ANTHROPIC_PREFIX) || normalized.startsWith('claude-');\n}\n\n/**\n * Check if a model ID is a Google model.\n */\nexport function isGoogleModel(modelId: string): boolean {\n const normalized = modelId.trim().toLowerCase();\n return normalized.startsWith(GOOGLE_PREFIX) || normalized.startsWith('gemini-');\n}\n\n/**\n * Resolves a model ID to a LanguageModel instance.\n *\n * When a remote server is configured, routes through the remote inference\n * proxy so API keys never leave the server. 
Falls back to local AI Gateway\n * for development/offline use.\n */\nexport function resolveModel(modelId: string): LanguageModel {\n try {\n const config = getConfig();\n if (config.resolvedRemoteServer.isConfigured) {\n return createRemoteModel(modelId.trim(), {\n url: config.resolvedRemoteServer.url!,\n authKey: config.resolvedRemoteServer.authKey!,\n }) as LanguageModel;\n }\n } catch {\n // Config not loaded yet (e.g. during import-time calls) - fall through to local\n }\n return gateway(modelId.trim());\n}\n\n// Default models for subagents (smaller, faster models)\nexport const SUBAGENT_MODELS = {\n search: 'google/gemini-3-flash-preview',\n analyze: 'google/gemini-3-flash-preview',\n default: 'google/gemini-3-flash-preview',\n} as const;\n","/**\n * Remote inference proxy model.\n *\n * Implements the LanguageModelV3 interface by forwarding doGenerate/doStream\n * calls to the remote server's /inference endpoints. API keys never leave\n * the remote server.\n */\n\nexport interface RemoteModelConfig {\n url: string;\n authKey: string;\n}\n\ninterface CallOptions {\n prompt: any[];\n abortSignal?: AbortSignal;\n [key: string]: unknown;\n}\n\n/**\n * Serialize prompt content for JSON transport.\n * Converts Uint8Array data to base64 with a marker flag.\n */\nfunction serializePrompt(prompt: any[]): any[] {\n return prompt.map((msg: any) => {\n if (!Array.isArray(msg.content)) return msg;\n return {\n ...msg,\n content: msg.content.map((part: any) => {\n if (part.type === 'file' && part.data instanceof Uint8Array) {\n return {\n ...part,\n data: Buffer.from(part.data).toString('base64'),\n _base64: true,\n };\n }\n return part;\n }),\n };\n });\n}\n\n/**\n * Deserialize a stream part from JSON transport.\n * Restores Uint8Array from base64-encoded markers.\n */\nfunction deserializeValue(value: any): any {\n if (value && typeof value === 'object') {\n if (value.__uint8array && typeof value.data === 'string') {\n return Buffer.from(value.data, 'base64');\n }\n 
if (Array.isArray(value)) {\n return value.map(deserializeValue);\n }\n const result: any = {};\n for (const [k, v] of Object.entries(value)) {\n result[k] = deserializeValue(v);\n }\n return result;\n }\n return value;\n}\n\n/**\n * Strip non-serializable fields from call options before sending over HTTP.\n */\nfunction prepareOptions(options: CallOptions): Record<string, unknown> {\n const { abortSignal, ...rest } = options;\n return {\n ...rest,\n prompt: serializePrompt(options.prompt),\n };\n}\n\n/**\n * Creates a LanguageModelV3-compatible model that proxies all inference\n * requests to the remote server. The returned object satisfies the\n * LanguageModelV3 interface structurally.\n */\nexport function createRemoteModel(\n modelId: string,\n config: RemoteModelConfig,\n) {\n const baseUrl = config.url.replace(/\\/$/, '');\n const headers = {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${config.authKey}`,\n };\n\n return {\n specificationVersion: 'v3',\n provider: 'remote-proxy',\n modelId,\n supportedUrls: {},\n\n async doGenerate(options: CallOptions) {\n const res = await fetch(`${baseUrl}/inference/generate`, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n modelId,\n options: prepareOptions(options),\n }),\n signal: options.abortSignal,\n });\n\n if (!res.ok) {\n const err = await res.json().catch(() => ({})) as { error?: string; details?: any };\n const detail = formatRemoteError(res.status, modelId, err);\n throw new Error(detail);\n }\n\n const result = await res.json();\n return deserializeValue(result);\n },\n\n async doStream(options: CallOptions) {\n const res = await fetch(`${baseUrl}/inference/stream`, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n modelId,\n options: prepareOptions(options),\n }),\n signal: options.abortSignal,\n });\n\n if (!res.ok) {\n const err = await res.json().catch(() => ({})) as { error?: string; details?: any };\n const detail = formatRemoteError(res.status, modelId, err);\n 
throw new Error(detail);\n }\n\n const reader = res.body!.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n\n const stream = new ReadableStream({\n async pull(controller) {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n if (buffer.trim()) {\n try {\n const parsed = deserializeValue(JSON.parse(buffer.trim()));\n if (parsed.type === 'error') {\n controller.error(new Error(parsed.error));\n } else {\n controller.enqueue(parsed);\n }\n } catch { /* ignore partial data */ }\n }\n controller.close();\n return;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (!line.trim()) continue;\n try {\n const parsed = deserializeValue(JSON.parse(line));\n if (parsed.type === 'error') {\n controller.error(new Error(parsed.error));\n return;\n }\n controller.enqueue(parsed);\n } catch {\n // skip malformed lines\n }\n }\n }\n },\n cancel() {\n reader.cancel();\n },\n });\n\n const responseHeaders: Record<string, string> = {};\n res.headers.forEach((v, k) => {\n if (k.startsWith('x-upstream-')) {\n responseHeaders[k.replace('x-upstream-', '')] = v;\n }\n });\n\n return {\n stream,\n response: Object.keys(responseHeaders).length > 0\n ? 
{ headers: responseHeaders }\n : undefined,\n };\n },\n };\n}\n\nfunction formatRemoteError(\n status: number,\n modelId: string,\n body: { error?: string; details?: any },\n): string {\n const parts = [`Remote inference failed (${status}) for ${modelId}`];\n\n if (body.error) parts.push(body.error);\n\n if (body.details) {\n const d = body.details;\n if (d.type) parts.push(`type=${d.type}`);\n if (d.statusCode && d.statusCode !== status) parts.push(`upstream=${d.statusCode}`);\n if (d.cause) parts.push(`cause: ${d.cause}`);\n if (d.orderWarnings?.length) parts.push(`prompt issues: ${d.orderWarnings.join('; ')}`);\n }\n\n return parts.join(' — ');\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { truncateOutput } from '../utils/truncate.js';\nimport * as tmux from '../terminal/tmux.js';\n\nconst execAsync = promisify(exec);\n\nconst COMMAND_TIMEOUT = 120_000; // 2 minutes for sync commands\nconst MAX_OUTPUT_CHARS = 10_000;\n\n// Commands that are blocked for safety\nconst BLOCKED_COMMANDS = [\n 'rm -rf /',\n 'rm -rf ~',\n 'mkfs',\n 'dd if=/dev/zero',\n ':(){:|:&};:',\n 'chmod -R 777 /',\n];\n\n/**\n * Check if a command is blocked\n */\nfunction isBlockedCommand(command: string): boolean {\n const normalizedCommand = command.toLowerCase().trim();\n return BLOCKED_COMMANDS.some((blocked) =>\n normalizedCommand.includes(blocked.toLowerCase())\n );\n}\n\nexport interface BashToolProgress {\n terminalId: string;\n status: 'started' | 'running' | 'completed';\n command?: string;\n browserStreamPort?: number;\n browserClosed?: boolean;\n}\n\nexport interface BashToolOptions {\n workingDirectory: string;\n sessionId: string;\n onOutput?: (output: string) => void;\n onProgress?: (progress: BashToolProgress) => void;\n}\n\nconst BROWSER_STREAM_BASE_PORT = 9223;\nconst sessionBrowserPorts = new Map<string, number>();\nlet nextPortOffset = 0;\n\nfunction 
getBrowserStreamPort(sessionId: string): number {\n let port = sessionBrowserPorts.get(sessionId);\n if (!port) {\n port = BROWSER_STREAM_BASE_PORT + nextPortOffset++;\n sessionBrowserPorts.set(sessionId, port);\n }\n return port;\n}\n\nfunction hasAgentBrowserCommand(command: string): boolean {\n return /\\bagent-browser\\b/.test(command);\n}\n\nfunction isAgentBrowserOpenCommand(command: string): boolean {\n return /\\bagent-browser\\s+open\\b/.test(command);\n}\n\nfunction isAgentBrowserCloseCommand(command: string): boolean {\n return /\\bagent-browser\\s+(close|close\\s+--all)\\b/.test(command);\n}\n\n/**\n * Inject AGENT_BROWSER_STREAM_PORT into every `agent-browser` invocation\n * in the command so the WS server stays active across chained commands.\n */\nfunction injectBrowserStreamPort(command: string, port: number): string {\n return command.replace(\n /\\bagent-browser\\b/g,\n `AGENT_BROWSER_STREAM_PORT=${port} agent-browser`\n );\n}\n\n// Unified bash tool schema - Option A (minimal flags)\nconst bashInputSchema = z.object({\n command: z\n .string()\n .optional()\n .describe('The command to execute. Required for running new commands.'),\n background: z\n .boolean()\n .default(false)\n .describe('Run the command in background mode (for dev servers, watchers). Returns immediately with terminal ID.'),\n id: z\n .string()\n .optional()\n .describe('Terminal ID. Use to get logs from, send input to, or kill an existing terminal.'),\n kill: z\n .boolean()\n .optional()\n .describe('Kill the terminal with the given ID.'),\n tail: z\n .number()\n .optional()\n .describe('Number of lines to return from the end of output (for logs).'),\n input: z\n .string()\n .optional()\n .describe('Send text input to an interactive terminal (requires id). Used for responding to prompts.'),\n key: z\n .enum(['Enter', 'Escape', 'Up', 'Down', 'Left', 'Right', 'Tab', 'C-c', 'C-d', 'y', 'n'])\n .optional()\n .describe('Send a special key to an interactive terminal (requires id). 
Use \"y\" or \"n\" for yes/no prompts.'),\n});\n\ntype BashInput = z.infer<typeof bashInputSchema>;\n\n// Cache tmux availability at startup\nlet useTmux: boolean | null = null;\n\nasync function shouldUseTmux(): Promise<boolean> {\n if (useTmux === null) {\n useTmux = await tmux.isTmuxAvailable();\n if (!useTmux) {\n console.warn('[bash] tmux not available, using fallback exec mode');\n }\n }\n return useTmux;\n}\n\n/**\n * Fallback implementation using exec (when tmux is not available)\n */\nasync function execFallback(\n command: string,\n workingDirectory: string,\n onOutput?: (output: string) => void\n): Promise<{ success: boolean; output: string; exitCode: number; error?: string }> {\n try {\n const { stdout, stderr } = await execAsync(command, {\n cwd: workingDirectory,\n timeout: COMMAND_TIMEOUT,\n maxBuffer: 10 * 1024 * 1024,\n });\n\n const output = truncateOutput(stdout + (stderr ? `\\n${stderr}` : ''), MAX_OUTPUT_CHARS);\n onOutput?.(output);\n\n return {\n success: true,\n output,\n exitCode: 0,\n };\n } catch (error: any) {\n const output = truncateOutput(\n (error.stdout || '') + (error.stderr ? `\\n${error.stderr}` : ''),\n MAX_OUTPUT_CHARS\n );\n onOutput?.(output || error.message);\n\n if (error.killed) {\n return {\n success: false,\n error: `Command timed out after ${COMMAND_TIMEOUT / 1000} seconds`,\n output,\n exitCode: 124,\n };\n }\n\n return {\n success: false,\n error: error.message,\n output,\n exitCode: error.code ?? 1,\n };\n }\n}\n\nexport function createBashTool(options: BashToolOptions) {\n return tool({\n description: `Execute commands in the terminal. 
Every command runs in its own session with logs saved to disk.\n\n**Run a command (default - waits for completion):**\nbash({ command: \"npm install\" })\nbash({ command: \"git status\" })\n\n**Run in background (for dev servers, watchers, or interactive commands):**\nbash({ command: \"npm run dev\", background: true })\n→ Returns { id: \"abc123\" } - save this ID\n\n**Check on a background process:**\nbash({ id: \"abc123\" })\nbash({ id: \"abc123\", tail: 50 }) // last 50 lines only\n\n**Stop a background process:**\nbash({ id: \"abc123\", kill: true })\n\n**Respond to interactive prompts (for yes/no questions, etc.):**\nbash({ id: \"abc123\", key: \"y\" }) // send 'y' for yes\nbash({ id: \"abc123\", key: \"n\" }) // send 'n' for no\nbash({ id: \"abc123\", key: \"Enter\" }) // press Enter\nbash({ id: \"abc123\", input: \"my text\" }) // send text input\n\n**IMPORTANT for interactive commands:**\n- Use --yes, -y, or similar flags to avoid prompts when available\n- For create-next-app: add --yes to accept defaults\n- For npm: add --yes or -y to skip confirmation\n- If prompts are unavoidable, run in background mode and use input/key to respond\n\nTerminal output is stored in the global SparkECoder data directory. Use the \\`tail\\` option to read recent output.`,\n\n inputSchema: bashInputSchema,\n\n execute: async (inputArgs: BashInput) => {\n const { command, background, id, kill, tail, input: textInput, key } = inputArgs;\n\n // Handle terminal management (id-based operations)\n if (id) {\n // Kill a terminal\n if (kill) {\n const success = await tmux.killTerminal(id);\n return {\n success,\n id,\n status: success ? 'stopped' : 'not_found',\n message: success ? 
`Terminal ${id} stopped` : `Terminal ${id} not found or already stopped`,\n };\n }\n\n // Send input to an interactive terminal\n if (textInput !== undefined) {\n const success = await tmux.sendInput(id, textInput, { pressEnter: true });\n if (!success) {\n return {\n success: false,\n id,\n error: `Terminal ${id} not found or not running`,\n };\n }\n \n // Wait a moment for the input to be processed, then get logs\n await new Promise(r => setTimeout(r, 300));\n const { output, status } = await tmux.getLogs(id, options.workingDirectory, { tail: tail || 50, sessionId: options.sessionId });\n const truncatedOutput = truncateOutput(output, MAX_OUTPUT_CHARS);\n \n return {\n success: true,\n id,\n output: truncatedOutput,\n status,\n message: `Sent input \"${textInput}\" to terminal`,\n };\n }\n\n // Send a special key to an interactive terminal\n if (key) {\n const success = await tmux.sendKey(id, key);\n if (!success) {\n return {\n success: false,\n id,\n error: `Terminal ${id} not found or not running`,\n };\n }\n \n // Wait a moment for the key to be processed, then get logs\n await new Promise(r => setTimeout(r, 300));\n const { output, status } = await tmux.getLogs(id, options.workingDirectory, { tail: tail || 50, sessionId: options.sessionId });\n const truncatedOutput = truncateOutput(output, MAX_OUTPUT_CHARS);\n \n return {\n success: true,\n id,\n output: truncatedOutput,\n status,\n message: `Sent key \"${key}\" to terminal`,\n };\n }\n\n // Get logs/status from a terminal\n const { output, status } = await tmux.getLogs(id, options.workingDirectory, { tail, sessionId: options.sessionId });\n const truncatedOutput = truncateOutput(output, MAX_OUTPUT_CHARS);\n\n return {\n success: true,\n id,\n output: truncatedOutput,\n status,\n };\n }\n\n // Running a new command requires the command parameter\n if (!command) {\n return {\n success: false,\n error: 'Either \"command\" (to run a new command) or \"id\" (to check/kill/send input) is required',\n };\n }\n\n 
// Safety check\n if (isBlockedCommand(command)) {\n return {\n success: false,\n error: 'This command is blocked for safety reasons.',\n output: '',\n exitCode: 1,\n };\n }\n\n // Detect agent-browser commands and inject streaming port.\n // We inject on ALL agent-browser commands so the WS server stays alive\n // across open, screenshot, eval, snapshot, etc.\n let actualCommand = command;\n const hasAgentBrowser = hasAgentBrowserCommand(command);\n const browserClose = isAgentBrowserCloseCommand(command);\n let browserPort: number | undefined;\n\n if (hasAgentBrowser) {\n browserPort = getBrowserStreamPort(options.sessionId);\n if (!browserClose) {\n actualCommand = injectBrowserStreamPort(command, browserPort);\n }\n }\n\n // Check if we can use tmux\n const canUseTmux = await shouldUseTmux();\n\n if (background) {\n // Background mode\n if (!canUseTmux) {\n return {\n success: false,\n error: 'Background mode requires tmux to be installed. Install with: brew install tmux (macOS) or apt install tmux (Linux)',\n };\n }\n\n // Generate terminal ID upfront and emit progress\n const terminalId = tmux.generateTerminalId();\n options.onProgress?.({ terminalId, status: 'started', command, browserStreamPort: browserPort });\n\n const result = await tmux.runBackground(actualCommand, options.workingDirectory, {\n sessionId: options.sessionId,\n terminalId,\n });\n\n return {\n success: true,\n id: result.id,\n status: 'running',\n message: `Started background process. 
Use bash({ id: \"${result.id}\" }) to check logs.`,\n };\n }\n\n // Sync mode (default)\n if (canUseTmux) {\n const terminalId = tmux.generateTerminalId();\n options.onProgress?.({ terminalId, status: 'started', command, browserStreamPort: browserPort });\n\n try {\n const result = await tmux.runSync(actualCommand, options.workingDirectory, {\n sessionId: options.sessionId,\n timeout: COMMAND_TIMEOUT,\n terminalId,\n });\n\n const truncatedOutput = truncateOutput(result.output, MAX_OUTPUT_CHARS);\n options.onOutput?.(truncatedOutput);\n\n options.onProgress?.({\n terminalId,\n status: 'completed',\n command,\n browserStreamPort: browserPort,\n browserClosed: browserClose || undefined,\n });\n\n return {\n success: result.exitCode === 0,\n id: result.id,\n output: truncatedOutput,\n exitCode: result.exitCode,\n status: result.status,\n };\n } catch (error: any) {\n options.onProgress?.({ terminalId, status: 'completed', command });\n return {\n success: false,\n error: error.message,\n output: '',\n exitCode: 1,\n };\n }\n } else {\n // Fallback to exec (no tmux)\n const result = await execFallback(actualCommand, options.workingDirectory, options.onOutput);\n return {\n success: result.success,\n output: result.output,\n exitCode: result.exitCode,\n error: result.error,\n };\n }\n },\n });\n}\n\nexport type BashTool = ReturnType<typeof createBashTool>;\n","/**\n * Lightweight token estimation using the ~4 chars/token heuristic.\n * Accurate enough for context window budgeting without pulling in a tokenizer.\n */\n\nconst CHARS_PER_TOKEN = 4;\nconst MESSAGE_OVERHEAD_TOKENS = 4;\n\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / CHARS_PER_TOKEN);\n}\n\nexport function estimateMessageTokens(messages: Array<{ role: string; content: unknown }>): number {\n return messages.reduce((total, msg) => {\n const content = typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content);\n return total + estimateTokens(content) + MESSAGE_OVERHEAD_TOKENS;\n }, 0);\n}\n","import { estimateMessageTokens } from './tokens.js';\n\nconst MAX_OUTPUT_CHARS = 10_000;\n\n/**\n * Truncate a string if it exceeds the max length\n */\nexport function truncateOutput(\n output: string,\n maxChars: number = MAX_OUTPUT_CHARS\n): string {\n if (output.length <= maxChars) {\n return output;\n }\n\n const halfMax = Math.floor(maxChars / 2);\n const truncatedChars = output.length - maxChars;\n\n return (\n output.slice(0, halfMax) +\n `\\n\\n... [TRUNCATED: ${truncatedChars.toLocaleString()} characters omitted] ...\\n\\n` +\n output.slice(-halfMax)\n );\n}\n\n/**\n * Calculate the total character count of messages\n */\nexport function calculateContextSize(messages: Array<{ content: unknown }>): number {\n return messages.reduce((total, msg) => {\n const content = typeof msg.content === 'string' \n ? msg.content \n : JSON.stringify(msg.content);\n return total + content.length;\n }, 0);\n}\n\n/**\n * Calculate the estimated token count of messages\n */\nexport function calculateContextTokens(messages: Array<{ role: string; content: unknown }>): number {\n return estimateMessageTokens(messages);\n}\n\n/**\n * Format bytes to human readable string\n */\nexport function formatBytes(bytes: number): string {\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;\n}\n\n/**\n * Format number with commas\n */\nexport function formatNumber(num: number): string {\n return num.toLocaleString();\n}\n","/**\n * tmux wrapper for terminal session management\n * \n * Provides a thin abstraction over tmux commands for:\n * - Session creation and management\n * - Output capture and logging\n * - Process lifecycle management\n */\n\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { mkdir, 
writeFile, readFile } from 'node:fs/promises';\nimport { existsSync, mkdirSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { nanoid } from 'nanoid';\nimport { getAppDataDirectory } from '../config/index.js';\n\nconst execAsync = promisify(exec);\n\n// Session prefix for all sparkecoder terminals\nconst SESSION_PREFIX = 'spark_';\n\n// Log directory base path - stored in global app data directory\nconst LOG_BASE_DIR = 'sessions';\n\nexport interface TerminalMeta {\n id: string;\n command: string;\n cwd: string;\n createdAt: string;\n sessionId: string;\n background: boolean;\n name?: string;\n}\n\nexport interface TerminalResult {\n id: string;\n output: string;\n exitCode: number;\n status: 'completed' | 'running' | 'stopped' | 'error';\n}\n\n// Cache tmux availability check\nlet tmuxAvailableCache: boolean | null = null;\n\n/**\n * Check if tmux is installed and available\n */\nexport async function isTmuxAvailable(): Promise<boolean> {\n if (tmuxAvailableCache !== null) {\n return tmuxAvailableCache;\n }\n \n try {\n const { stdout } = await execAsync('tmux -V');\n tmuxAvailableCache = true;\n // console.log(`[tmux] Available: ${stdout.trim()}`);\n return true;\n } catch (error) {\n tmuxAvailableCache = false;\n console.log(`[tmux] Not available: ${error instanceof Error ? 
error.message : 'unknown error'}`);\n return false;\n }\n}\n\n/**\n * Generate a unique terminal ID\n * Ensure it starts with a letter (tmux session names work better this way)\n */\nexport function generateTerminalId(): string {\n // Prefix with 't' to ensure it starts with a letter (nanoid can start with - or _)\n return 't' + nanoid(9);\n}\n\n/**\n * Get the tmux session name for a terminal ID\n */\nexport function getSessionName(terminalId: string): string {\n return `${SESSION_PREFIX}${terminalId}`;\n}\n\n/**\n * Get the global terminal data directory\n * Uses OS-appropriate app data location (not the working directory)\n */\nfunction getTerminalDataDir(): string {\n const appDataDir = getAppDataDirectory();\n // Ensure directory exists\n if (!existsSync(appDataDir)) {\n mkdirSync(appDataDir, { recursive: true });\n }\n return appDataDir;\n}\n\n/**\n * Get the log directory for a terminal (stored in global app data, not working directory)\n */\nexport function getLogDir(terminalId: string, _workingDirectory: string, sessionId?: string): string {\n const baseDir = getTerminalDataDir();\n if (sessionId) {\n // Session-scoped path: ~/Library/Application Support/sparkecoder/sessions/{sessionId}/terminals/{terminalId}/\n return join(baseDir, LOG_BASE_DIR, sessionId, 'terminals', terminalId);\n }\n // Fallback for legacy terminals without sessionId\n return join(baseDir, 'terminals', terminalId);\n}\n\n/**\n * Escape a string for shell command\n */\nfunction shellEscape(str: string): string {\n // Use single quotes and escape any single quotes in the string\n return `'${str.replace(/'/g, \"'\\\\''\")}'`;\n}\n\n/**\n * Create log directory and metadata file\n */\nasync function initLogDir(terminalId: string, meta: TerminalMeta, workingDirectory: string): Promise<string> {\n const logDir = getLogDir(terminalId, workingDirectory, meta.sessionId);\n await mkdir(logDir, { recursive: true });\n await writeFile(join(logDir, 'meta.json'), JSON.stringify(meta, null, 2));\n 
// Create empty output.log\n await writeFile(join(logDir, 'output.log'), '');\n return logDir;\n}\n\n/**\n * Poll until a condition is met or timeout\n */\nasync function pollUntil(\n condition: () => Promise<boolean>,\n options: { timeout: number; interval?: number }\n): Promise<boolean> {\n const { timeout, interval = 100 } = options;\n const startTime = Date.now();\n \n while (Date.now() - startTime < timeout) {\n if (await condition()) {\n return true;\n }\n await new Promise(r => setTimeout(r, interval));\n }\n \n return false;\n}\n\n/**\n * Run a command synchronously in tmux (waits for completion)\n */\nexport async function runSync(\n command: string,\n workingDirectory: string,\n options: { sessionId: string; timeout?: number; terminalId?: string }\n): Promise<TerminalResult> {\n if (!options) {\n throw new Error('runSync: options parameter is required (must include sessionId)');\n }\n const id = options.terminalId || generateTerminalId();\n const session = getSessionName(id);\n const logDir = await initLogDir(id, {\n id,\n command,\n cwd: workingDirectory,\n createdAt: new Date().toISOString(),\n sessionId: options.sessionId,\n background: false,\n }, workingDirectory);\n \n const logFile = join(logDir, 'output.log');\n const exitCodeFile = join(logDir, 'exit_code');\n const timeout = options.timeout || 120000; // 2 minute default\n \n try {\n // Wrap command to write exit code to a file when done\n // Also write output to the log file directly (more reliable than pipe-pane for quick commands)\n const wrappedCommand = `(${command}) 2>&1 | tee -a ${shellEscape(logFile)}; echo $? 
> ${shellEscape(exitCodeFile)}`;\n \n // Start tmux session\n await execAsync(\n `tmux new-session -d -s ${session} -c ${shellEscape(workingDirectory)} ${shellEscape(wrappedCommand)}`,\n { timeout: 5000 }\n );\n \n // Try to pipe output to log file (may fail if command completes quickly, that's ok)\n try {\n await execAsync(\n `tmux pipe-pane -t ${session} -o 'cat >> ${shellEscape(logFile)}'`,\n { timeout: 1000 }\n );\n } catch {\n // Session may have already ended - that's fine, we use tee in the command\n }\n \n // Poll until session ends or timeout\n const completed = await pollUntil(\n async () => {\n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 1000 });\n return false; // Session still exists\n } catch {\n return true; // Session ended\n }\n },\n { timeout, interval: 100 }\n );\n \n if (!completed) {\n // Timeout - kill the session\n try {\n await execAsync(`tmux kill-session -t ${session}`, { timeout: 5000 });\n } catch {\n // Ignore\n }\n \n // Read whatever output we have\n let output = '';\n try {\n output = await readFile(logFile, 'utf-8');\n } catch {\n // Ignore\n }\n \n return {\n id,\n output: output.trim(),\n exitCode: 124, // Standard timeout exit code\n status: 'error',\n };\n }\n \n // Session ended - read output and exit code\n // Give a moment for log file to be flushed\n await new Promise(r => setTimeout(r, 50));\n \n let output = '';\n try {\n output = await readFile(logFile, 'utf-8');\n } catch {\n // Ignore\n }\n \n // Read exit code\n let exitCode = 0;\n try {\n if (existsSync(exitCodeFile)) {\n const exitCodeStr = await readFile(exitCodeFile, 'utf-8');\n exitCode = parseInt(exitCodeStr.trim(), 10) || 0;\n }\n } catch {\n // Ignore exit code read errors\n }\n \n return {\n id,\n output: output.trim(),\n exitCode,\n status: 'completed',\n };\n } catch (error: any) {\n // Try to kill the session on any error\n try {\n await execAsync(`tmux kill-session -t ${session}`, { timeout: 5000 });\n } catch {\n // Ignore\n }\n 
\n throw error;\n }\n}\n\n/**\n * Run a command in the background (returns immediately)\n */\nexport async function runBackground(\n command: string,\n workingDirectory: string,\n options: { sessionId: string; terminalId?: string; name?: string }\n): Promise<TerminalResult> {\n if (!options) {\n throw new Error('runBackground: options parameter is required (must include sessionId)');\n }\n const id = options.terminalId || generateTerminalId();\n const session = getSessionName(id);\n const logDir = await initLogDir(id, {\n id,\n command,\n cwd: workingDirectory,\n createdAt: new Date().toISOString(),\n sessionId: options.sessionId,\n background: true,\n name: options.name,\n }, workingDirectory);\n \n const logFile = join(logDir, 'output.log');\n \n // Wrap command to log output via tee (more reliable than pipe-pane)\n const wrappedCommand = `(${command}) 2>&1 | tee -a ${shellEscape(logFile)}`;\n \n // Start tmux session (don't wait for completion)\n await execAsync(\n `tmux new-session -d -s ${session} -c ${shellEscape(workingDirectory)} ${shellEscape(wrappedCommand)}`,\n { timeout: 5000 }\n );\n \n return {\n id,\n output: '',\n exitCode: 0,\n status: 'running',\n };\n}\n\n/**\n * Get logs from a terminal\n */\nexport async function getLogs(\n terminalId: string,\n workingDirectory: string,\n options: { tail?: number; sessionId?: string } = {}\n): Promise<{ output: string; status: 'running' | 'stopped' | 'unknown' }> {\n const session = getSessionName(terminalId);\n const logDir = getLogDir(terminalId, workingDirectory, options.sessionId);\n const logFile = join(logDir, 'output.log');\n \n // Check if session is still running\n let isRunning = false;\n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 5000 });\n isRunning = true;\n } catch {\n // Session not running\n }\n \n // Try to capture from tmux first (more up-to-date)\n if (isRunning) {\n try {\n const lines = options.tail || 1000;\n const { stdout } = await execAsync(\n `tmux 
capture-pane -t ${session} -p -S -${lines}`,\n { timeout: 5000, maxBuffer: 10 * 1024 * 1024 }\n );\n return { output: stdout.trim(), status: 'running' };\n } catch {\n // Fall through to file-based approach\n }\n }\n \n // Fall back to log file\n try {\n let output = await readFile(logFile, 'utf-8');\n \n if (options.tail) {\n const lines = output.split('\\n');\n output = lines.slice(-options.tail).join('\\n');\n }\n \n return { output: output.trim(), status: isRunning ? 'running' : 'stopped' };\n } catch {\n return { output: '', status: 'unknown' };\n }\n}\n\n/**\n * Check if a terminal is running\n */\nexport async function isRunning(terminalId: string): Promise<boolean> {\n const session = getSessionName(terminalId);\n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 5000 });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Kill a terminal session\n */\nexport async function killTerminal(terminalId: string): Promise<boolean> {\n const session = getSessionName(terminalId);\n try {\n await execAsync(`tmux kill-session -t ${session}`, { timeout: 5000 });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * List all sparkecoder terminal sessions\n */\nexport async function listSessions(): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `tmux list-sessions -F '#{session_name}' 2>/dev/null || true`,\n { timeout: 5000 }\n );\n \n return stdout\n .trim()\n .split('\\n')\n .filter(name => name.startsWith(SESSION_PREFIX))\n .map(name => name.slice(SESSION_PREFIX.length));\n } catch {\n return [];\n }\n}\n\n/**\n * Get metadata for a terminal\n */\nexport async function getMeta(terminalId: string, workingDirectory: string, sessionId?: string): Promise<TerminalMeta | null> {\n const logDir = getLogDir(terminalId, workingDirectory, sessionId);\n const metaFile = join(logDir, 'meta.json');\n \n try {\n const content = await readFile(metaFile, 'utf-8');\n return JSON.parse(content);\n } catch {\n return null;\n 
}\n}\n\n/**\n * List all terminals for a session\n */\nexport async function listSessionTerminals(\n sessionId: string,\n workingDirectory: string\n): Promise<TerminalMeta[]> {\n const terminalsDir = join(workingDirectory, LOG_BASE_DIR, sessionId, 'terminals');\n const terminals: TerminalMeta[] = [];\n \n try {\n const { readdir } = await import('node:fs/promises');\n const entries = await readdir(terminalsDir, { withFileTypes: true });\n \n for (const entry of entries) {\n if (entry.isDirectory()) {\n const meta = await getMeta(entry.name, workingDirectory, sessionId);\n if (meta) {\n terminals.push(meta);\n }\n }\n }\n } catch {\n // Directory doesn't exist or can't be read\n }\n \n return terminals;\n}\n\n/**\n * Send input (keystrokes) to a running terminal\n * Use this to respond to interactive prompts\n */\nexport async function sendInput(terminalId: string, input: string, options: { pressEnter?: boolean } = {}): Promise<boolean> {\n const session = getSessionName(terminalId);\n const { pressEnter = true } = options;\n \n try {\n // Check if session exists first\n await execAsync(`tmux has-session -t ${session}`, { timeout: 1000 });\n \n // Send the input using tmux send-keys with -l (literal) flag\n await execAsync(\n `tmux send-keys -t ${session} -l ${shellEscape(input)}`,\n { timeout: 1000 }\n );\n \n // Send Enter key separately if requested\n if (pressEnter) {\n await execAsync(\n `tmux send-keys -t ${session} Enter`,\n { timeout: 1000 }\n );\n }\n \n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send special keys to a terminal (like arrow keys, escape, etc.)\n */\nexport async function sendKey(terminalId: string, key: 'Enter' | 'Escape' | 'Up' | 'Down' | 'Left' | 'Right' | 'Tab' | 'C-c' | 'C-d' | 'y' | 'n'): Promise<boolean> {\n const session = getSessionName(terminalId);\n \n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 1000 });\n await execAsync(`tmux send-keys -t ${session} ${key}`, { timeout: 1000 });\n return 
true;\n } catch {\n return false;\n }\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { readFile, stat } from 'node:fs/promises';\nimport { resolve, relative, isAbsolute, extname } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { truncateOutput } from '../utils/truncate.js';\nimport { resizeImageIfNeeded } from '../utils/resize-image.js';\n\nconst MAX_FILE_SIZE = 5 * 1024 * 1024; // 5MB\nconst MAX_IMAGE_SIZE = 20 * 1024 * 1024; // 20MB for images\nconst MAX_OUTPUT_CHARS = 50_000;\n\nconst IMAGE_EXTENSIONS: Record<string, string> = {\n '.png': 'image/png',\n '.jpg': 'image/jpeg',\n '.jpeg': 'image/jpeg',\n '.gif': 'image/gif',\n '.webp': 'image/webp',\n};\n\nfunction isImageFile(filePath: string): boolean {\n return extname(filePath).toLowerCase() in IMAGE_EXTENSIONS;\n}\n\nfunction getImageMediaType(filePath: string): string {\n return IMAGE_EXTENSIONS[extname(filePath).toLowerCase()] || 'image/png';\n}\n\nexport interface ReadFileToolOptions {\n workingDirectory: string;\n}\n\nconst readFileInputSchema = z.object({\n path: z\n .string()\n .describe('The path to the file to read. Can be relative to working directory or absolute. Supports text files and images (png, jpg, jpeg, gif, webp).'),\n startLine: z\n .number()\n .optional()\n .describe('Optional: Start reading from this line number (1-indexed). Only for text files.'),\n endLine: z\n .number()\n .optional()\n .describe('Optional: Stop reading at this line number (1-indexed, inclusive). Only for text files.'),\n});\n\nexport function createReadFileTool(options: ReadFileToolOptions) {\n return tool({\n description: `Read the contents of a file. 
Provide a path relative to the working directory (${options.workingDirectory}) or an absolute path.\nSupports text files (automatically truncated if large) and image files (png, jpg, jpeg, gif, webp).\nFor images, the file contents are returned as visual data you can see and analyze.\nUse this to understand existing code, check file contents, view screenshots, or gather context.`,\n\n inputSchema: readFileInputSchema,\n\n execute: async ({ path: filePath, startLine, endLine }: z.infer<typeof readFileInputSchema>) => {\n try {\n const absolutePath = isAbsolute(filePath)\n ? filePath\n : resolve(options.workingDirectory, filePath);\n\n const relativePath = relative(options.workingDirectory, absolutePath);\n if (relativePath.startsWith('..') && !isAbsolute(filePath)) {\n return {\n success: false,\n error: 'Path escapes the working directory. Use an absolute path if intentional.',\n content: null,\n };\n }\n\n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `File not found: ${filePath}`,\n content: null,\n };\n }\n\n const stats = await stat(absolutePath);\n\n if (stats.isDirectory()) {\n return {\n success: false,\n error: 'Path is a directory, not a file. Use bash with \"ls\" to list directory contents.',\n content: null,\n };\n }\n\n // Handle image files\n if (isImageFile(absolutePath)) {\n if (stats.size > MAX_IMAGE_SIZE) {\n return {\n success: false,\n error: `Image is too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). 
Maximum size is ${MAX_IMAGE_SIZE / 1024 / 1024}MB.`,\n content: null,\n };\n }\n\n const rawBuffer = await readFile(absolutePath);\n const mediaType = getImageMediaType(absolutePath);\n const buffer = await resizeImageIfNeeded(rawBuffer, mediaType);\n const base64 = buffer.toString('base64');\n\n return {\n success: true,\n path: absolutePath,\n relativePath: relative(options.workingDirectory, absolutePath),\n content: `[Image: ${relativePath} (${mediaType}, ${(stats.size / 1024).toFixed(1)}KB)]`,\n mediaType,\n imageData: base64,\n sizeBytes: stats.size,\n };\n }\n\n // Handle text files\n if (stats.size > MAX_FILE_SIZE) {\n return {\n success: false,\n error: `File is too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). Maximum size is ${MAX_FILE_SIZE / 1024 / 1024}MB.`,\n content: null,\n };\n }\n\n let content = await readFile(absolutePath, 'utf-8');\n\n if (startLine !== undefined || endLine !== undefined) {\n const lines = content.split('\\n');\n const start = (startLine ?? 1) - 1;\n const end = endLine ?? lines.length;\n \n if (start < 0 || start >= lines.length) {\n return {\n success: false,\n error: `Start line ${startLine} is out of range. 
File has ${lines.length} lines.`,\n content: null,\n };\n }\n\n content = lines\n .slice(start, end)\n .map((line, idx) => `${(start + idx + 1).toString().padStart(4)}: ${line}`)\n .join('\\n');\n }\n\n const truncatedContent = truncateOutput(content, MAX_OUTPUT_CHARS);\n const wasTruncated = truncatedContent.length < content.length;\n\n return {\n success: true,\n path: absolutePath,\n relativePath: relative(options.workingDirectory, absolutePath),\n content: truncatedContent,\n lineCount: content.split('\\n').length,\n wasTruncated,\n sizeBytes: stats.size,\n };\n } catch (error: any) {\n if (error.code === 'ERR_INVALID_ARG_VALUE' || error.message.includes('encoding')) {\n return {\n success: false,\n error: 'File appears to be binary and cannot be read as text.',\n content: null,\n };\n }\n\n return {\n success: false,\n error: error.message,\n content: null,\n };\n }\n },\n\n toModelOutput: ({ output }) => {\n if (output && typeof output === 'object' && 'imageData' in output && output.imageData) {\n const result = output as { imageData: string; mediaType: string; content: string; relativePath: string };\n return {\n type: 'content' as const,\n value: [\n { type: 'text' as const, text: result.content },\n { type: 'image-data' as const, data: result.imageData, mediaType: result.mediaType },\n ],\n };\n }\n return typeof output === 'string'\n ? 
{ type: 'text' as const, value: output }\n : { type: 'json' as const, value: output as any };\n },\n });\n}\n\nexport type ReadFileTool = ReturnType<typeof createReadFileTool>;\n","import sharp from 'sharp';\nimport { createHash } from 'node:crypto';\nimport { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getAppDataDirectory } from '../config/index.js';\n\n/**\n * Anthropic API limits (as of 2026):\n * - Hard reject: any dimension > 8000px (or >2000px if >20 images)\n * - Optimal: long edge <= 1568px, ~1.15 megapixels\n * (anything larger is re-resized server-side, adding TTFT latency)\n * - File size: 5MB per image (base64-encoded)\n *\n * We target 1568px long edge to match Anthropic's optimal dimensions,\n * avoiding both the hard rejection AND the server-side resize latency penalty.\n * If the result still exceeds 5MB, we reduce quality iteratively.\n */\nconst MAX_LONG_EDGE = 1568;\nconst MAX_FILE_BYTES = 5 * 1024 * 1024; // 5MB\nconst CACHE_DIR_NAME = 'image-cache';\n\nfunction getCacheDir(): string {\n const dir = join(getAppDataDirectory(), CACHE_DIR_NAME);\n if (!existsSync(dir)) mkdirSync(dir, { recursive: true });\n return dir;\n}\n\nfunction cacheKey(buffer: Buffer): string {\n return createHash('sha256').update(buffer).digest('hex');\n}\n\n/**\n * Downscale an image buffer to fit within Anthropic's optimal dimensions\n * (1568px long edge) and file size limit (5MB).\n * Returns the original buffer unchanged if no processing is needed.\n * Caches processed results on disk keyed by SHA-256 of the original.\n */\nexport async function resizeImageIfNeeded(buffer: Buffer, mediaType?: string): Promise<Buffer> {\n let metadata;\n try {\n metadata = await sharp(buffer).metadata();\n } catch {\n return buffer;\n }\n\n const { width, height } = metadata;\n if (!width || !height) return buffer;\n\n const longEdge = Math.max(width, height);\n const needsResize = longEdge > MAX_LONG_EDGE;\n const 
needsShrink = buffer.length > MAX_FILE_BYTES;\n\n if (!needsResize && !needsShrink) return buffer;\n\n const key = cacheKey(buffer);\n const cacheDir = getCacheDir();\n const isPng = mediaType?.includes('png');\n const ext = isPng ? '.png' : '.jpg';\n const cachePath = join(cacheDir, key + ext);\n\n if (existsSync(cachePath)) {\n console.log(`[image-resize] Cache hit for ${width}x${height} image`);\n return readFileSync(cachePath);\n }\n\n let pipeline = sharp(buffer);\n\n if (needsResize) {\n pipeline = pipeline.resize(MAX_LONG_EDGE, MAX_LONG_EDGE, {\n fit: 'inside',\n withoutEnlargement: true,\n });\n }\n\n // For PNGs that just need size reduction, convert to JPEG\n // (PNG compression can't get large images under 5MB reliably)\n let result: Buffer;\n if (isPng && (needsShrink || buffer.length > 2 * 1024 * 1024)) {\n result = await pipeline.jpeg({ quality: 85 }).toBuffer();\n } else if (isPng) {\n result = await pipeline.png().toBuffer();\n } else {\n result = await pipeline.jpeg({ quality: 85 }).toBuffer();\n }\n\n // If still over 5MB, reduce quality iteratively\n if (result.length > MAX_FILE_BYTES) {\n for (const quality of [70, 50, 30]) {\n result = await sharp(buffer)\n .resize(MAX_LONG_EDGE, MAX_LONG_EDGE, { fit: 'inside', withoutEnlargement: true })\n .jpeg({ quality })\n .toBuffer();\n if (result.length <= MAX_FILE_BYTES) break;\n }\n }\n\n writeFileSync(cachePath, result);\n\n const resultMeta = await sharp(result).metadata();\n console.log(\n `[image-resize] ${width}x${height} -> ${resultMeta.width}x${resultMeta.height} ` +\n `(${(buffer.length / 1024).toFixed(0)}KB -> ${(result.length / 1024).toFixed(0)}KB)`,\n );\n\n return result;\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { readFile, writeFile, mkdir } from 'node:fs/promises';\nimport { resolve, relative, isAbsolute, dirname } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { backupFile } from '../checkpoints/index.js';\nimport * as LSP from 
'../lsp/index.js';\n\nexport interface WriteFileProgress {\n /** The file path being written */\n path: string;\n /** Relative path from working directory */\n relativePath: string;\n /** Write mode */\n mode: 'full' | 'str_replace';\n /** Status of the write operation */\n status: 'started' | 'content' | 'completed';\n /** For 'content' status: the content being written (may be chunked for streaming) */\n content?: string;\n /** When content is chunked, the chunk index (0-based) */\n chunkIndex?: number;\n /** When content is chunked, the total number of chunks */\n chunkCount?: number;\n /** When content is chunked, the start offset for this chunk */\n chunkStart?: number;\n /** Whether this content update is chunked */\n isChunked?: boolean;\n /** For str_replace: the old string being replaced */\n oldString?: string;\n /** For str_replace: the new string */\n newString?: string;\n /** Total content length (for progress tracking) */\n totalLength?: number;\n /** Action being performed */\n action?: 'created' | 'replaced' | 'edited';\n}\n\nexport interface WriteFileToolOptions {\n workingDirectory: string;\n sessionId: string;\n /** Enable LSP diagnostics after file edits (default: true) */\n enableLSP?: boolean;\n /** Called when write_file has progress to report (for streaming content) */\n onProgress?: (progress: WriteFileProgress) => void;\n}\n\nconst MAX_PROGRESS_CHUNK_SIZE = 16 * 1024;\n\nconst writeFileInputSchema = z.object({\n path: z\n .string()\n .describe('The path to the file. 
Can be relative to working directory or absolute.'),\n mode: z\n .enum(['full', 'str_replace'])\n .describe('Write mode: \"full\" for complete file write, \"str_replace\" for targeted string replacement'),\n content: z\n .string()\n .optional()\n .describe('For \"full\" mode: The complete content to write to the file'),\n old_string: z\n .string()\n .optional()\n .describe('For \"str_replace\" mode: The exact string to find and replace'),\n new_string: z\n .string()\n .optional()\n .describe('For \"str_replace\" mode: The string to replace old_string with'),\n});\n\nexport function createWriteFileTool(options: WriteFileToolOptions) {\n return tool({\n description: `Write content to a file. Supports two modes:\n1. \"full\" - Write the entire file content (creates new file or replaces existing)\n2. \"str_replace\" - Replace a specific string in an existing file (for precise edits)\n\nFor str_replace mode:\n- Provide the exact string to find (old_string) and its replacement (new_string)\n- The old_string must match EXACTLY (including whitespace and indentation)\n- Only the first occurrence is replaced\n- Use this for surgical edits to existing code\n\nFor full mode:\n- Provide the complete file content\n- Creates parent directories if they don't exist\n- Use this for new files or complete rewrites\n\nWorking directory: ${options.workingDirectory}`,\n\n inputSchema: writeFileInputSchema,\n\n execute: async ({ path, mode, content, old_string, new_string }: z.infer<typeof writeFileInputSchema>) => {\n try {\n // Resolve the path\n const absolutePath = isAbsolute(path)\n ? path\n : resolve(options.workingDirectory, path);\n\n // Security check\n const relativePath = relative(options.workingDirectory, absolutePath);\n if (relativePath.startsWith('..') && !isAbsolute(path)) {\n return {\n success: false,\n error: 'Path escapes the working directory. 
Use an absolute path if intentional.',\n };\n }\n\n if (mode === 'full') {\n // Full file write\n if (content === undefined) {\n return {\n success: false,\n error: 'Content is required for \"full\" mode',\n };\n }\n\n const existed = existsSync(absolutePath);\n const action = existed ? 'replaced' : 'created';\n\n // Emit progress: started\n console.log('[WRITE-FILE] onProgress callback exists:', !!options.onProgress);\n console.log('[WRITE-FILE] Emitting started event for:', relativePath);\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'started',\n action,\n totalLength: content.length,\n });\n\n // Emit progress: content (chunked for large payloads to keep SSE stable)\n if (content.length <= MAX_PROGRESS_CHUNK_SIZE) {\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'content',\n content,\n action,\n totalLength: content.length,\n });\n } else {\n const chunkCount = Math.ceil(content.length / MAX_PROGRESS_CHUNK_SIZE);\n for (let i = 0; i < chunkCount; i += 1) {\n const chunkStart = i * MAX_PROGRESS_CHUNK_SIZE;\n const chunk = content.slice(chunkStart, chunkStart + MAX_PROGRESS_CHUNK_SIZE);\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'content',\n content: chunk,\n action,\n totalLength: content.length,\n chunkIndex: i,\n chunkCount,\n chunkStart,\n isChunked: true,\n });\n // Yield between chunks so SSE can flush progressively\n if (chunkCount > 1) {\n await new Promise((resolve) => setTimeout(resolve, 0));\n }\n }\n }\n\n // Backup the file before modifying (for checkpoint/revert)\n await backupFile(options.sessionId, options.workingDirectory, absolutePath);\n\n // Create parent directories if needed\n const dir = dirname(absolutePath);\n if (!existsSync(dir)) {\n await mkdir(dir, { recursive: true });\n }\n\n await writeFile(absolutePath, content, 'utf-8');\n\n // Get LSP diagnostics if enabled and file type is supported\n let 
diagnosticsOutput = '';\n if (options.enableLSP !== false && LSP.isSupported(absolutePath)) {\n await LSP.touchFile(absolutePath, true);\n diagnosticsOutput = await LSP.formatDiagnosticsOutput(absolutePath);\n }\n\n // Emit progress: completed\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'completed',\n action,\n totalLength: content.length,\n });\n\n return {\n success: true,\n path: absolutePath,\n relativePath,\n mode: 'full',\n action,\n bytesWritten: Buffer.byteLength(content, 'utf-8'),\n lineCount: content.split('\\n').length,\n ...(diagnosticsOutput && { diagnostics: diagnosticsOutput }),\n };\n } else if (mode === 'str_replace') {\n // String replacement mode\n if (old_string === undefined || new_string === undefined) {\n return {\n success: false,\n error: 'Both old_string and new_string are required for \"str_replace\" mode',\n };\n }\n\n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `File not found: ${path}. Use \"full\" mode to create new files.`,\n };\n }\n\n // Emit progress: started\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n status: 'started',\n action: 'edited',\n });\n\n // Emit progress: content (show the replacement)\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n status: 'content',\n oldString: old_string,\n newString: new_string,\n action: 'edited',\n });\n\n // Backup the file before modifying (for checkpoint/revert)\n await backupFile(options.sessionId, options.workingDirectory, absolutePath);\n\n // Read current content\n const currentContent = await readFile(absolutePath, 'utf-8');\n\n // Check if old_string exists\n if (!currentContent.includes(old_string)) {\n // Provide helpful debugging info\n const lines = currentContent.split('\\n');\n const preview = lines.slice(0, 20).join('\\n');\n \n return {\n success: false,\n error: 'old_string not found in file. 
The string must match EXACTLY including whitespace.',\n hint: 'Check for differences in indentation, line endings, or invisible characters.',\n filePreview: lines.length > 20 \n ? `${preview}\\n... (${lines.length - 20} more lines)`\n : preview,\n };\n }\n\n // Check for multiple occurrences\n const occurrences = currentContent.split(old_string).length - 1;\n if (occurrences > 1) {\n return {\n success: false,\n error: `Found ${occurrences} occurrences of old_string. Please provide more context to make it unique.`,\n hint: 'Include surrounding lines or more specific content in old_string.',\n };\n }\n\n // Perform replacement\n const newContent = currentContent.replace(old_string, new_string);\n await writeFile(absolutePath, newContent, 'utf-8');\n\n // Calculate diff info\n const oldLines = old_string.split('\\n').length;\n const newLines = new_string.split('\\n').length;\n\n // Get LSP diagnostics if enabled and file type is supported\n let diagnosticsOutput = '';\n if (options.enableLSP !== false && LSP.isSupported(absolutePath)) {\n await LSP.touchFile(absolutePath, true);\n diagnosticsOutput = await LSP.formatDiagnosticsOutput(absolutePath);\n }\n\n // Emit progress: completed\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n status: 'completed',\n action: 'edited',\n });\n\n return {\n success: true,\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n linesRemoved: oldLines,\n linesAdded: newLines,\n lineDelta: newLines - oldLines,\n ...(diagnosticsOutput && { diagnostics: diagnosticsOutput }),\n };\n }\n\n return {\n success: false,\n error: `Invalid mode: ${mode}`,\n };\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\nexport type WriteFileTool = ReturnType<typeof createWriteFileTool>;\n","/**\n * Checkpoint system for session revert functionality\n * \n * Creates checkpoints before each user message, backs up modified files,\n * and allows reverting to 
any previous checkpoint.\n */\n\nimport { readFile, writeFile, unlink, mkdir } from 'node:fs/promises';\nimport { existsSync } from 'node:fs';\nimport { resolve, relative, dirname } from 'node:path';\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport {\n checkpointQueries,\n fileBackupQueries,\n messageQueries,\n toolExecutionQueries,\n sessionQueries,\n type Checkpoint,\n type FileBackup,\n} from '../db/index.js';\n\nconst execAsync = promisify(exec);\n\n/**\n * Get the current git HEAD commit hash (if in a git repo)\n */\nasync function getGitHead(workingDirectory: string): Promise<string | undefined> {\n try {\n const { stdout } = await execAsync('git rev-parse HEAD', {\n cwd: workingDirectory,\n timeout: 5000,\n });\n return stdout.trim();\n } catch {\n return undefined;\n }\n}\n\n/**\n * Check if a directory is a git repository\n */\nasync function isGitRepo(workingDirectory: string): Promise<boolean> {\n try {\n await execAsync('git rev-parse --git-dir', {\n cwd: workingDirectory,\n timeout: 5000,\n });\n return true;\n } catch {\n return false;\n }\n}\n\nexport interface CheckpointManager {\n sessionId: string;\n workingDirectory: string;\n currentCheckpointId: string | null;\n}\n\n// Store for active checkpoint managers (one per session)\nconst activeManagers = new Map<string, CheckpointManager>();\n\n/**\n * Get or create a checkpoint manager for a session\n */\nexport function getCheckpointManager(sessionId: string, workingDirectory: string): CheckpointManager {\n let manager = activeManagers.get(sessionId);\n if (!manager) {\n manager = {\n sessionId,\n workingDirectory,\n currentCheckpointId: null,\n };\n activeManagers.set(sessionId, manager);\n }\n return manager;\n}\n\n/**\n * Create a new checkpoint before processing a user message\n * Called when a user message is about to be processed\n */\nexport async function createCheckpoint(\n sessionId: string,\n workingDirectory: string,\n messageSequence: number\n): 
Promise<Checkpoint> {\n // Get git HEAD if available\n const gitHead = await getGitHead(workingDirectory);\n\n // Create the checkpoint record\n const checkpoint = await checkpointQueries.create({\n sessionId,\n messageSequence,\n gitHead,\n });\n\n // Update the manager with the current checkpoint\n const manager = getCheckpointManager(sessionId, workingDirectory);\n manager.currentCheckpointId = checkpoint.id;\n\n return checkpoint;\n}\n\n/**\n * Backup a file before it's modified\n * Called by the write_file tool before writing\n */\nexport async function backupFile(\n sessionId: string,\n workingDirectory: string,\n filePath: string\n): Promise<FileBackup | null> {\n const manager = getCheckpointManager(sessionId, workingDirectory);\n \n if (!manager.currentCheckpointId) {\n console.warn('[checkpoint] No active checkpoint, skipping file backup');\n return null;\n }\n\n // Normalize the file path to be relative\n const absolutePath = resolve(workingDirectory, filePath);\n const relativePath = relative(workingDirectory, absolutePath);\n\n // Check if we already have a backup for this file in this checkpoint\n if (await fileBackupQueries.hasBackup(manager.currentCheckpointId, relativePath)) {\n // Already backed up in this checkpoint, no need to backup again\n return null;\n }\n\n // Read the original content (if file exists)\n let originalContent: string | null = null;\n let existed = false;\n\n if (existsSync(absolutePath)) {\n try {\n originalContent = await readFile(absolutePath, 'utf-8');\n existed = true;\n } catch (error: any) {\n console.warn(`[checkpoint] Failed to read file for backup: ${error.message}`);\n }\n }\n\n // Create the backup record\n const backup = await fileBackupQueries.create({\n checkpointId: manager.currentCheckpointId,\n sessionId,\n filePath: relativePath,\n originalContent,\n existed,\n });\n\n return backup;\n}\n\n/**\n * Revert a session to a specific checkpoint\n * This will:\n * 1. 
Restore all files to their state at that checkpoint\n * 2. Delete all messages after the checkpoint's message sequence\n * 3. Delete all tool executions after the checkpoint\n * 4. Delete all checkpoints after this one\n */\nexport async function revertToCheckpoint(\n sessionId: string,\n checkpointId: string\n): Promise<{\n success: boolean;\n filesRestored: number;\n filesDeleted: number;\n messagesDeleted: number;\n checkpointsDeleted: number;\n error?: string;\n}> {\n // Get the session to find working directory\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return {\n success: false,\n filesRestored: 0,\n filesDeleted: 0,\n messagesDeleted: 0,\n checkpointsDeleted: 0,\n error: 'Session not found',\n };\n }\n\n // Get the checkpoint\n const checkpoint = await checkpointQueries.getById(checkpointId);\n if (!checkpoint || checkpoint.sessionId !== sessionId) {\n return {\n success: false,\n filesRestored: 0,\n filesDeleted: 0,\n messagesDeleted: 0,\n checkpointsDeleted: 0,\n error: 'Checkpoint not found',\n };\n }\n\n const workingDirectory = session.workingDirectory;\n\n // Get all file backups FROM this checkpoint onwards (these need to be reverted)\n // This includes backups from the target checkpoint since they represent changes made\n // AFTER the checkpoint was created (i.e., during processing of that user message)\n const backupsToRevert = await fileBackupQueries.getFromSequence(sessionId, checkpoint.messageSequence);\n\n // Group backups by file path, keeping only the earliest backup for each file\n // (we want to restore to the state before ANY changes were made)\n const fileToEarliestBackup = new Map<string, FileBackup>();\n for (const backup of backupsToRevert) {\n if (!fileToEarliestBackup.has(backup.filePath)) {\n fileToEarliestBackup.set(backup.filePath, backup);\n }\n }\n\n let filesRestored = 0;\n let filesDeleted = 0;\n\n // Restore each file\n for (const [filePath, backup] of fileToEarliestBackup) {\n const 
absolutePath = resolve(workingDirectory, filePath);\n\n try {\n if (backup.existed && backup.originalContent !== null) {\n // File existed before - restore its content\n const dir = dirname(absolutePath);\n if (!existsSync(dir)) {\n await mkdir(dir, { recursive: true });\n }\n await writeFile(absolutePath, backup.originalContent, 'utf-8');\n filesRestored++;\n } else if (!backup.existed) {\n // File didn't exist before - delete it\n if (existsSync(absolutePath)) {\n await unlink(absolutePath);\n filesDeleted++;\n }\n }\n } catch (error: any) {\n console.error(`Failed to restore ${filePath}: ${error.message}`);\n }\n }\n\n // Delete messages from the checkpoint's message sequence onwards\n const messagesDeleted = await messageQueries.deleteFromSequence(sessionId, checkpoint.messageSequence);\n\n // Delete tool executions after the checkpoint was created\n await toolExecutionQueries.deleteAfterTime(sessionId, checkpoint.createdAt);\n\n // Delete checkpoints after this one (the file backups are deleted via CASCADE)\n const checkpointsDeleted = await checkpointQueries.deleteAfterSequence(sessionId, checkpoint.messageSequence);\n\n // Update the manager\n const manager = getCheckpointManager(sessionId, workingDirectory);\n manager.currentCheckpointId = checkpoint.id;\n\n return {\n success: true,\n filesRestored,\n filesDeleted,\n messagesDeleted,\n checkpointsDeleted,\n };\n}\n\n/**\n * Get all checkpoints for a session\n */\nexport async function getCheckpoints(sessionId: string): Promise<Checkpoint[]> {\n return checkpointQueries.getBySession(sessionId);\n}\n\n/**\n * Get the diff for an entire session (all file changes from start to now)\n */\nexport async function getSessionDiff(\n sessionId: string\n): Promise<{\n files: Array<{\n path: string;\n status: 'created' | 'modified' | 'deleted';\n originalContent: string | null;\n currentContent: string | null;\n }>;\n}> {\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return { files: [] 
};\n }\n\n const workingDirectory = session.workingDirectory;\n\n // Get all file backups for this session\n const allBackups = await fileBackupQueries.getBySession(sessionId);\n\n // Group by file path, keeping the earliest backup (original state)\n const fileToOriginalBackup = new Map<string, FileBackup>();\n for (const backup of allBackups) {\n if (!fileToOriginalBackup.has(backup.filePath)) {\n fileToOriginalBackup.set(backup.filePath, backup);\n }\n }\n\n const files: Array<{\n path: string;\n status: 'created' | 'modified' | 'deleted';\n originalContent: string | null;\n currentContent: string | null;\n }> = [];\n\n for (const [filePath, originalBackup] of fileToOriginalBackup) {\n const absolutePath = resolve(workingDirectory, filePath);\n \n // Get current content\n let currentContent: string | null = null;\n let currentExists = false;\n \n if (existsSync(absolutePath)) {\n try {\n currentContent = await readFile(absolutePath, 'utf-8');\n currentExists = true;\n } catch {\n // File exists but can't be read\n }\n }\n\n // Determine status\n let status: 'created' | 'modified' | 'deleted';\n if (!originalBackup.existed && currentExists) {\n status = 'created';\n } else if (originalBackup.existed && !currentExists) {\n status = 'deleted';\n } else {\n status = 'modified';\n }\n\n files.push({\n path: filePath,\n status,\n originalContent: originalBackup.originalContent,\n currentContent,\n });\n }\n\n return { files };\n}\n\n/**\n * Clear the checkpoint manager for a session (called when session is deleted)\n */\nexport function clearCheckpointManager(sessionId: string): void {\n activeManagers.delete(sessionId);\n}\n","/**\n * LSP Integration Module\n * \n * Provides Language Server Protocol support for the coding agent.\n * Automatically spawns LSP servers on-demand when files are touched,\n * collects diagnostics, and formats them for the agent.\n * \n * Usage:\n * import * as LSP from './lsp/index.js';\n * \n * // After editing a file, get diagnostics\n * 
await LSP.touchFile('/path/to/file.ts', true);\n * const diagnostics = await LSP.getDiagnostics('/path/to/file.ts');\n */\n\nimport { extname, dirname } from 'node:path';\nimport { getServerForExtension, getSupportedExtensions } from './servers.js';\nimport { createClient, normalizePath } from './client.js';\nimport {\n formatDiagnosticsForAgent,\n formatDiagnostic,\n DiagnosticSeverity,\n SymbolKind,\n} from './types.js';\nimport type { Diagnostic, LSPClient, Location, DocumentSymbol, SymbolInformation } from './types.js';\n\n// Re-export types and utilities\nexport * from './types.js';\nexport { normalizePath } from './client.js';\nexport { getSupportedExtensions, getServerForExtension } from './servers.js';\n\n/**\n * Global state for LSP clients\n */\ninterface LSPState {\n clients: Map<string, LSPClient>; // key: `${serverId}:${root}`\n broken: Set<string>; // keys of servers that failed to start\n initialized: boolean;\n}\n\nlet state: LSPState = {\n clients: new Map(),\n broken: new Set(),\n initialized: false,\n};\n\n/**\n * Initialize the LSP system (optional, called automatically on first use)\n */\nexport async function init(): Promise<void> {\n if (state.initialized) return;\n state.initialized = true;\n}\n\n/**\n * Get or create an LSP client for a file\n */\nasync function getClientForFile(filePath: string): Promise<LSPClient | null> {\n const normalized = normalizePath(filePath);\n const ext = extname(normalized);\n \n // Check if we support this file type\n const serverDef = getServerForExtension(ext);\n if (!serverDef) {\n return null;\n }\n \n // Use file's directory as root (server will find project root)\n const root = dirname(normalized);\n const key = `${serverDef.id}:${root}`;\n \n // Check if we already have a client\n const existing = state.clients.get(key);\n if (existing) {\n return existing;\n }\n \n // Check if this server is broken for this root\n if (state.broken.has(key)) {\n return null;\n }\n \n // Spawn new server\n try {\n const 
handle = await serverDef.spawn(root);\n if (!handle) {\n state.broken.add(key);\n return null;\n }\n \n console.log(`[lsp] Started ${serverDef.name} for ${root}`);\n \n const client = await createClient(serverDef.id, handle, root);\n state.clients.set(key, client);\n \n // Handle process exit\n handle.process.on('exit', (code) => {\n console.log(`[lsp] ${serverDef.name} exited with code ${code}`);\n state.clients.delete(key);\n });\n \n return client;\n } catch (error) {\n console.error(`[lsp] Failed to start ${serverDef.name}:`, error);\n state.broken.add(key);\n return null;\n }\n}\n\n/**\n * Get all clients for a file (currently just TypeScript, but extensible)\n */\nasync function getClientsForFile(filePath: string): Promise<LSPClient[]> {\n const client = await getClientForFile(filePath);\n return client ? [client] : [];\n}\n\n/**\n * Touch a file (notify LSP of change and optionally wait for diagnostics)\n * \n * Call this after editing a file to get diagnostics.\n * \n * @param filePath - Path to the file\n * @param waitForDiagnostics - Whether to wait for diagnostics before returning\n * @returns Promise that resolves when done\n */\nexport async function touchFile(filePath: string, waitForDiagnostics = false): Promise<void> {\n const clients = await getClientsForFile(filePath);\n \n if (clients.length === 0) {\n return;\n }\n \n // Notify all clients\n await Promise.all(clients.map(client => client.notifyOpen(filePath)));\n \n // Optionally wait for diagnostics\n if (waitForDiagnostics) {\n await Promise.all(clients.map(client => client.waitForDiagnostics(filePath)));\n }\n}\n\n/**\n * Get diagnostics for a file\n */\nexport async function getDiagnostics(filePath: string): Promise<Diagnostic[]> {\n const normalized = normalizePath(filePath);\n const clients = await getClientsForFile(normalized);\n \n const allDiagnostics: Diagnostic[] = [];\n \n for (const client of clients) {\n const diags = client.getDiagnostics(normalized);\n 
allDiagnostics.push(...diags);\n }\n \n return allDiagnostics;\n}\n\n/**\n * Get all diagnostics from all clients\n */\nexport async function getAllDiagnostics(): Promise<Record<string, Diagnostic[]>> {\n const results: Record<string, Diagnostic[]> = {};\n \n for (const client of state.clients.values()) {\n const clientDiags = client.getAllDiagnostics();\n for (const [path, diagnostics] of clientDiags.entries()) {\n const existing = results[path] || [];\n existing.push(...diagnostics);\n results[path] = existing;\n }\n }\n \n return results;\n}\n\n/**\n * Wait for diagnostics on a file\n */\nexport async function waitForDiagnostics(filePath: string, timeoutMs = 5000): Promise<Diagnostic[]> {\n const normalized = normalizePath(filePath);\n const clients = await getClientsForFile(normalized);\n \n const allDiagnostics: Diagnostic[] = [];\n \n await Promise.all(\n clients.map(async (client) => {\n const diags = await client.waitForDiagnostics(normalized, timeoutMs);\n allDiagnostics.push(...diags);\n })\n );\n \n return allDiagnostics;\n}\n\n// ============================================================\n// Code intelligence methods (definition, references, hover, symbols)\n// ============================================================\n\n/**\n * Get definition locations for a symbol at a given position\n */\nexport async function getDefinition(filePath: string, line: number, character: number): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return [];\n return client.getDefinition(normalized, line, character);\n}\n\n/**\n * Find all references to a symbol at a given position\n */\nexport async function getReferences(\n filePath: string,\n line: number,\n character: number,\n includeDeclaration = false\n): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return [];\n return 
client.getReferences(normalized, line, character, includeDeclaration);\n}\n\n/**\n * Get hover/type information for a symbol at a given position\n */\nexport async function getHover(filePath: string, line: number, character: number): Promise<string | null> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return null;\n return client.getHover(normalized, line, character);\n}\n\n/**\n * Get all symbols defined in a document (hierarchical tree)\n */\nexport async function getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return [];\n return client.getDocumentSymbols(normalized);\n}\n\n/**\n * Search for symbols across the workspace by name.\n * Requires a hint file path to identify which LSP client/project to search.\n */\nexport async function findWorkspaceSymbols(query: string, hintFilePath?: string): Promise<SymbolInformation[]> {\n let client: LSPClient | null = null;\n if (hintFilePath) {\n client = await getClientForFile(hintFilePath);\n }\n if (!client) {\n // Use first available client\n const clients = Array.from(state.clients.values());\n client = clients[0] || null;\n }\n if (!client) return [];\n return client.findWorkspaceSymbols(query);\n}\n\n/**\n * Format diagnostics for agent output\n * \n * Call this after touchFile to get a formatted string to append to tool output.\n */\nexport async function formatDiagnosticsOutput(\n filePath: string,\n options: { maxDiagnostics?: number; errorsOnly?: boolean } = {}\n): Promise<string> {\n const diagnostics = await getDiagnostics(filePath);\n return formatDiagnosticsForAgent(filePath, diagnostics, options);\n}\n\n/**\n * Get errors only (severity = 1)\n */\nexport function getErrors(diagnostics: Diagnostic[]): Diagnostic[] {\n return diagnostics.filter(d => d.severity === DiagnosticSeverity.Error);\n}\n\n/**\n * Check 
if a file type is supported\n */\nexport function isSupported(filePath: string): boolean {\n const ext = extname(filePath);\n return getServerForExtension(ext) !== null;\n}\n\n/**\n * Shutdown all LSP clients\n */\nexport async function shutdown(): Promise<void> {\n const shutdownPromises: Promise<void>[] = [];\n \n for (const [key, client] of state.clients.entries()) {\n console.log(`[lsp] Shutting down ${key}`);\n shutdownPromises.push(client.shutdown());\n }\n \n await Promise.allSettled(shutdownPromises);\n \n state.clients.clear();\n state.broken.clear();\n state.initialized = false;\n}\n\n/**\n * Reset state (for testing)\n * \n * Kills all existing LSP server processes before clearing state to prevent\n * orphaned tsserver processes from accumulating (which causes CI hangs).\n */\nexport async function reset(): Promise<void> {\n // Shut down existing clients to avoid orphaning server processes\n for (const client of state.clients.values()) {\n try {\n await client.shutdown();\n } catch {\n // Ignore errors during cleanup\n }\n }\n\n state = {\n clients: new Map(),\n broken: new Set(),\n initialized: false,\n };\n}\n\n// Utility exports for direct usage\nexport const DiagnosticUtils = {\n format: formatDiagnostic,\n formatForAgent: formatDiagnosticsForAgent,\n Severity: DiagnosticSeverity,\n};\n\n// Alias for backwards compatibility\nexport { DiagnosticUtils as Diagnostic };\n","/**\n * LSP Server Definitions\n * \n * Defines how to spawn and configure various LSP servers.\n * Currently supports TypeScript/JavaScript with typescript-language-server.\n */\n\nimport { spawn } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { resolve, dirname } from 'node:path';\nimport type { LSPServerDefinition, LSPServerHandle } from './types.js';\n\n/**\n * Find the nearest directory containing one of the given files\n */\nfunction findNearestRoot(startDir: string, markers: string[]): string | null {\n let dir = startDir;\n const root = '/';\n \n 
while (dir !== root) {\n for (const marker of markers) {\n if (existsSync(resolve(dir, marker))) {\n return dir;\n }\n }\n const parent = dirname(dir);\n if (parent === dir) break;\n dir = parent;\n }\n \n return null;\n}\n\n/**\n * Check if a command exists in PATH\n */\nasync function commandExists(cmd: string): Promise<boolean> {\n try {\n const { exec } = await import('node:child_process');\n const { promisify } = await import('node:util');\n const execAsync = promisify(exec);\n \n const isWindows = process.platform === 'win32';\n const checkCmd = isWindows ? `where ${cmd}` : `which ${cmd}`;\n \n await execAsync(checkCmd);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * TypeScript/JavaScript Language Server\n * \n * Uses typescript-language-server which wraps tsserver.\n * Provides type checking, error detection, and more.\n */\nexport const TypeScriptServer: LSPServerDefinition = {\n id: 'typescript',\n name: 'TypeScript Language Server',\n extensions: ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs', '.mts', '.cts'],\n \n async spawn(root: string): Promise<LSPServerHandle | null> {\n // Check for package manager lock files to determine project root\n const projectRoot = findNearestRoot(root, [\n 'package-lock.json',\n 'pnpm-lock.yaml',\n 'yarn.lock',\n 'bun.lockb',\n 'bun.lock',\n ]) || root;\n \n // Try to find typescript-language-server\n const hasNpx = await commandExists('npx');\n const hasBunx = await commandExists('bunx');\n const hasPnpx = await commandExists('pnpx');\n \n let cmd: string[];\n \n if (hasPnpx) {\n cmd = ['pnpx', 'typescript-language-server', '--stdio'];\n } else if (hasBunx) {\n cmd = ['bunx', 'typescript-language-server', '--stdio'];\n } else if (hasNpx) {\n cmd = ['npx', 'typescript-language-server', '--stdio'];\n } else {\n console.warn('[lsp] No package runner (npx/bunx/pnpx) found for typescript-language-server');\n return null;\n }\n \n try {\n const proc = spawn(cmd[0], cmd.slice(1), {\n cwd: projectRoot,\n stdio: 
['pipe', 'pipe', 'pipe'],\n env: {\n ...process.env,\n // Suppress some noisy output\n TSS_LOG: '-level none',\n },\n });\n \n // Handle stderr (for debugging)\n proc.stderr?.on('data', (data) => {\n const msg = data.toString().trim();\n if (msg && !msg.includes('deprecated')) {\n // Only log non-trivial errors\n console.debug('[lsp:typescript:stderr]', msg);\n }\n });\n \n return {\n process: proc,\n initialization: {\n // TypeScript-specific initialization options\n preferences: {\n includeInlayParameterNameHints: 'none',\n includeInlayPropertyDeclarationTypeHints: false,\n includeInlayFunctionLikeReturnTypeHints: false,\n },\n },\n };\n } catch (error) {\n console.error('[lsp] Failed to spawn typescript-language-server:', error);\n return null;\n }\n },\n};\n\n/**\n * All available LSP servers\n */\nexport const servers: LSPServerDefinition[] = [\n TypeScriptServer,\n];\n\n/**\n * Get the appropriate server for a file extension\n */\nexport function getServerForExtension(ext: string): LSPServerDefinition | null {\n for (const server of servers) {\n if (server.extensions.includes(ext)) {\n return server;\n }\n }\n return null;\n}\n\n/**\n * Get all supported file extensions\n */\nexport function getSupportedExtensions(): string[] {\n const extensions = new Set<string>();\n for (const server of servers) {\n for (const ext of server.extensions) {\n extensions.add(ext);\n }\n }\n return Array.from(extensions);\n}\n","/**\n * LSP Client\n * \n * Manages communication with an LSP server via JSON-RPC over stdio.\n * Handles initialization, file notifications, and diagnostics collection.\n */\n\nimport {\n createMessageConnection,\n StreamMessageReader,\n StreamMessageWriter,\n type MessageConnection,\n} from 'vscode-jsonrpc/node.js';\nimport { pathToFileURL, fileURLToPath } from 'node:url';\nimport { readFile } from 'node:fs/promises';\nimport { existsSync } from 'node:fs';\nimport { extname, normalize } from 'node:path';\nimport type { LSPClient, LSPServerHandle, 
Diagnostic, Location, DocumentSymbol, SymbolInformation } from './types.js';\n\n/**\n * Map file extension to LSP language ID\n */\nfunction getLanguageId(filePath: string): string {\n const ext = extname(filePath).toLowerCase();\n const map: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescriptreact',\n '.js': 'javascript',\n '.jsx': 'javascriptreact',\n '.mjs': 'javascript',\n '.cjs': 'javascript',\n '.mts': 'typescript',\n '.cts': 'typescript',\n '.json': 'json',\n '.jsonc': 'jsonc',\n };\n return map[ext] || 'plaintext';\n}\n\n/**\n * Normalize a file path for consistent key usage\n */\nexport function normalizePath(filePath: string): string {\n return normalize(filePath);\n}\n\n/**\n * Create an LSP client connected to a server\n */\nexport async function createClient(\n serverId: string,\n handle: LSPServerHandle,\n root: string\n): Promise<LSPClient> {\n const { process: proc } = handle;\n \n if (!proc.stdout || !proc.stdin) {\n throw new Error('LSP server process has no stdout/stdin');\n }\n \n // Create JSON-RPC connection over stdio\n const connection: MessageConnection = createMessageConnection(\n new StreamMessageReader(proc.stdout),\n new StreamMessageWriter(proc.stdin)\n );\n \n // Diagnostics storage\n const diagnostics = new Map<string, Diagnostic[]>();\n \n // Track open files and their versions\n const fileVersions = new Map<string, number>();\n \n // Event listeners for diagnostics updates\n const diagnosticListeners = new Map<string, Array<() => void>>();\n \n // Listen for diagnostic notifications\n connection.onNotification('textDocument/publishDiagnostics', (params: any) => {\n const filePath = normalizePath(fileURLToPath(params.uri));\n diagnostics.set(filePath, params.diagnostics || []);\n \n // Notify any waiters\n const listeners = diagnosticListeners.get(filePath);\n if (listeners) {\n for (const listener of listeners) {\n listener();\n }\n }\n });\n \n // Handle server requests\n 
connection.onRequest('workspace/configuration', async (params: any) => {\n // Return configuration for each requested section\n return params.items.map(() => handle.initialization || {});\n });\n \n connection.onRequest('client/registerCapability', async () => {\n // Accept capability registration\n return null;\n });\n \n connection.onRequest('window/workDoneProgress/create', async () => {\n // Accept progress token creation\n return null;\n });\n \n connection.onNotification('window/logMessage', (params: any) => {\n // Optionally log server messages\n if (params.type <= 2) { // Error or Warning\n console.debug(`[lsp:${serverId}]`, params.message);\n }\n });\n \n // Start listening\n connection.listen();\n \n // Initialize the server\n const initResult = await connection.sendRequest('initialize', {\n processId: process.pid,\n rootUri: pathToFileURL(root).href,\n rootPath: root,\n workspaceFolders: [\n {\n name: 'workspace',\n uri: pathToFileURL(root).href,\n },\n ],\n capabilities: {\n textDocument: {\n synchronization: {\n dynamicRegistration: true,\n willSave: false,\n willSaveWaitUntil: false,\n didSave: true,\n },\n publishDiagnostics: {\n relatedInformation: true,\n versionSupport: true,\n codeDescriptionSupport: true,\n },\n completion: {\n dynamicRegistration: true,\n completionItem: {\n snippetSupport: true,\n documentationFormat: ['markdown', 'plaintext'],\n },\n },\n hover: {\n dynamicRegistration: true,\n contentFormat: ['markdown', 'plaintext'],\n },\n definition: {\n dynamicRegistration: true,\n },\n references: {\n dynamicRegistration: true,\n },\n documentSymbol: {\n dynamicRegistration: true,\n hierarchicalDocumentSymbolSupport: true,\n symbolKind: {\n valueSet: [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26],\n },\n },\n },\n workspace: {\n configuration: true,\n didChangeConfiguration: {\n dynamicRegistration: true,\n },\n didChangeWatchedFiles: {\n dynamicRegistration: true,\n },\n workspaceFolders: true,\n },\n },\n 
initializationOptions: handle.initialization,\n });\n \n // Send initialized notification\n await connection.sendNotification('initialized', {});\n \n // Return client interface\n const client: LSPClient = {\n serverId,\n root,\n diagnostics,\n \n async notifyOpen(filePath: string): Promise<void> {\n const normalized = normalizePath(filePath);\n \n if (!existsSync(normalized)) {\n return;\n }\n \n try {\n const content = await readFile(normalized, 'utf-8');\n const version = (fileVersions.get(normalized) ?? -1) + 1;\n fileVersions.set(normalized, version);\n \n if (version === 0) {\n // First time opening\n await connection.sendNotification('textDocument/didOpen', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n languageId: getLanguageId(normalized),\n version,\n text: content,\n },\n });\n } else {\n // Already open, send change\n await connection.sendNotification('textDocument/didChange', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n version,\n },\n contentChanges: [{ text: content }],\n });\n }\n } catch (error) {\n console.error('[lsp] Error notifying open:', error);\n }\n },\n \n async notifyChange(filePath: string): Promise<void> {\n const normalized = normalizePath(filePath);\n \n if (!existsSync(normalized)) {\n return;\n }\n \n try {\n const content = await readFile(normalized, 'utf-8');\n const version = (fileVersions.get(normalized) ?? 
0) + 1;\n fileVersions.set(normalized, version);\n \n await connection.sendNotification('textDocument/didChange', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n version,\n },\n contentChanges: [{ text: content }],\n });\n } catch (error) {\n console.error('[lsp] Error notifying change:', error);\n }\n },\n \n async notifyClose(filePath: string): Promise<void> {\n const normalized = normalizePath(filePath);\n fileVersions.delete(normalized);\n diagnostics.delete(normalized);\n \n try {\n await connection.sendNotification('textDocument/didClose', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n },\n });\n } catch (error) {\n console.error('[lsp] Error notifying close:', error);\n }\n },\n \n async notifyWatchedFilesChanged(changes: Array<{ uri: string; type: number }>): Promise<void> {\n try {\n await connection.sendNotification('workspace/didChangeWatchedFiles', {\n changes,\n });\n } catch (error) {\n console.error('[lsp] Error notifying watched files:', error);\n }\n },\n \n async waitForDiagnostics(filePath: string, timeoutMs = 5000): Promise<Diagnostic[]> {\n const normalized = normalizePath(filePath);\n \n return new Promise<Diagnostic[]>((resolve) => {\n const startTime = Date.now();\n let debounceTimer: NodeJS.Timeout | undefined;\n let resolved = false;\n \n const cleanup = () => {\n if (debounceTimer) clearTimeout(debounceTimer);\n const listeners = diagnosticListeners.get(normalized);\n if (listeners) {\n const idx = listeners.indexOf(onDiagnostic);\n if (idx >= 0) listeners.splice(idx, 1);\n if (listeners.length === 0) {\n diagnosticListeners.delete(normalized);\n }\n }\n };\n \n const finish = () => {\n if (resolved) return;\n resolved = true;\n cleanup();\n resolve(diagnostics.get(normalized) || []);\n };\n \n const onDiagnostic = () => {\n // Debounce: wait 150ms after last update\n if (debounceTimer) clearTimeout(debounceTimer);\n debounceTimer = setTimeout(finish, 150);\n };\n \n // Register listener\n if 
(!diagnosticListeners.has(normalized)) {\n diagnosticListeners.set(normalized, []);\n }\n diagnosticListeners.get(normalized)!.push(onDiagnostic);\n \n // Timeout fallback\n setTimeout(() => {\n if (!resolved) {\n finish();\n }\n }, timeoutMs);\n \n // If we already have diagnostics, trigger debounce\n if (diagnostics.has(normalized)) {\n onDiagnostic();\n }\n });\n },\n \n getDiagnostics(filePath: string): Diagnostic[] {\n return diagnostics.get(normalizePath(filePath)) || [];\n },\n \n getAllDiagnostics(): Map<string, Diagnostic[]> {\n return new Map(diagnostics);\n },\n \n async getDefinition(filePath: string, line: number, character: number): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n // Only open if not already tracked (avoids redundant file reads + didChange)\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result: any = await connection.sendRequest('textDocument/definition', {\n textDocument: { uri: pathToFileURL(normalized).href },\n position: { line, character },\n });\n if (!result) return [];\n const items = Array.isArray(result) ? 
result : [result];\n return items.map((r: any) => ({\n uri: r.targetUri || r.uri,\n range: r.targetRange || r.range,\n }));\n } catch (error) {\n console.error('[lsp] Error getting definition:', error);\n return [];\n }\n },\n \n async getReferences(filePath: string, line: number, character: number, includeDeclaration = false): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result = await connection.sendRequest('textDocument/references', {\n textDocument: { uri: pathToFileURL(normalized).href },\n position: { line, character },\n context: { includeDeclaration },\n });\n return (result as Location[]) || [];\n } catch (error) {\n console.error('[lsp] Error getting references:', error);\n return [];\n }\n },\n \n async getHover(filePath: string, line: number, character: number): Promise<string | null> {\n const normalized = normalizePath(filePath);\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result: any = await connection.sendRequest('textDocument/hover', {\n textDocument: { uri: pathToFileURL(normalized).href },\n position: { line, character },\n });\n if (!result || !result.contents) return null;\n if (typeof result.contents === 'string') return result.contents;\n if (result.contents.value) return result.contents.value;\n if (Array.isArray(result.contents)) {\n return result.contents.map((c: any) => typeof c === 'string' ? 
c : c.value).join('\\n');\n }\n return null;\n } catch (error) {\n console.error('[lsp] Error getting hover:', error);\n return null;\n }\n },\n \n async getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]> {\n const normalized = normalizePath(filePath);\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result: any[] = await connection.sendRequest('textDocument/documentSymbol', {\n textDocument: { uri: pathToFileURL(normalized).href },\n });\n if (!result || result.length === 0) return [];\n \n // Handle both DocumentSymbol[] (hierarchical) and SymbolInformation[] (flat) formats\n // DocumentSymbol has `range` directly; SymbolInformation has `location.range`\n if (result[0].range) {\n return result as DocumentSymbol[];\n }\n // Convert SymbolInformation[] to DocumentSymbol[]\n return result.map((si: any) => ({\n name: si.name,\n kind: si.kind,\n range: si.location?.range ?? { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } },\n selectionRange: si.location?.range ?? 
{ start: { line: 0, character: 0 }, end: { line: 0, character: 0 } },\n detail: si.containerName,\n }));\n } catch (error) {\n console.error('[lsp] Error getting document symbols:', error);\n return [];\n }\n },\n \n async findWorkspaceSymbols(query: string): Promise<SymbolInformation[]> {\n try {\n const result = await connection.sendRequest('workspace/symbol', { query });\n return (result as SymbolInformation[]) || [];\n } catch (error) {\n console.error('[lsp] Error finding workspace symbols:', error);\n return [];\n }\n },\n \n async shutdown(): Promise<void> {\n try {\n await connection.sendRequest('shutdown');\n await connection.sendNotification('exit');\n connection.end();\n connection.dispose();\n proc.kill();\n } catch (error) {\n // Force kill if graceful shutdown fails\n proc.kill('SIGKILL');\n }\n },\n };\n \n return client;\n}\n","/**\n * LSP Types\n * \n * Type definitions for the Language Server Protocol integration.\n * These types are compatible with vscode-languageserver-types.\n */\n\n/**\n * Diagnostic severity levels from LSP spec\n */\nexport enum DiagnosticSeverity {\n Error = 1,\n Warning = 2,\n Information = 3,\n Hint = 4,\n}\n\n/**\n * Position in a text document (0-indexed)\n */\nexport interface Position {\n line: number;\n character: number;\n}\n\n/**\n * Range in a text document\n */\nexport interface Range {\n start: Position;\n end: Position;\n}\n\n/**\n * A diagnostic message from an LSP server\n */\nexport interface Diagnostic {\n range: Range;\n message: string;\n severity?: DiagnosticSeverity;\n code?: number | string;\n source?: string;\n relatedInformation?: DiagnosticRelatedInformation[];\n}\n\n/**\n * Related information for a diagnostic\n */\nexport interface DiagnosticRelatedInformation {\n location: {\n uri: string;\n range: Range;\n };\n message: string;\n}\n\n/**\n * A location in a text document (used by definition, references, etc.)\n */\nexport interface Location {\n uri: string;\n range: Range;\n}\n\n/**\n * Symbol 
kinds from the LSP spec\n */\nexport enum SymbolKind {\n File = 1,\n Module = 2,\n Namespace = 3,\n Package = 4,\n Class = 5,\n Method = 6,\n Property = 7,\n Field = 8,\n Constructor = 9,\n Enum = 10,\n Interface = 11,\n Function = 12,\n Variable = 13,\n Constant = 14,\n String = 15,\n Number = 16,\n Boolean = 17,\n Array = 18,\n Object = 19,\n Key = 20,\n Null = 21,\n EnumMember = 22,\n Struct = 23,\n Event = 24,\n Operator = 25,\n TypeParameter = 26,\n}\n\n/**\n * Hierarchical document symbol (returned by textDocument/documentSymbol)\n */\nexport interface DocumentSymbol {\n name: string;\n detail?: string;\n kind: SymbolKind;\n range: Range;\n selectionRange: Range;\n children?: DocumentSymbol[];\n}\n\n/**\n * Flat symbol information (returned by workspace/symbol)\n */\nexport interface SymbolInformation {\n name: string;\n kind: SymbolKind;\n location: Location;\n containerName?: string;\n}\n\n/**\n * Parameters for textDocument/publishDiagnostics notification\n */\nexport interface PublishDiagnosticsParams {\n uri: string;\n version?: number;\n diagnostics: Diagnostic[];\n}\n\n/**\n * LSP Server handle (spawned process)\n */\nexport interface LSPServerHandle {\n process: import('node:child_process').ChildProcess;\n initialization?: Record<string, unknown>;\n}\n\n/**\n * LSP Server definition\n */\nexport interface LSPServerDefinition {\n id: string;\n name: string;\n extensions: string[];\n spawn: (root: string) => Promise<LSPServerHandle | null>;\n}\n\n/**\n * LSP Client interface\n */\nexport interface LSPClient {\n serverId: string;\n root: string;\n diagnostics: Map<string, Diagnostic[]>;\n \n notifyOpen(filePath: string): Promise<void>;\n notifyChange(filePath: string): Promise<void>;\n notifyClose(filePath: string): Promise<void>;\n notifyWatchedFilesChanged(changes: Array<{ uri: string; type: number }>): Promise<void>;\n \n waitForDiagnostics(filePath: string, timeoutMs?: number): Promise<Diagnostic[]>;\n getDiagnostics(filePath: string): 
Diagnostic[];\n getAllDiagnostics(): Map<string, Diagnostic[]>;\n \n /** Go to definition for a symbol at the given position */\n getDefinition(filePath: string, line: number, character: number): Promise<Location[]>;\n /** Find all references to the symbol at the given position */\n getReferences(filePath: string, line: number, character: number, includeDeclaration?: boolean): Promise<Location[]>;\n /** Get hover/type information for the symbol at the given position */\n getHover(filePath: string, line: number, character: number): Promise<string | null>;\n /** Get all symbols defined in a document (hierarchical) */\n getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]>;\n /** Search for symbols across the workspace by name */\n findWorkspaceSymbols(query: string): Promise<SymbolInformation[]>;\n \n shutdown(): Promise<void>;\n}\n\n/**\n * Format a diagnostic for display to the agent\n */\nexport function formatDiagnostic(diagnostic: Diagnostic): string {\n const severity = {\n [DiagnosticSeverity.Error]: 'ERROR',\n [DiagnosticSeverity.Warning]: 'WARN',\n [DiagnosticSeverity.Information]: 'INFO',\n [DiagnosticSeverity.Hint]: 'HINT',\n }[diagnostic.severity ?? DiagnosticSeverity.Error];\n \n const line = diagnostic.range.start.line + 1; // Convert to 1-indexed\n const col = diagnostic.range.start.character + 1;\n const source = diagnostic.source ? ` [${diagnostic.source}]` : '';\n \n return `${severity} [${line}:${col}]${source} ${diagnostic.message}`;\n}\n\n/**\n * Format diagnostics for agent output\n */\nexport function formatDiagnosticsForAgent(\n filePath: string,\n diagnostics: Diagnostic[],\n options: { maxDiagnostics?: number; errorsOnly?: boolean } = {}\n): string {\n const { maxDiagnostics = 20, errorsOnly = true } = options;\n \n // Filter to errors only if requested\n const filtered = errorsOnly\n ? 
diagnostics.filter(d => d.severity === DiagnosticSeverity.Error)\n : diagnostics;\n \n if (filtered.length === 0) return '';\n \n const limited = filtered.slice(0, maxDiagnostics);\n const suffix = filtered.length > maxDiagnostics\n ? `\\n... and ${filtered.length - maxDiagnostics} more`\n : '';\n \n const formatted = limited.map(formatDiagnostic).join('\\n');\n \n return `\\n\\nLSP errors detected in this file, please fix:\\n<diagnostics file=\"${filePath}\">\\n${formatted}${suffix}\\n</diagnostics>`;\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { todoQueries, TodoItem } from '../db/index.js';\n\nexport interface TodoToolOptions {\n sessionId: string;\n}\n\nconst todoInputSchema = z.object({\n action: z\n .enum(['add', 'list', 'mark', 'clear'])\n .describe('The action to perform on the todo list'),\n items: z\n .array(\n z.object({\n content: z.string().describe('Description of the task'),\n order: z.number().optional().describe('Optional order/priority (lower = higher priority)'),\n })\n )\n .optional()\n .describe('For \"add\" action: Array of todo items to add'),\n todoId: z\n .string()\n .optional()\n .describe('For \"mark\" action: The ID of the todo item to update'),\n status: z\n .enum(['pending', 'in_progress', 'completed', 'cancelled'])\n .optional()\n .describe('For \"mark\" action: The new status for the todo item'),\n});\n\nexport function createTodoTool(options: TodoToolOptions) {\n return tool({\n description: `Manage your task list for the current session. 
Use this to:\n- Break down complex tasks into smaller steps\n- Track progress on multi-step operations\n- Organize your work systematically\n\nAvailable actions:\n- \"add\": Add one or more new todo items to the list\n- \"list\": View all current todo items and their status\n- \"mark\": Update the status of a todo item (pending, in_progress, completed, cancelled)\n- \"clear\": Remove all todo items from the list\n\nBest practices:\n- Add todos before starting complex tasks\n- Mark items as \"in_progress\" when actively working on them\n- Update status as you complete each step`,\n\n inputSchema: todoInputSchema,\n\n execute: async ({ action, items, todoId, status }: z.infer<typeof todoInputSchema>) => {\n try {\n switch (action) {\n case 'add': {\n if (!items || items.length === 0) {\n return {\n success: false,\n error: 'No items provided. Include at least one todo item.',\n };\n }\n\n const created = await todoQueries.createMany(options.sessionId, items);\n \n return {\n success: true,\n action: 'add',\n itemsAdded: created.length,\n items: created.map(formatTodoItem),\n };\n }\n\n case 'list': {\n const todos = await todoQueries.getBySession(options.sessionId);\n \n const stats = {\n total: todos.length,\n pending: todos.filter((t: TodoItem) => t.status === 'pending').length,\n inProgress: todos.filter((t: TodoItem) => t.status === 'in_progress').length,\n completed: todos.filter((t: TodoItem) => t.status === 'completed').length,\n cancelled: todos.filter((t: TodoItem) => t.status === 'cancelled').length,\n };\n\n return {\n success: true,\n action: 'list',\n stats,\n items: todos.map(formatTodoItem),\n };\n }\n\n case 'mark': {\n if (!todoId) {\n return {\n success: false,\n error: 'todoId is required for \"mark\" action',\n };\n }\n\n if (!status) {\n return {\n success: false,\n error: 'status is required for \"mark\" action',\n };\n }\n\n const updated = await todoQueries.updateStatus(todoId, status);\n \n if (!updated) {\n return {\n success: false,\n 
error: `Todo item not found: ${todoId}`,\n };\n }\n\n return {\n success: true,\n action: 'mark',\n item: formatTodoItem(updated),\n };\n }\n\n case 'clear': {\n const count = await todoQueries.clearSession(options.sessionId);\n \n return {\n success: true,\n action: 'clear',\n itemsRemoved: count,\n };\n }\n\n default:\n return {\n success: false,\n error: `Unknown action: ${action}`,\n };\n }\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\nfunction formatTodoItem(item: TodoItem) {\n return {\n id: item.id,\n content: item.content,\n status: item.status,\n order: item.order,\n createdAt: item.createdAt.toISOString(),\n };\n}\n\nexport type TodoTool = ReturnType<typeof createTodoTool>;\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { loadAllSkills, loadSkillContent, formatSkillsForContext } from '../skills/index.js';\nimport { skillQueries } from '../db/index.js';\n\nexport interface LoadSkillToolOptions {\n sessionId: string;\n skillsDirectories: string[];\n}\n\nconst loadSkillInputSchema = z.object({\n action: z\n .enum(['list', 'load'])\n .describe('Action to perform: \"list\" to see available skills, \"load\" to load a skill'),\n skillName: z\n .string()\n .optional()\n .describe('For \"load\" action: The name of the skill to load'),\n});\n\nexport function createLoadSkillTool(options: LoadSkillToolOptions) {\n return tool({\n description: `Load a skill document into the conversation context. 
Skills are specialized knowledge files that provide guidance on specific topics like debugging, code review, architecture patterns, etc.\n\nAvailable actions:\n- \"list\": Show all available skills with their descriptions\n- \"load\": Load a specific skill's full content into context\n\nUse this when you need specialized knowledge or guidance for a particular task.\nOnce loaded, a skill's content will be available in the conversation context.`,\n\n inputSchema: loadSkillInputSchema,\n\n execute: async ({ action, skillName }: z.infer<typeof loadSkillInputSchema>) => {\n try {\n switch (action) {\n case 'list': {\n const skills = await loadAllSkills(options.skillsDirectories);\n \n return {\n success: true,\n action: 'list',\n skillCount: skills.length,\n skills: skills.map((s) => ({\n name: s.name,\n description: s.description,\n })),\n formatted: formatSkillsForContext(skills),\n };\n }\n\n case 'load': {\n if (!skillName) {\n return {\n success: false,\n error: 'skillName is required for \"load\" action',\n };\n }\n\n // Check if already loaded\n if (await skillQueries.isLoaded(options.sessionId, skillName)) {\n return {\n success: false,\n error: `Skill \"${skillName}\" is already loaded in this session`,\n };\n }\n\n // Load the skill content\n const skill = await loadSkillContent(skillName, options.skillsDirectories);\n \n if (!skill) {\n const allSkills = await loadAllSkills(options.skillsDirectories);\n return {\n success: false,\n error: `Skill \"${skillName}\" not found`,\n availableSkills: allSkills.map((s) => s.name),\n };\n }\n\n // Record that we loaded this skill\n await skillQueries.load(options.sessionId, skillName);\n\n return {\n success: true,\n action: 'load',\n skillName: skill.name,\n description: skill.description,\n content: skill.content,\n contentLength: skill.content.length,\n };\n }\n\n default:\n return {\n success: false,\n error: `Unknown action: ${action}`,\n };\n }\n } catch (error: any) {\n return {\n success: false,\n error: 
error.message,\n };\n }\n },\n });\n}\n\nexport type LoadSkillTool = ReturnType<typeof createLoadSkillTool>;\n","/**\n * Linter Tool\n * \n * Provides the agent with the ability to check files for lint/type errors\n * using the LSP (Language Server Protocol) integration.\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { resolve, relative, isAbsolute, extname } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { readdir, stat } from 'node:fs/promises';\nimport * as LSP from '../lsp/index.js';\nimport type { Diagnostic } from '../lsp/types.js';\n\nexport interface LinterToolOptions {\n workingDirectory: string;\n}\n\nconst linterInputSchema = z.object({\n paths: z\n .array(z.string())\n .optional()\n .describe('File or directory paths to check for lint errors. If not provided, returns diagnostics for all recently touched files.'),\n fix: z\n .boolean()\n .optional()\n .default(false)\n .describe('Reserved for future use: auto-fix lint errors (not yet implemented)'),\n});\n\n/**\n * Recursively find all supported files in a directory\n */\nasync function findSupportedFiles(\n dir: string,\n workingDirectory: string,\n maxFiles = 50\n): Promise<string[]> {\n const files: string[] = [];\n const supportedExtensions = LSP.getSupportedExtensions();\n\n async function walk(currentDir: string) {\n if (files.length >= maxFiles) return;\n\n try {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n if (files.length >= maxFiles) break;\n\n const fullPath = resolve(currentDir, entry.name);\n\n // Skip node_modules, .git, and other common ignore patterns\n if (entry.isDirectory()) {\n if (['node_modules', '.git', 'dist', 'build', '.next', 'coverage'].includes(entry.name)) {\n continue;\n }\n await walk(fullPath);\n } else if (entry.isFile()) {\n const ext = extname(entry.name);\n if (supportedExtensions.includes(ext)) {\n files.push(fullPath);\n }\n }\n }\n } catch {\n // Ignore permission 
errors etc.\n }\n }\n\n await walk(dir);\n return files;\n}\n\nexport function createLinterTool(options: LinterToolOptions) {\n return tool({\n description: `Check files for linting and type errors using the Language Server Protocol (LSP).\n\nSupports TypeScript, JavaScript, TSX, JSX files.\n\nUsage:\n- \\`linter({})\\` - Get diagnostics for all recently edited files\n- \\`linter({ paths: [\"src/app.ts\"] })\\` - Check specific files\n- \\`linter({ paths: [\"src/\"] })\\` - Check all supported files in a directory\n\nReturns detailed error information including line numbers, error messages, and severity.\nUse this after making changes to verify your code is correct, or proactively to find issues.\n\nWorking directory: ${options.workingDirectory}`,\n\n inputSchema: linterInputSchema,\n\n execute: async ({ paths }: z.infer<typeof linterInputSchema>) => {\n try {\n // If no paths provided, get all diagnostics from LSP\n if (!paths || paths.length === 0) {\n const allDiagnostics = await LSP.getAllDiagnostics();\n \n if (Object.keys(allDiagnostics).length === 0) {\n return {\n success: true,\n message: 'No lint errors found. No files have been analyzed yet - specify paths to check specific files.',\n files: [],\n totalErrors: 0,\n totalWarnings: 0,\n };\n }\n\n return formatDiagnosticsResult(allDiagnostics, options.workingDirectory);\n }\n\n // Process provided paths\n const filesToCheck: string[] = [];\n\n for (const path of paths) {\n const absolutePath = isAbsolute(path)\n ? 
path\n : resolve(options.workingDirectory, path);\n\n if (!existsSync(absolutePath)) {\n continue;\n }\n\n const stats = await stat(absolutePath);\n\n if (stats.isDirectory()) {\n const dirFiles = await findSupportedFiles(absolutePath, options.workingDirectory);\n filesToCheck.push(...dirFiles);\n } else if (stats.isFile()) {\n if (LSP.isSupported(absolutePath)) {\n filesToCheck.push(absolutePath);\n }\n }\n }\n\n if (filesToCheck.length === 0) {\n return {\n success: true,\n message: 'No supported files found to check. Supported extensions: ' + LSP.getSupportedExtensions().join(', '),\n files: [],\n totalErrors: 0,\n totalWarnings: 0,\n };\n }\n\n // Touch all files and wait for diagnostics\n await Promise.all(\n filesToCheck.map(file => LSP.touchFile(file, true))\n );\n\n // Collect diagnostics for all files\n const diagnosticsMap: Record<string, Diagnostic[]> = {};\n\n for (const file of filesToCheck) {\n const diagnostics = await LSP.getDiagnostics(file);\n if (diagnostics.length > 0) {\n diagnosticsMap[file] = diagnostics;\n }\n }\n\n return formatDiagnosticsResult(diagnosticsMap, options.workingDirectory);\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\n/**\n * Format diagnostics into a structured result\n */\nfunction formatDiagnosticsResult(\n diagnosticsMap: Record<string, Diagnostic[]>,\n workingDirectory: string\n) {\n let totalErrors = 0;\n let totalWarnings = 0;\n let totalInfo = 0;\n\n const files: Array<{\n path: string;\n relativePath: string;\n errors: number;\n warnings: number;\n diagnostics: Array<{\n severity: string;\n line: number;\n column: number;\n message: string;\n source?: string;\n code?: string | number;\n }>;\n }> = [];\n\n for (const [filePath, diagnostics] of Object.entries(diagnosticsMap)) {\n const relativePath = relative(workingDirectory, filePath);\n let fileErrors = 0;\n let fileWarnings = 0;\n\n const formattedDiagnostics = diagnostics.map(d => {\n const severity = 
getSeverityString(d.severity);\n \n if (d.severity === LSP.DiagnosticSeverity.Error) {\n fileErrors++;\n totalErrors++;\n } else if (d.severity === LSP.DiagnosticSeverity.Warning) {\n fileWarnings++;\n totalWarnings++;\n } else {\n totalInfo++;\n }\n\n return {\n severity,\n line: d.range.start.line + 1,\n column: d.range.start.character + 1,\n message: d.message,\n source: d.source,\n code: d.code,\n };\n });\n\n files.push({\n path: filePath,\n relativePath,\n errors: fileErrors,\n warnings: fileWarnings,\n diagnostics: formattedDiagnostics,\n });\n }\n\n // Sort by errors (most first)\n files.sort((a, b) => b.errors - a.errors);\n\n const hasIssues = totalErrors > 0 || totalWarnings > 0;\n\n return {\n success: true,\n message: hasIssues\n ? `Found ${totalErrors} error(s) and ${totalWarnings} warning(s) in ${files.length} file(s).`\n : `No lint errors found in ${Object.keys(diagnosticsMap).length || 'any'} file(s).`,\n files,\n totalErrors,\n totalWarnings,\n totalInfo,\n summary: hasIssues\n ? formatSummary(files)\n : undefined,\n };\n}\n\n/**\n * Get severity as a string\n */\nfunction getSeverityString(severity?: number): string {\n switch (severity) {\n case LSP.DiagnosticSeverity.Error:\n return 'error';\n case LSP.DiagnosticSeverity.Warning:\n return 'warning';\n case LSP.DiagnosticSeverity.Information:\n return 'info';\n case LSP.DiagnosticSeverity.Hint:\n return 'hint';\n default:\n return 'error';\n }\n}\n\n/**\n * Format a human-readable summary\n */\nfunction formatSummary(\n files: Array<{\n relativePath: string;\n diagnostics: Array<{\n severity: string;\n line: number;\n column: number;\n message: string;\n }>;\n }>\n): string {\n const lines: string[] = [];\n\n for (const file of files) {\n lines.push(`\\n${file.relativePath}:`);\n for (const d of file.diagnostics.slice(0, 10)) {\n const prefix = d.severity === 'error' ? '❌' : d.severity === 'warning' ? 
'⚠️' : 'ℹ️';\n lines.push(` ${prefix} [${d.line}:${d.column}] ${d.message}`);\n }\n if (file.diagnostics.length > 10) {\n lines.push(` ... and ${file.diagnostics.length - 10} more`);\n }\n }\n\n return lines.join('\\n');\n}\n\nexport type LinterTool = ReturnType<typeof createLinterTool>;\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { createSearchSubagent, SearchResult, SubagentProgressEvent } from '../agent/subagents/index.js';\nimport { truncateOutput } from '../utils/truncate.js';\n\nconst MAX_RESULT_CHARS = 10_000;\n\n/**\n * Progress event for the explore_agent tool (emitted via onProgress callback)\n */\nexport interface SearchToolProgress {\n status: 'started' | 'step' | 'complete' | 'error';\n subagentId?: string;\n stepType?: 'thought' | 'tool_call' | 'tool_result' | 'text';\n stepContent?: string;\n toolName?: string;\n toolInput?: unknown;\n toolOutput?: unknown;\n result?: SearchResult;\n error?: string;\n}\n\nexport interface SearchToolOptions {\n sessionId: string;\n workingDirectory: string;\n /** Callback for progress updates (for streaming to UI) */\n onProgress?: (progress: SearchToolProgress) => void | Promise<void>;\n}\n\n/**\n * Create the explore_agent tool that spawns a SearchSubagent\n * \n * This tool allows the main agent to delegate explore tasks to a specialized\n * mini-agent that uses a smaller, faster model (Gemini 3 Flash Preview).\n * \n * The subagent has access to:\n * - grep: Search for patterns in files\n * - glob: Find files by pattern\n * - read_file: Read file contents\n * - list_dir: List directory contents\n * - semantic_search: Find code by meaning (if indexed)\n * - code_graph: Inspect a symbol's type hierarchy and usage graph via the TypeScript LSP\n * \n * Progress is streamed back to the UI so users can see exploration happening.\n */\nexport function createSearchTool(options: SearchToolOptions) {\n return tool({\n description: `Delegate an explore task to the explore_agent tool. 
Use this when you need to:\n- Find files or code matching a pattern\n- Explore the codebase structure\n- Search for specific functions, classes, or variables\n- Understand how a feature is implemented\n\nThe Explore agent will explore the codebase and return a summary of findings.\nThis is more thorough than a simple grep because it can follow references and understand context.\nIt also has access to semantic search to find code by meaning, not just text.\nIt can also use code_graph to inspect a symbol's type hierarchy, references, and which pages/routes use it.\n\nCRITICAL: The explore agent has ZERO context. It cannot see the conversation, the user's message, devtools data, or any prior context. You MUST pass ALL relevant context via the \"context\" parameter. If the user selected a component (component name, file path, HTML, component stack) or there is a <devtools-context> block, you MUST copy that information into the \"context\" field verbatim. Without it the explore agent is searching blind.`,\n\n inputSchema: z.object({\n query: z.string().describe('What to search for. Be specific about what you\\'re looking for.'),\n context: z.string().describe('ALL context the explore agent needs. It has ZERO context on its own - no conversation history, no devtools data, nothing. You MUST include: any selected component info (name, file path, HTML, component stack), any <devtools-context> block (page URL, path, viewport), and any other relevant details from the user message. The explore agent literally only sees the query and this context field.'),\n }),\n\n execute: async ({ query, context }, toolOptions) => {\n const toolCallId = (toolOptions as any).toolCallId || `explore_agent_${Date.now()}`;\n \n // Emit started event\n await options.onProgress?.({\n status: 'started',\n subagentId: toolCallId,\n });\n \n try {\n const subagent = createSearchSubagent();\n \n // Build the full task including any context the main agent passed along\n // (e.g. 
selected component info, devtools page context, etc.)\n const fullTask = context \n ? `${query}\\n\\nContext: ${context}`\n : query;\n \n // Run the subagent - the context field carries all devtools/component\n // info that the main agent forwarded from the user's message\n const result = await subagent.run({\n task: fullTask,\n sessionId: options.sessionId,\n toolCallId,\n workingDirectory: options.workingDirectory,\n onProgress: async (event: SubagentProgressEvent) => {\n // Map subagent events to explore_agent tool progress\n if (event.type === 'step' && event.step) {\n await options.onProgress?.({\n status: 'step',\n subagentId: event.subagentId,\n stepType: event.step.type,\n stepContent: event.step.content,\n toolName: event.step.toolName,\n toolInput: event.step.toolInput,\n toolOutput: event.step.toolOutput,\n });\n } else if (event.type === 'complete') {\n await options.onProgress?.({\n status: 'complete',\n subagentId: event.subagentId,\n result: event.result as SearchResult,\n });\n } else if (event.type === 'error') {\n await options.onProgress?.({\n status: 'error',\n subagentId: event.subagentId,\n error: event.error,\n });\n }\n },\n });\n \n if (!result.success) {\n return {\n success: false,\n error: result.error || 'Search failed',\n executionId: result.executionId,\n };\n }\n \n const searchResult = result.result!;\n \n // Set the query on the result (parseResult doesn't have access to the original task)\n searchResult.query = query;\n \n // =========================================================\n // Format the result for the main agent\n // The summary is the most important part - it contains the\n // LLM's full analysis with file paths, code snippets, and\n // explanations collected from ALL steps.\n // =========================================================\n \n let formattedResult = '';\n \n // The summary IS the main content - it contains the explore agent's\n // full analysis with paths, snippets, and explanations\n if 
(searchResult.summary) {\n formattedResult += searchResult.summary;\n }\n \n // Add structured findings as supplementary data\n if (searchResult.findings.length > 0) {\n formattedResult += `\\n\\n### Files & Matches Found (${searchResult.findings.length} items)\\n`;\n \n for (const finding of searchResult.findings) {\n if (finding.type === 'match') {\n formattedResult += `\\n- **${finding.path}:${finding.lineNumber}** - \\`${truncateOutput(finding.content || '', 150)}\\``;\n } else if (finding.type === 'semantic') {\n formattedResult += `\\n- **${finding.path}:${finding.lineNumber}** [semantic] ${finding.context ? `(${finding.context})` : ''}`;\n if (finding.content) {\n formattedResult += `\\n \\`\\`\\`\\n ${truncateOutput(finding.content, 200)}\\n \\`\\`\\``;\n }\n } else if (finding.type === 'file') {\n formattedResult += `\\n- **${finding.path}** ${finding.context ? `(${finding.context})` : ''}`;\n }\n }\n }\n \n if (!formattedResult.trim()) {\n formattedResult = 'The explore agent ran but did not find relevant results for this query. 
Try being more specific or using different search terms.';\n }\n \n return {\n success: true,\n query: searchResult.query,\n summary: searchResult.summary,\n findings: searchResult.findings,\n matchCount: searchResult.matchCount,\n filesSearched: searchResult.filesSearched,\n formattedResult: truncateOutput(formattedResult, MAX_RESULT_CHARS),\n executionId: result.executionId,\n stepsCount: result.steps.length,\n };\n } catch (error: any) {\n await options.onProgress?.({\n status: 'error',\n error: error.message,\n });\n \n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\nexport type SearchTool = ReturnType<typeof createSearchTool>;\n","import {\n streamText,\n generateText,\n stepCountIs,\n type ToolSet,\n} from 'ai';\nimport { nanoid } from 'nanoid';\nimport { resolveModel, SUBAGENT_MODELS } from './model.js';\nimport { subagentQueries, SubagentExecution, SubagentStep } from '../db/index.js';\n\n/**\n * Progress event emitted by subagents\n */\nexport interface SubagentProgressEvent {\n type: 'step' | 'text' | 'tool_call' | 'tool_result' | 'complete' | 'error';\n subagentId: string;\n subagentType: string;\n step?: SubagentStep;\n text?: string;\n toolName?: string;\n toolInput?: unknown;\n toolOutput?: unknown;\n result?: unknown;\n error?: string;\n}\n\n/**\n * Options for running a subagent\n */\nexport interface SubagentRunOptions {\n task: string;\n sessionId: string;\n toolCallId: string;\n workingDirectory: string;\n /** Callback for progress events */\n onProgress?: (event: SubagentProgressEvent) => void | Promise<void>;\n /** Abort signal */\n abortSignal?: AbortSignal;\n /** Additional context to inject into the subagent's system prompt (e.g. 
devtools context) */\n additionalContext?: string;\n}\n\n/**\n * Result from a subagent execution\n */\nexport interface SubagentResult<T = unknown> {\n success: boolean;\n result?: T;\n error?: string;\n steps: SubagentStep[];\n executionId: string;\n}\n\n/**\n * Base class for subagents.\n * \n * Subagents are lightweight agents that perform specific tasks using smaller,\n * faster models. They're spawned by the main agent via tools and report progress\n * back to the UI.\n * \n * To create a new subagent type:\n * 1. Extend this class\n * 2. Implement `getTools()` to return the tools available to this subagent\n * 3. Implement `getSystemPrompt()` to return the system prompt\n * 4. Optionally override `parseResult()` to structure the final output\n */\nexport abstract class Subagent<TResult = unknown> {\n /** Unique identifier for this subagent type */\n abstract readonly type: string;\n \n /** Human-readable name for this subagent */\n abstract readonly name: string;\n \n /** Model to use (defaults to gemini-3-flash-preview) */\n protected model: string;\n \n /** Maximum steps before stopping */\n protected maxSteps: number = 20;\n \n constructor(model?: string) {\n this.model = model || SUBAGENT_MODELS.default;\n }\n \n /**\n * Get the tools available to this subagent\n */\n protected abstract getTools(options: SubagentRunOptions): ToolSet;\n \n /**\n * Get the system prompt for this subagent\n */\n protected abstract getSystemPrompt(options: SubagentRunOptions): string;\n \n /**\n * Parse the final result from the subagent's output.\n * Override this to structure the result for your subagent type.\n */\n protected parseResult(text: string, steps: SubagentStep[]): TResult {\n return { text, steps } as TResult;\n }\n \n /**\n * Run the subagent with streaming progress updates\n */\n async run(options: SubagentRunOptions): Promise<SubagentResult<TResult>> {\n const { task, sessionId, toolCallId, onProgress, abortSignal } = options;\n const steps: SubagentStep[] = 
[];\n \n // Create execution record in database\n const execution = await subagentQueries.create({\n sessionId,\n toolCallId,\n subagentType: this.type,\n task,\n model: this.model,\n });\n \n const addStep = async (step: Omit<SubagentStep, 'id' | 'timestamp'>) => {\n const fullStep: SubagentStep = {\n id: nanoid(8),\n timestamp: Date.now(),\n ...step,\n };\n steps.push(fullStep);\n \n // Update database\n await subagentQueries.addStep(execution.id, fullStep);\n \n // Emit progress\n await onProgress?.({\n type: 'step',\n subagentId: execution.id,\n subagentType: this.type,\n step: fullStep,\n });\n };\n \n try {\n const tools = this.getTools(options);\n const systemPrompt = this.getSystemPrompt(options);\n \n // Run the subagent\n const result = await generateText({\n model: resolveModel(this.model) as any,\n system: systemPrompt,\n messages: [\n { role: 'user', content: task }\n ],\n tools,\n stopWhen: stepCountIs(this.maxSteps),\n abortSignal,\n onStepFinish: async (step) => {\n // Record text output\n if (step.text) {\n await addStep({\n type: 'text',\n content: step.text,\n });\n await onProgress?.({\n type: 'text',\n subagentId: execution.id,\n subagentType: this.type,\n text: step.text,\n });\n }\n \n // Record tool calls\n if (step.toolCalls) {\n for (const toolCall of step.toolCalls) {\n await addStep({\n type: 'tool_call',\n content: `Calling ${toolCall.toolName}`,\n toolName: toolCall.toolName,\n toolInput: toolCall.input,\n });\n await onProgress?.({\n type: 'tool_call',\n subagentId: execution.id,\n subagentType: this.type,\n toolName: toolCall.toolName,\n toolInput: toolCall.input,\n });\n }\n }\n \n // Record tool results\n if (step.toolResults) {\n for (const toolResult of step.toolResults) {\n await addStep({\n type: 'tool_result',\n content: `Result from ${toolResult.toolName}`,\n toolName: toolResult.toolName,\n toolOutput: toolResult.output,\n });\n await onProgress?.({\n type: 'tool_result',\n subagentId: execution.id,\n subagentType: 
this.type,\n toolName: toolResult.toolName,\n toolOutput: toolResult.output,\n });\n }\n }\n },\n });\n \n // Parse the final result\n const parsedResult = this.parseResult(result.text, steps);\n \n // Mark as complete\n await subagentQueries.complete(execution.id, parsedResult);\n \n await onProgress?.({\n type: 'complete',\n subagentId: execution.id,\n subagentType: this.type,\n result: parsedResult,\n });\n \n return {\n success: true,\n result: parsedResult,\n steps,\n executionId: execution.id,\n };\n } catch (error: any) {\n const errorMessage = error.message || 'Unknown error';\n \n // Mark as error\n await subagentQueries.markError(execution.id, errorMessage);\n \n await onProgress?.({\n type: 'error',\n subagentId: execution.id,\n subagentType: this.type,\n error: errorMessage,\n });\n \n return {\n success: false,\n error: errorMessage,\n steps,\n executionId: execution.id,\n };\n }\n }\n \n /**\n * Run with streaming (for real-time progress in UI)\n */\n async *stream(options: SubagentRunOptions): AsyncGenerator<SubagentProgressEvent> {\n const events: SubagentProgressEvent[] = [];\n let resolveNext: ((event: SubagentProgressEvent | null) => void) | null = null;\n let done = false;\n \n // Queue for events\n const eventQueue: SubagentProgressEvent[] = [];\n \n // Start the run with progress callback\n const runPromise = this.run({\n ...options,\n onProgress: async (event) => {\n eventQueue.push(event);\n if (resolveNext) {\n resolveNext(eventQueue.shift()!);\n resolveNext = null;\n }\n },\n }).then((result) => {\n done = true;\n if (resolveNext) {\n resolveNext(null);\n }\n return result;\n });\n \n // Yield events as they come\n while (!done || eventQueue.length > 0) {\n if (eventQueue.length > 0) {\n yield eventQueue.shift()!;\n } else if (!done) {\n // Wait for next event\n const event = await new Promise<SubagentProgressEvent | null>((resolve) => {\n resolveNext = resolve;\n });\n if (event) {\n yield event;\n }\n }\n }\n \n // Wait for completion\n 
await runPromise;\n }\n}\n\n// Export types\nexport type { SubagentStep };\n","import { tool, type ToolSet } from 'ai';\nimport { z } from 'zod';\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { readFile, stat, readdir } from 'node:fs/promises';\nimport { resolve, relative, isAbsolute } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { Subagent, SubagentRunOptions, SubagentStep } from '../subagent.js';\nimport { SUBAGENT_MODELS } from '../model.js';\nimport { truncateOutput } from '../../utils/truncate.js';\nimport { isVectorGatewayConfigured, checkIndexExists } from '../../semantic/index.js';\nimport { createCodeGraphTool } from '../../tools/code-graph.js';\n\nconst execAsync = promisify(exec);\n\nconst MAX_OUTPUT_CHARS = 10_000;\nconst MAX_FILE_SIZE = 1 * 1024 * 1024; // 1MB for explore subagent\nconst MAX_SUMMARY_CHARS = 6_000; // Cap summary returned to main agent\nconst MAX_FINDINGS = 20; // Cap findings returned\n\n/**\n * Search result from the Explore agent\n */\nexport interface SearchResult {\n query: string;\n summary: string;\n findings: SearchFinding[];\n filesSearched: number;\n matchCount: number;\n}\n\nexport interface SearchFinding {\n type: 'file' | 'match' | 'semantic';\n path: string;\n content?: string;\n lineNumber?: number;\n relevance: 'high' | 'medium' | 'low';\n context?: string;\n}\n\n/**\n * SearchSubagent - A mini-agent specialized for exploring codebases.\n * \n * Uses a smaller, faster model (Gemini 3 Flash Preview) and has access to:\n * - grep: Search for patterns in files\n * - glob: Find files by pattern\n * - read_file: Read file contents\n * - list_dir: List directory contents\n * - semantic_search: Find code by meaning (if indexed)\n * - code_graph: Inspect a symbol's type hierarchy and usage graph via the TypeScript LSP\n * \n * Returns structured, concise search results to the main agent.\n * \n * IMPORTANT: The summary collects ALL text the LLM generates across 
all steps,\n * not just the final step. This ensures the analysis and discoveries are preserved\n * and sent back to the main agent.\n */\nexport class SearchSubagent extends Subagent<SearchResult> {\n readonly type = 'search';\n readonly name = 'Explore Agent';\n \n constructor(model?: string) {\n super(model || SUBAGENT_MODELS.search);\n this.maxSteps = 10; // Enough steps for thorough exploration\n }\n \n protected getSystemPrompt(options: SubagentRunOptions): string {\n const contextBlock = options.additionalContext\n ? `\\n## Active Context (from devtools)\\n${options.additionalContext}\\n\\nUse this context to understand what the user is currently looking at. If they reference \"this page\", \"this component\", \"this screen\", etc., this context tells you which page/route they mean. Search for files related to this page/route first.\\n`\n : '';\n\n return `You are an Explore agent - a READ-ONLY file search and code analysis specialist. Your job is to find relevant files, understand code structure, and return clear findings to the main agent. You MUST NOT create, modify, or delete any files.\n\nWorking Directory: ${options.workingDirectory}\n${contextBlock}\n## Available Tools\n- **semantic_search**: Search the codebase by meaning (finds code related to a concept). Use this FIRST when available - it's the fastest way to explore.\n- **grep**: Search for exact patterns in files using ripgrep. Best for exact symbol/string matches.\n- **glob**: Find files matching a name pattern. Best for file discovery.\n- **read_file**: Read contents of a specific file. Use to examine code found in searches.\n- **list_dir**: List directory contents. Use to understand project structure.\n- **code_graph**: Inspect a symbol's type hierarchy, references, and usage graph via the TypeScript language server. Returns type signatures, all files that reference the symbol, and which pages/routes contain it. 
Best for understanding component/function relationships and impact analysis.\n\n## Search Strategy\n\n1. **Start with semantic_search** if available - it finds code by meaning, which is the fastest way to explore\n2. **Use grep** for exact symbol/string matches (function names, class names, imports)\n3. **Use code_graph** when you need to understand a symbol's type signature, what depends on it, or which pages use it. It's much more precise than grep for understanding relationships.\n4. **Use glob** for file discovery by name patterns\n5. **Read key files** to get actual code content and understand context\n6. **Run searches in PARALLEL** - make multiple tool calls at once to cover different angles simultaneously. This is critical for speed.\n\n### Tool Selection Guide\n- Know the exact name? Use **grep** (e.g. \\`getUserById\\`, \\`class AuthService\\`)\n- Need type info, references, or impact analysis? Use **code_graph** (e.g. \\`code_graph({ symbol: \"UserCard\" })\\`)\n- Exploring a concept? Use **semantic_search** (e.g. \"how does authentication work\")\n- Looking for files? Use **glob** (e.g. \\`**/*.config.ts\\`, \\`**/auth/**\\`)\n- Need file content? Use **read_file** with optional line ranges for large files\n\n## CRITICAL: Your text output IS the result\n\nThe text you write in your responses is what gets sent back to the main agent as the summary. The main agent has NO other context about what you found. You MUST include:\n- **Exact file paths** (relative to working directory)\n- **Line numbers** where relevant code is found\n- **Key code snippets** showing the important parts\n- **How pieces connect together** - explain relationships between files\n- **What you actually found**, not just metadata\n\nALWAYS end with a comprehensive summary of your findings. This summary IS the data that the main agent receives.\n\n## Constraints\n- You are READ-ONLY. Do NOT attempt to create, write, edit, or delete files.\n- Be fast. Return results quickly. 
Don't over-explore when you've found what's needed.\n- Be thorough but efficient. If you find the answer in 2 searches, stop there.\n- No emojis in output.\n\n## Example good output:\n\"Found the authentication system in src/auth/:\n- \\`src/auth/provider.ts\\` (lines 15-45): AuthProvider class handles JWT token validation\n- \\`src/auth/middleware.ts\\` (lines 8-20): Express middleware that checks auth headers\n- \\`src/auth/hooks.ts\\`: useAuth() hook used by React components\nThe auth flow: middleware validates JWT -> provider decodes token -> user context set\"\n\n## Example bad output:\n\"I searched for auth and found some files.\" (too vague, no paths, no details)\n\nKeep it concise but INCLUDE THE ACTUAL DATA.`;\n }\n \n protected async getToolsAsync(options: SubagentRunOptions): Promise<ToolSet> {\n const workingDirectory = options.workingDirectory;\n \n const tools: ToolSet = {\n grep: tool({\n description: 'Search for patterns in files using ripgrep. Returns matching lines with file paths and line numbers.',\n inputSchema: z.object({\n pattern: z.string().describe('The regex pattern to search for'),\n path: z.string().optional().describe('Subdirectory or file to search in (relative to working directory)'),\n fileType: z.string().optional().describe('File type to filter (e.g., \"ts\", \"js\", \"py\")'),\n maxResults: z.number().optional().default(50).describe('Maximum number of results to return'),\n }),\n execute: async ({ pattern, path, fileType, maxResults }) => {\n try {\n const searchPath = path \n ? 
resolve(workingDirectory, path) \n : workingDirectory;\n \n let args = ['rg', '--line-number', '--no-heading'];\n \n if (fileType) {\n args.push('--type', fileType);\n }\n \n args.push('--max-count', String(maxResults || 50));\n args.push('--', pattern, searchPath);\n \n const { stdout, stderr } = await execAsync(args.join(' '), {\n cwd: workingDirectory,\n maxBuffer: 5 * 1024 * 1024,\n timeout: 30000,\n });\n \n const output = truncateOutput(stdout || 'No matches found', MAX_OUTPUT_CHARS);\n const matchCount = (stdout || '').split('\\n').filter(Boolean).length;\n \n return {\n success: true,\n output,\n matchCount,\n pattern,\n };\n } catch (error: any) {\n // rg returns exit code 1 when no matches found\n if (error.code === 1 && !error.stderr) {\n return {\n success: true,\n output: 'No matches found',\n matchCount: 0,\n pattern,\n };\n }\n return {\n success: false,\n error: error.message,\n pattern,\n };\n }\n },\n }),\n \n glob: tool({\n description: 'Find files matching a glob pattern. Returns list of matching file paths.',\n inputSchema: z.object({\n pattern: z.string().describe('Glob pattern (e.g., \"**/*.ts\", \"src/**/*.tsx\", \"*.json\")'),\n maxResults: z.number().optional().default(100).describe('Maximum number of files to return'),\n }),\n execute: async ({ pattern, maxResults }) => {\n try {\n // Use find command with pattern matching\n const { stdout } = await execAsync(\n `find . -type f -name \"${pattern.replace('**/', '')}\" 2>/dev/null | head -n ${maxResults || 100}`,\n {\n cwd: workingDirectory,\n timeout: 30000,\n }\n );\n \n const files = stdout.trim().split('\\n').filter(Boolean);\n \n return {\n success: true,\n files,\n count: files.length,\n pattern,\n };\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n pattern,\n };\n }\n },\n }),\n \n read_file: tool({\n description: 'Read the contents of a file. 
Use this to examine specific files found in search.',\n inputSchema: z.object({\n path: z.string().describe('Path to the file (relative to working directory or absolute)'),\n startLine: z.number().optional().describe('Start reading from this line (1-indexed)'),\n endLine: z.number().optional().describe('Stop reading at this line (1-indexed, inclusive)'),\n }),\n execute: async ({ path, startLine, endLine }) => {\n try {\n const absolutePath = isAbsolute(path)\n ? path\n : resolve(workingDirectory, path);\n \n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `File not found: ${path}`,\n };\n }\n \n const stats = await stat(absolutePath);\n if (stats.size > MAX_FILE_SIZE) {\n return {\n success: false,\n error: `File too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). Use startLine/endLine to read portions.`,\n };\n }\n \n let content = await readFile(absolutePath, 'utf-8');\n \n if (startLine !== undefined || endLine !== undefined) {\n const lines = content.split('\\n');\n const start = (startLine ?? 1) - 1;\n const end = endLine ?? lines.length;\n content = lines.slice(start, end).join('\\n');\n }\n \n return {\n success: true,\n path: relative(workingDirectory, absolutePath),\n content: truncateOutput(content, MAX_OUTPUT_CHARS),\n lineCount: content.split('\\n').length,\n };\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n }),\n \n list_dir: tool({\n description: 'List contents of a directory. Shows files and subdirectories.',\n inputSchema: z.object({\n path: z.string().optional().default('.').describe('Directory path (relative to working directory)'),\n recursive: z.boolean().optional().default(false).describe('List recursively (be careful with large directories)'),\n maxDepth: z.number().optional().default(2).describe('Maximum depth for recursive listing'),\n }),\n execute: async ({ path, recursive, maxDepth }) => {\n try {\n const absolutePath = isAbsolute(path)\n ? 
path\n : resolve(workingDirectory, path);\n \n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `Directory not found: ${path}`,\n };\n }\n \n const stats = await stat(absolutePath);\n if (!stats.isDirectory()) {\n return {\n success: false,\n error: `Not a directory: ${path}`,\n };\n }\n \n if (recursive) {\n // Use find for recursive listing\n const { stdout } = await execAsync(\n `find . -maxdepth ${maxDepth} -type f 2>/dev/null | head -n 200`,\n {\n cwd: absolutePath,\n timeout: 10000,\n }\n );\n \n const files = stdout.trim().split('\\n').filter(Boolean);\n return {\n success: true,\n path: relative(workingDirectory, absolutePath) || '.',\n files,\n count: files.length,\n recursive: true,\n };\n } else {\n const entries = await readdir(absolutePath, { withFileTypes: true });\n const items = entries.slice(0, 200).map(e => ({\n name: e.name,\n type: e.isDirectory() ? 'directory' : 'file',\n }));\n \n return {\n success: true,\n path: relative(workingDirectory, absolutePath) || '.',\n items,\n count: items.length,\n };\n }\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n }),\n code_graph: createCodeGraphTool({\n workingDirectory,\n }),\n };\n \n // Add semantic_search if vector gateway is configured and index exists\n try {\n if (isVectorGatewayConfigured()) {\n const hasIndex = await checkIndexExists(workingDirectory);\n if (hasIndex) {\n const { createSemanticSearchTool } = await import('../../tools/semantic-search.js');\n tools.semantic_search = createSemanticSearchTool({\n workingDirectory,\n });\n }\n }\n } catch {\n // Silently skip semantic search if unavailable\n }\n \n return tools;\n }\n \n // Override getTools to return sync version (base class requires sync)\n // The actual async tool loading happens in the run() override\n protected getTools(options: SubagentRunOptions): ToolSet {\n // This won't be called directly - we override run() to use getToolsAsync()\n return {};\n }\n \n /**\n 
* Override run to use async tool loading (for semantic search availability check)\n */\n async run(options: SubagentRunOptions): Promise<import('../subagent.js').SubagentResult<SearchResult>> {\n // Load tools asynchronously (to check semantic search availability)\n const asyncTools = await this.getToolsAsync(options);\n \n // Temporarily override getTools to return our async-loaded tools\n const originalGetTools = this.getTools.bind(this);\n this.getTools = () => asyncTools;\n \n try {\n return await super.run(options);\n } finally {\n this.getTools = originalGetTools;\n }\n }\n \n protected parseResult(text: string, steps: SubagentStep[]): SearchResult {\n // =========================================================\n // KEY FIX: Collect ALL text from ALL steps as the summary.\n // \n // In a multi-step agent, result.text is only the LAST step's text.\n // If the last step was tool calls, result.text is empty.\n // The LLM's analysis (where it explains what it found) happens\n // in intermediate text steps - we need ALL of it.\n // =========================================================\n \n const allTexts: string[] = [];\n \n for (const step of steps) {\n if (step.type === 'text' && step.content) {\n allTexts.push(step.content);\n }\n }\n \n // Use the collected text from all steps, falling back to result.text\n const fullSummary = allTexts.length > 0 \n ? 
allTexts.join('\\n\\n')\n : text;\n \n // Extract structured findings from tool results\n const findings: SearchFinding[] = [];\n let filesSearched = 0;\n let matchCount = 0;\n \n for (const step of steps) {\n if (step.type === 'tool_result' && step.toolOutput) {\n const output = step.toolOutput as any;\n \n if (step.toolName === 'grep' && output.success) {\n matchCount += output.matchCount || 0;\n \n // Parse grep output to extract findings with content\n const lines = (output.output || '').split('\\n').filter(Boolean);\n for (const line of lines.slice(0, 20)) {\n // Format: path:line:content\n const match = line.match(/^([^:]+):(\\d+):(.*)$/);\n if (match) {\n findings.push({\n type: 'match',\n path: match[1],\n lineNumber: parseInt(match[2], 10),\n content: match[3].trim(),\n relevance: 'high',\n });\n }\n }\n } else if (step.toolName === 'glob' && output.success) {\n filesSearched += output.count || 0;\n \n for (const file of (output.files || []).slice(0, 15)) {\n findings.push({\n type: 'file',\n path: file,\n relevance: 'medium',\n });\n }\n } else if (step.toolName === 'read_file' && output.success) {\n // Include actual content snippet from read files\n const snippet = output.content \n ? truncateOutput(output.content, 500)\n : undefined;\n \n findings.push({\n type: 'file',\n path: output.path,\n content: snippet,\n relevance: 'high',\n context: `${output.lineCount} lines`,\n });\n } else if (step.toolName === 'semantic_search' && output.success) {\n // Include semantic search results\n const matches = output.matches || [];\n for (const m of matches.slice(0, 10)) {\n findings.push({\n type: 'semantic',\n path: m.filePath,\n lineNumber: m.startLine,\n content: m.snippet ? truncateOutput(m.snippet, 300) : undefined,\n relevance: m.score > 0.8 ? 'high' : m.score > 0.5 ? 
'medium' : 'low',\n context: m.symbolName || m.language,\n });\n }\n } else if (step.toolName === 'code_graph' && output.success) {\n // Include code_graph results — the definition file + referenced pages\n matchCount += output.referenceCount || 0;\n \n if (output.filePath) {\n findings.push({\n type: 'file',\n path: output.filePath,\n lineNumber: output.line,\n content: output.typeInfo ? truncateOutput(output.typeInfo, 300) : undefined,\n relevance: 'high',\n context: `${output.kind || 'symbol'}${output.referenceCount ? `, ${output.referenceCount} refs` : ''}`,\n });\n }\n \n // Add page files as high-relevance findings\n for (const page of (output.pages || []).slice(0, 10)) {\n findings.push({\n type: 'file',\n path: page.path,\n relevance: 'high',\n context: page.route ? `route: ${page.route}` : 'page',\n });\n }\n }\n }\n }\n \n // Truncate summary if too long, but preserve the content\n const cappedSummary = truncateOutput(fullSummary, MAX_SUMMARY_CHARS);\n \n return {\n query: '', // Set by the caller (search tool) from the original task\n summary: cappedSummary,\n findings: findings.slice(0, MAX_FINDINGS),\n filesSearched,\n matchCount,\n };\n }\n}\n\n// Factory function\nexport function createSearchSubagent(model?: string): SearchSubagent {\n return new SearchSubagent(model);\n}\n","/**\n * Code Graph Tool\n * \n * Uses the TypeScript language server to inspect a symbol's type hierarchy\n * and usage graph. 
Given a symbol name, traverses references up the component\n * tree to find which pages/routes contain it, along with type signatures.\n * \n * This gives the agent structured context about code relationships without\n * overloading it with full file contents.\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { resolve, relative, isAbsolute, basename } from 'node:path';\nimport { readFile, readdir } from 'node:fs/promises';\nimport { existsSync } from 'node:fs';\nimport { fileURLToPath } from 'node:url';\nimport { execFileSync } from 'node:child_process';\nimport * as LSP from '../lsp/index.js';\nimport type { DocumentSymbol, Location } from '../lsp/types.js';\nimport { SymbolKind } from '../lsp/types.js';\n\nexport interface CodeGraphToolOptions {\n workingDirectory: string;\n}\n\nconst codeGraphInputSchema = z.object({\n symbol: z.string().describe(\n 'The symbol name to inspect (function, component, class, type, variable, etc.)'\n ),\n filePath: z.string().optional().describe(\n 'File path where the symbol is defined. If omitted, searches the workspace via grep.'\n ),\n depth: z.number().optional().default(2).describe(\n 'How many levels of references to traverse upward (default: 2, max: 3). Level 1 = direct usages, level 2 = usages of those usages.'\n ),\n});\n\n// ============================================================\n// Helpers\n// ============================================================\n\n/** Check if a file is a Next.js/React page or layout file */\nfunction isPageFile(filePath: string): boolean {\n const normalized = filePath.replace(/\\\\/g, '/');\n // App Router: app/page.tsx, app/team/page.tsx, app/admin/users/layout.tsx, etc.\n // The (.+\\/)? 
makes the intermediate path optional (handles root app/page.tsx)\n if (/\\/app\\/(.+\\/)?(page|layout|loading|error|not-found)\\.(tsx?|jsx?)$/.test(normalized)) return true;\n // Pages Router: any file under pages/ except _app, _document, _error, and api/\n if (/\\/pages\\/(?!_|api\\/).+\\.(tsx?|jsx?)$/.test(normalized)) return true;\n return false;\n}\n\n/** Extract route path from a page file path (e.g. app/team/page.tsx → /team) */\nfunction extractRoutePath(filePath: string, workingDirectory: string): string | undefined {\n const rel = relative(workingDirectory, filePath).replace(/\\\\/g, '/');\n // App Router: app/page.tsx → /, app/team/page.tsx → /team\n // Use ((?:\\/[^/]+)*?) to optionally capture path segments between app/ and page.tsx\n const appMatch = rel.match(/(?:src\\/)?app((?:\\/[^/]+)*?)\\/(?:page|layout|loading|error|not-found)\\.\\w+$/);\n if (appMatch) return appMatch[1] || '/';\n // Pages Router: pages/about.tsx → /about, pages/blog/[slug].tsx → /blog/[slug]\n const pagesMatch = rel.match(/(?:src\\/)?pages(\\/.*?)(?:\\/index)?\\.\\w+$/);\n if (pagesMatch) return pagesMatch[1] || '/';\n return undefined;\n}\n\n/** Human-readable symbol kind */\nfunction symbolKindName(kind: SymbolKind): string {\n const names: Record<number, string> = {\n [SymbolKind.Class]: 'class',\n [SymbolKind.Function]: 'function',\n [SymbolKind.Method]: 'method',\n [SymbolKind.Property]: 'property',\n [SymbolKind.Variable]: 'variable',\n [SymbolKind.Interface]: 'interface',\n [SymbolKind.Enum]: 'enum',\n [SymbolKind.Constant]: 'constant',\n [SymbolKind.Constructor]: 'constructor',\n [SymbolKind.Module]: 'module',\n [SymbolKind.Namespace]: 'namespace',\n [SymbolKind.TypeParameter]: 'type_param',\n [SymbolKind.Field]: 'field',\n [SymbolKind.EnumMember]: 'enum_member',\n [SymbolKind.Object]: 'object',\n };\n return names[kind] || 'symbol';\n}\n\n/** Find the tightest-enclosing symbol for a position in a document symbol tree.\n * LSP ranges are [start, end) — end position is 
exclusive. */\nfunction findContainingSymbol(\n symbols: DocumentSymbol[],\n line: number,\n character: number,\n): DocumentSymbol | null {\n for (const sym of symbols) {\n if (!sym.range) continue; // guard against malformed symbols\n const { start, end } = sym.range;\n const afterStart = line > start.line || (line === start.line && character >= start.character);\n // end is exclusive: position must be strictly before end\n const beforeEnd = line < end.line || (line === end.line && character < end.character);\n if (afterStart && beforeEnd) {\n if (sym.children?.length) {\n const child = findContainingSymbol(sym.children, line, character);\n if (child) return child;\n }\n return sym;\n }\n }\n return null;\n}\n\n/** Find a symbol by name anywhere in the document symbol tree */\nfunction findSymbolByName(symbols: DocumentSymbol[], name: string): DocumentSymbol | null {\n for (const sym of symbols) {\n if (sym.name === name && sym.selectionRange) return sym;\n if (sym.children) {\n const found = findSymbolByName(sym.children, name);\n if (found) return found;\n }\n }\n return null;\n}\n\n/** Strip markdown code fences from hover output */\nfunction cleanHoverText(text: string): string {\n return text.replace(/```\\w*\\n?/g, '').replace(/\\n```/g, '').trim();\n}\n\n/** Use ripgrep to find a symbol definition in the workspace (with Node.js fallback) */\nasync function grepForSymbol(symbol: string, workingDirectory: string): Promise<{ filePath: string; line: number; char: number } | null> {\n // Escape regex-special chars in the symbol name\n const escaped = symbol.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n\n // Try ripgrep first (fast)\n const rgPatterns = [\n `(export\\\\s+)?(default\\\\s+)?(function|const|let|var|class|interface|type|enum)\\\\s+${escaped}\\\\b`,\n `(export\\\\s+)?(default\\\\s+)?\\\\b${escaped}\\\\s*[=:(]`,\n ];\n\n for (const pattern of rgPatterns) {\n try {\n const result = execFileSync('rg', [\n '-n', '--no-heading',\n '-e', pattern,\n 
'--glob', '*.{ts,tsx,js,jsx}',\n '-m', '5',\n ], {\n cwd: workingDirectory,\n encoding: 'utf-8',\n timeout: 5000,\n stdio: ['pipe', 'pipe', 'pipe'],\n }).trim();\n\n if (result) {\n const firstLine = result.split('\\n')[0];\n const match = firstLine.match(/^(.+?):(\\d+):(.*)/);\n if (match) {\n const col = match[3].indexOf(symbol);\n return {\n filePath: resolve(workingDirectory, match[1]),\n line: parseInt(match[2]) - 1,\n char: col >= 0 ? col : 0,\n };\n }\n }\n } catch {\n // rg not found or no matches — fall through\n }\n }\n\n // Fallback: recursive Node.js search (handles environments without rg)\n const defPattern = new RegExp(\n `(export|function|const|let|var|class|interface|type|enum)\\\\s+.*\\\\b${escaped}\\\\b`\n );\n const SUPPORTED_EXTS = new Set(['.ts', '.tsx', '.js', '.jsx']);\n const IGNORED_DIRS = new Set(['node_modules', '.git', 'dist', 'build', '.next', 'coverage']);\n\n async function search(dir: string, maxFiles: number): Promise<{ filePath: string; line: number; char: number } | null> {\n if (maxFiles <= 0) return null;\n let remaining = maxFiles;\n\n try {\n const entries = await readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (remaining <= 0) return null;\n const fullPath = resolve(dir, entry.name);\n\n if (entry.isDirectory()) {\n if (IGNORED_DIRS.has(entry.name) || entry.name.startsWith('.')) continue;\n const found = await search(fullPath, remaining);\n if (found) return found;\n remaining -= 10; // estimate\n } else if (entry.isFile()) {\n const ext = entry.name.substring(entry.name.lastIndexOf('.'));\n if (!SUPPORTED_EXTS.has(ext)) continue;\n remaining--;\n\n const content = await readFile(fullPath, 'utf-8');\n const lines = content.split('\\n');\n for (let i = 0; i < lines.length; i++) {\n if (defPattern.test(lines[i])) {\n const col = lines[i].indexOf(symbol);\n if (col >= 0) {\n return { filePath: fullPath, line: i, char: col };\n }\n }\n }\n }\n }\n } catch { /* permission errors etc */ }\n return 
null;\n }\n\n return search(workingDirectory, 200);\n}\n\n// ============================================================\n// Main tool\n// ============================================================\n\nconst MAX_REF_FILES = 15;\nconst MAX_LEVEL2_PARENTS = 8;\nconst MAX_LEVEL2_SYMBOLS_PER_PARENT = 3;\n\nexport function createCodeGraphTool(options: CodeGraphToolOptions) {\n return tool({\n description: `Inspect a symbol's type information and usage graph using the TypeScript language server.\n\nGiven a symbol name (function, component, class, type, etc.), this tool will:\n1. Find its definition and full type signature (parameters, return type)\n2. Find all references — what components/functions/files use this symbol\n3. Identify which pages/routes contain it in their component tree\n4. Show the file's symbol structure for surrounding context\n\nUse this to understand:\n- Component hierarchies (what renders what, which pages are affected)\n- Type signatures and parameter/return types before making changes\n- How deeply a symbol is used across the codebase\n- What will break if you change something\n\nSupports TypeScript, JavaScript, TSX, JSX files.\nWorking directory: ${options.workingDirectory}`,\n\n inputSchema: codeGraphInputSchema,\n\n execute: async ({ symbol, filePath, depth }: z.infer<typeof codeGraphInputSchema>) => {\n const maxDepth = Math.min(depth ?? 2, 3);\n\n try {\n // ========================================\n // Step 1: Locate the symbol's definition\n // ========================================\n let defFilePath: string | undefined;\n let defLine = 0;\n let defChar = 0;\n let defSymbol: DocumentSymbol | null = null;\n\n if (filePath) {\n const absPath = isAbsolute(filePath)\n ? filePath\n : resolve(options.workingDirectory, filePath);\n\n if (!existsSync(absPath)) {\n return { success: false, error: `File not found: ${filePath}` };\n }\n\n if (!LSP.isSupported(absPath)) {\n return { success: false, error: `File type not supported. 
Supports: ${LSP.getSupportedExtensions().join(', ')}` };\n }\n\n // Open file in LSP and wait for it to be processed\n await LSP.touchFile(absPath, true);\n const symbols = await LSP.getDocumentSymbols(absPath);\n defSymbol = findSymbolByName(symbols, symbol);\n\n if (defSymbol) {\n defFilePath = absPath;\n defLine = defSymbol.selectionRange.start.line;\n defChar = defSymbol.selectionRange.start.character;\n } else {\n // Fall back to text search in the file\n const content = await readFile(absPath, 'utf-8');\n const lines = content.split('\\n');\n // First pass: look for definition patterns\n const defPattern = new RegExp(\n `(export|function|const|let|var|class|interface|type|enum)\\\\s+.*\\\\b${symbol.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')}\\\\b`\n );\n for (let i = 0; i < lines.length; i++) {\n if (defPattern.test(lines[i])) {\n const col = lines[i].indexOf(symbol);\n if (col !== -1) {\n defFilePath = absPath;\n defLine = i;\n defChar = col;\n break;\n }\n }\n }\n // Second pass: any occurrence\n if (!defFilePath) {\n for (let i = 0; i < lines.length; i++) {\n const col = lines[i].indexOf(symbol);\n if (col !== -1) {\n defFilePath = absPath;\n defLine = i;\n defChar = col;\n break;\n }\n }\n }\n }\n } else {\n // No file path — search workspace\n const found = await grepForSymbol(symbol, options.workingDirectory);\n if (found) {\n defFilePath = found.filePath;\n defLine = found.line;\n defChar = found.char;\n }\n }\n\n if (!defFilePath) {\n return {\n success: false,\n error: `Could not find symbol \"${symbol}\" in the codebase. Try providing a filePath.`,\n };\n }\n\n // Ensure the file is open in LSP and processed\n await LSP.touchFile(defFilePath, true);\n\n // ========================================\n // Step 2: Get type info via hover\n // ========================================\n const rawHover = await LSP.getHover(defFilePath, defLine, defChar);\n const typeInfo = rawHover ? 
cleanHoverText(rawHover) : null;\n\n // ========================================\n // Step 3: Get document symbols for file structure\n // ========================================\n const fileSymbols = await LSP.getDocumentSymbols(defFilePath);\n if (!defSymbol && fileSymbols.length > 0) {\n defSymbol = findSymbolByName(fileSymbols, symbol);\n }\n\n // ========================================\n // Step 4: Get references (upstream — who uses this)\n // ========================================\n const references = await LSP.getReferences(defFilePath, defLine, defChar, false);\n\n // Group references by file\n const refsByFile = new Map<string, Location[]>();\n for (const ref of references) {\n const refPath = fileURLToPath(ref.uri);\n if (!refsByFile.has(refPath)) {\n refsByFile.set(refPath, []);\n }\n refsByFile.get(refPath)!.push(ref);\n }\n\n // ========================================\n // Step 5: Process each reference file\n // ========================================\n interface RefFileInfo {\n filePath: string;\n relativePath: string;\n isPage: boolean;\n routePath?: string;\n containingSymbols: Array<{\n name: string;\n kind: string;\n line: number;\n char: number;\n typeInfo?: string;\n }>;\n }\n\n const refFileInfos: RefFileInfo[] = [];\n let processed = 0;\n\n for (const [refPath, locs] of refsByFile) {\n if (processed >= MAX_REF_FILES) break;\n // Skip the definition file (self-references)\n if (refPath === defFilePath) continue;\n processed++;\n\n const relPath = relative(options.workingDirectory, refPath);\n const pageFile = isPageFile(refPath);\n const routePath = pageFile ? 
extractRoutePath(refPath, options.workingDirectory) : undefined;\n\n // Open file + get symbols\n await LSP.touchFile(refPath, false);\n const refFileSymbols = await LSP.getDocumentSymbols(refPath);\n\n const seen = new Map<string, { name: string; kind: string; line: number; char: number; typeInfo?: string }>();\n\n for (const loc of locs) {\n const container = findContainingSymbol(\n refFileSymbols,\n loc.range.start.line,\n loc.range.start.character,\n );\n if (container && !seen.has(container.name)) {\n // Get type info for the containing symbol (concise — just first line)\n let containerHover: string | null = null;\n try {\n const raw = await LSP.getHover(\n refPath,\n container.selectionRange.start.line,\n container.selectionRange.start.character,\n );\n if (raw) containerHover = cleanHoverText(raw).split('\\n')[0];\n } catch { /* skip */ }\n\n seen.set(container.name, {\n name: container.name,\n kind: symbolKindName(container.kind),\n line: container.selectionRange.start.line + 1,\n char: container.selectionRange.start.character,\n typeInfo: containerHover || undefined,\n });\n }\n }\n\n refFileInfos.push({\n filePath: refPath,\n relativePath: relPath,\n isPage: pageFile,\n routePath,\n containingSymbols: Array.from(seen.values()),\n });\n }\n\n // ========================================\n // Step 6: Level-2 references (usages of the usages)\n // ========================================\n interface Level2Ref {\n parentSymbol: string;\n parentFile: string;\n refs: Array<{\n relativePath: string;\n isPage: boolean;\n routePath?: string;\n containingSymbol?: string;\n }>;\n }\n\n const level2Refs: Level2Ref[] = [];\n\n if (maxDepth >= 2) {\n for (const refFile of refFileInfos.slice(0, MAX_LEVEL2_PARENTS)) {\n for (const sym of refFile.containingSymbols.slice(0, MAX_LEVEL2_SYMBOLS_PER_PARENT)) {\n try {\n // Use the stored selectionRange position — no need to re-search\n const symLineIdx = sym.line - 1; // convert 1-indexed back to 0-indexed\n const symChar = 
sym.char;\n\n const l2Locations = await LSP.getReferences(\n refFile.filePath, symLineIdx, symChar, false,\n );\n\n const l2Nodes: Level2Ref['refs'] = [];\n const seenPaths = new Set<string>();\n\n for (const loc of l2Locations.slice(0, 10)) {\n const l2Path = fileURLToPath(loc.uri);\n if (l2Path === refFile.filePath || l2Path === defFilePath) continue;\n if (seenPaths.has(l2Path)) continue;\n seenPaths.add(l2Path);\n\n const l2Rel = relative(options.workingDirectory, l2Path);\n const l2Page = isPageFile(l2Path);\n const l2Route = l2Page ? extractRoutePath(l2Path, options.workingDirectory) : undefined;\n\n // Optionally find containing symbol\n let containerName: string | undefined;\n try {\n await LSP.touchFile(l2Path, false);\n const l2Symbols = await LSP.getDocumentSymbols(l2Path);\n const container = findContainingSymbol(l2Symbols, loc.range.start.line, loc.range.start.character);\n if (container) containerName = container.name;\n } catch { /* skip */ }\n\n l2Nodes.push({\n relativePath: l2Rel,\n isPage: l2Page,\n routePath: l2Route,\n containingSymbol: containerName,\n });\n }\n\n if (l2Nodes.length > 0) {\n level2Refs.push({\n parentSymbol: sym.name,\n parentFile: refFile.relativePath,\n refs: l2Nodes,\n });\n }\n } catch { /* skip level-2 errors */ }\n }\n }\n }\n\n // ========================================\n // Step 7: Format output for the agent\n // ========================================\n const relDefPath = relative(options.workingDirectory, defFilePath);\n const lines: string[] = [];\n\n // -- Symbol header --\n lines.push(`=== ${symbol} ===`);\n lines.push(`File: ${relDefPath}:${defLine + 1}`);\n if (defSymbol) lines.push(`Kind: ${symbolKindName(defSymbol.kind)}`);\n if (typeInfo) lines.push(`Type: ${typeInfo}`);\n\n // -- References --\n const externalRefCount = references.filter(r => fileURLToPath(r.uri) !== defFilePath).length;\n const externalFileCount = refsByFile.size - (refsByFile.has(defFilePath!) ? 
1 : 0);\n\n if (refFileInfos.length > 0) {\n lines.push('');\n lines.push(`=== Referenced by (${externalRefCount} usages across ${externalFileCount} files) ===`);\n\n const pages = refFileInfos.filter(r => r.isPage);\n const nonPages = refFileInfos.filter(r => !r.isPage);\n\n if (pages.length > 0) {\n lines.push('');\n lines.push('Pages/Routes:');\n for (const page of pages) {\n lines.push(` ${page.relativePath}${page.routePath ? ` → ${page.routePath}` : ''}`);\n for (const s of page.containingSymbols) {\n lines.push(` └── ${s.name} (${s.kind}:${s.line})${s.typeInfo ? ` — ${s.typeInfo}` : ''}`);\n }\n }\n }\n\n if (nonPages.length > 0) {\n lines.push('');\n lines.push('Components/Functions:');\n for (const ref of nonPages) {\n lines.push(` ${ref.relativePath}`);\n for (const s of ref.containingSymbols) {\n const typePart = s.typeInfo && s.typeInfo.length < 120 ? ` — ${s.typeInfo}` : '';\n lines.push(` └── ${s.name} (${s.kind}:${s.line})${typePart}`);\n }\n }\n }\n } else {\n lines.push('');\n lines.push('No external references found (symbol may be unused or only used within the same file).');\n }\n\n // -- Level 2 refs --\n if (level2Refs.length > 0) {\n lines.push('');\n lines.push('=== Extended tree (level 2) ===');\n for (const l2 of level2Refs) {\n lines.push('');\n lines.push(`${l2.parentSymbol} (${l2.parentFile}) is used by:`);\n for (const ref of l2.refs) {\n const tag = ref.isPage ? ' [PAGE]' : '';\n const route = ref.routePath ? ` → ${ref.routePath}` : '';\n const container = ref.containingSymbol ? ` in ${ref.containingSymbol}` : '';\n lines.push(` └── ${ref.relativePath}${tag}${route}${container}`);\n }\n }\n }\n\n // -- File structure --\n if (fileSymbols.length > 0) {\n lines.push('');\n lines.push(`=== File structure (${basename(defFilePath)}) ===`);\n for (const sym of fileSymbols) {\n const marker = sym.name === symbol ? 
' ← target' : '';\n lines.push(` ${sym.name} (${symbolKindName(sym.kind)}:${sym.selectionRange.start.line + 1})${marker}`);\n if (sym.children) {\n for (const child of sym.children.slice(0, 10)) {\n lines.push(` └── ${child.name} (${symbolKindName(child.kind)}:${child.selectionRange.start.line + 1})`);\n }\n if (sym.children.length > 10) {\n lines.push(` ... and ${sym.children.length - 10} more`);\n }\n }\n }\n }\n\n const formattedResult = lines.join('\\n');\n\n return {\n success: true,\n symbol,\n filePath: relDefPath,\n line: defLine + 1,\n kind: defSymbol ? symbolKindName(defSymbol.kind) : undefined,\n typeInfo: typeInfo || undefined,\n referenceCount: externalRefCount,\n referenceFiles: externalFileCount,\n pages: refFileInfos\n .filter(r => r.isPage)\n .map(r => ({ path: r.relativePath, route: r.routePath })),\n formattedResult,\n };\n } catch (error: unknown) {\n return {\n success: false,\n error: error instanceof Error ? error.message : String(error),\n };\n }\n },\n });\n}\n\nexport type CodeGraphTool = ReturnType<typeof createCodeGraphTool>;\n","import { ToolSet } from 'ai';\nimport { createBashTool, BashToolOptions, BashToolProgress } from './bash.js';\nimport { createReadFileTool, ReadFileToolOptions } from './read-file.js';\nimport { createWriteFileTool, WriteFileToolOptions, WriteFileProgress } from './write-file.js';\nimport { createTodoTool, TodoToolOptions } from './todo.js';\nimport { createLoadSkillTool, LoadSkillToolOptions } from './load-skill.js';\nimport { createLinterTool, LinterToolOptions } from './linter.js';\nimport { createSearchTool, SearchToolOptions, SearchToolProgress } from './search.js';\nimport { createSemanticSearchTool, SemanticSearchToolOptions, SemanticSearchResult } from './semantic-search.js';\nimport { createCodeGraphTool, CodeGraphToolOptions } from './code-graph.js';\nimport { createCompleteTaskTool, createTaskFailedTool, TaskCompletionSignal, CreateTaskToolsOptions } from './task.js';\nimport { createUploadFileTool, 
UploadFileToolOptions } from './upload-file.js';\nimport { isVectorGatewayConfigured, checkIndexExists } from '../semantic/index.js';\nimport { isRemoteConfigured } from '../db/remote.js';\n\nexport interface CreateToolsOptions {\n sessionId: string;\n workingDirectory: string;\n skillsDirectories: string[];\n onBashOutput?: (output: string) => void;\n onBashProgress?: (progress: BashToolProgress) => void;\n /** Called when write_file has progress to report (for streaming content) */\n onWriteFileProgress?: (progress: WriteFileProgress) => void;\n /** Called when explore_agent tool has progress to report (subagent steps) */\n onSearchProgress?: (progress: SearchToolProgress) => void;\n /** Enable LSP diagnostics for file edits (default: true) */\n enableLSP?: boolean;\n /** Enable semantic search if configured (default: true) */\n enableSemanticSearch?: boolean;\n /** Task mode: include complete_task and task_failed tools */\n taskTools?: CreateTaskToolsOptions;\n}\n\n/**\n * Create all tools for an agent session\n * Note: This is now async to support checking semantic search availability\n */\nexport async function createTools(options: CreateToolsOptions): Promise<ToolSet> {\n const tools: ToolSet = {\n bash: createBashTool({\n workingDirectory: options.workingDirectory,\n sessionId: options.sessionId,\n onOutput: options.onBashOutput,\n onProgress: options.onBashProgress,\n }),\n\n read_file: createReadFileTool({\n workingDirectory: options.workingDirectory,\n }),\n\n write_file: createWriteFileTool({\n workingDirectory: options.workingDirectory,\n sessionId: options.sessionId,\n enableLSP: options.enableLSP ?? 
true,\n onProgress: options.onWriteFileProgress,\n }),\n\n todo: createTodoTool({\n sessionId: options.sessionId,\n }),\n\n load_skill: createLoadSkillTool({\n sessionId: options.sessionId,\n skillsDirectories: options.skillsDirectories,\n }),\n\n linter: createLinterTool({\n workingDirectory: options.workingDirectory,\n }),\n\n explore_agent: createSearchTool({\n sessionId: options.sessionId,\n workingDirectory: options.workingDirectory,\n onProgress: options.onSearchProgress,\n }),\n\n code_graph: createCodeGraphTool({\n workingDirectory: options.workingDirectory,\n }),\n };\n\n // Conditionally add upload_file if remote server with GCS is configured\n if (isRemoteConfigured()) {\n tools.upload_file = createUploadFileTool({\n workingDirectory: options.workingDirectory,\n sessionId: options.sessionId,\n });\n }\n\n // Conditionally add semantic_search if configured and index exists\n if (options.enableSemanticSearch !== false) {\n try {\n if (isVectorGatewayConfigured()) {\n const hasIndex = await checkIndexExists(options.workingDirectory);\n if (hasIndex) {\n tools.semantic_search = createSemanticSearchTool({\n workingDirectory: options.workingDirectory,\n });\n }\n }\n } catch {\n // Silently skip semantic search if there are any issues\n }\n }\n\n // Conditionally add task tools when in task mode\n if (options.taskTools) {\n tools.complete_task = createCompleteTaskTool(options.taskTools);\n tools.task_failed = createTaskFailedTool(options.taskTools);\n }\n\n return tools;\n}\n\n// Re-export individual tool creators for customization\nexport { createBashTool } from './bash.js';\nexport { createReadFileTool } from './read-file.js';\nexport { createWriteFileTool } from './write-file.js';\nexport { createTodoTool } from './todo.js';\nexport { createLoadSkillTool } from './load-skill.js';\nexport { createLinterTool } from './linter.js';\nexport { createSearchTool } from './search.js';\nexport { createSemanticSearchTool } from './semantic-search.js';\nexport { 
createCodeGraphTool } from './code-graph.js';\nexport { createCompleteTaskTool, createTaskFailedTool } from './task.js';\nexport { createUploadFileTool } from './upload-file.js';\n\n// Export types\nexport type { BashToolOptions, BashToolProgress } from './bash.js';\nexport type { ReadFileToolOptions } from './read-file.js';\nexport type { WriteFileToolOptions, WriteFileProgress } from './write-file.js';\nexport type { TodoToolOptions } from './todo.js';\nexport type { LoadSkillToolOptions } from './load-skill.js';\nexport type { LinterToolOptions } from './linter.js';\nexport type { SearchToolOptions, SearchToolProgress } from './search.js';\nexport type { SemanticSearchToolOptions, SemanticSearchResult } from './semantic-search.js';\nexport type { CodeGraphToolOptions } from './code-graph.js';\nexport type { TaskCompletionSignal, CreateTaskToolsOptions } from './task.js';\nexport type { UploadFileToolOptions } from './upload-file.js';","import { tool } from 'ai';\nimport { z } from 'zod';\nimport Ajv from 'ajv';\n\nconst ajv = new Ajv({ allErrors: true });\n\nexport interface TaskCompletionSignal {\n status: 'completed' | 'failed';\n result?: unknown;\n error?: string;\n}\n\nexport interface CreateTaskToolsOptions {\n outputSchema: Record<string, unknown>;\n onComplete: (signal: TaskCompletionSignal) => void;\n}\n\nexport function createCompleteTaskTool(options: CreateTaskToolsOptions) {\n const validate = ajv.compile(options.outputSchema);\n\n return tool({\n description:\n 'Call this tool when you have completed the task. Pass the result as a JSON object matching the required output schema. 
If the result does not match the schema, you will receive validation errors and should fix and retry.',\n inputSchema: z.object({\n result: z\n .record(z.string(), z.unknown())\n .describe('The task result as a JSON object matching the output schema'),\n }),\n execute: async (input) => {\n const valid = validate(input.result);\n if (!valid) {\n const errors = validate.errors?.map((e) => ({\n path: e.instancePath || '/',\n message: e.message,\n params: e.params,\n }));\n return {\n status: 'validation_error',\n message:\n 'The result does not match the required output schema. Fix the errors and call complete_task again.',\n errors,\n expectedSchema: options.outputSchema,\n };\n }\n\n options.onComplete({ status: 'completed', result: input.result });\n return {\n status: 'completed',\n message: 'Task completed successfully.',\n };\n },\n });\n}\n\nexport function createTaskFailedTool(options: Pick<CreateTaskToolsOptions, 'onComplete'>) {\n return tool({\n description:\n 'Call this tool if you are unable to complete the task. 
Provide a clear reason explaining why the task cannot be completed.',\n inputSchema: z.object({\n reason: z.string().describe('Explanation of why the task cannot be completed'),\n }),\n execute: async (input) => {\n options.onComplete({ status: 'failed', error: input.reason });\n return {\n status: 'failed',\n message: `Task marked as failed: ${input.reason}`,\n };\n },\n });\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { readFile, stat } from 'node:fs/promises';\nimport { join, basename, extname } from 'node:path';\n\nexport interface UploadFileToolOptions {\n workingDirectory: string;\n sessionId: string;\n}\n\nconst MIME_TYPES: Record<string, string> = {\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.html': 'text/html',\n '.css': 'text/css',\n '.js': 'application/javascript',\n '.ts': 'application/typescript',\n '.json': 'application/json',\n '.csv': 'text/csv',\n '.xml': 'application/xml',\n '.pdf': 'application/pdf',\n '.png': 'image/png',\n '.jpg': 'image/jpeg',\n '.jpeg': 'image/jpeg',\n '.gif': 'image/gif',\n '.webp': 'image/webp',\n '.svg': 'image/svg+xml',\n '.mp4': 'video/mp4',\n '.webm': 'video/webm',\n '.mp3': 'audio/mpeg',\n '.wav': 'audio/wav',\n '.zip': 'application/zip',\n '.tar': 'application/x-tar',\n '.gz': 'application/gzip',\n};\n\nexport function createUploadFileTool(options: UploadFileToolOptions) {\n return tool({\n description: `Upload a file to cloud storage and get back a shareable download URL. Use this when the user needs a hosted link to a file (e.g. a generated report, image, export, or any artifact they want to download or share). 
The file must already exist on disk.`,\n inputSchema: z.object({\n path: z.string().describe('Path to the file to upload (relative to working directory or absolute)'),\n name: z.string().optional().describe('Display name for the file (defaults to the filename)'),\n }),\n execute: async (input) => {\n try {\n const { isRemoteConfigured, storageQueries } = await import('../db/remote.js');\n if (!isRemoteConfigured()) {\n return {\n success: false,\n error: 'File upload is not available — remote server with GCS is not configured.',\n };\n }\n\n const fullPath = input.path.startsWith('/')\n ? input.path\n : join(options.workingDirectory, input.path);\n\n // Verify file exists\n try {\n await stat(fullPath);\n } catch {\n return {\n success: false,\n error: `File not found: ${input.path}`,\n };\n }\n\n const fileName = input.name || basename(fullPath);\n const ext = extname(fullPath).toLowerCase();\n const contentType = MIME_TYPES[ext] || 'application/octet-stream';\n\n // Get presigned upload URL\n const uploadInfo = await storageQueries.getUploadUrl(\n options.sessionId,\n fileName,\n contentType,\n 'general'\n );\n\n // Read and upload\n const fileData = await readFile(fullPath);\n const putRes = await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': contentType },\n body: fileData,\n });\n\n if (!putRes.ok) {\n return {\n success: false,\n error: `Upload failed: ${putRes.status} ${putRes.statusText}`,\n };\n }\n\n // Update file size\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: fileData.length });\n\n // Get download URL\n const downloadInfo = await storageQueries.getDownloadUrl(uploadInfo.fileId);\n\n return {\n success: true,\n fileId: uploadInfo.fileId,\n fileName,\n sizeBytes: fileData.length,\n contentType,\n downloadUrl: downloadInfo.downloadUrl,\n expiresAt: downloadInfo.expiresAt,\n };\n } catch (err: any) {\n return {\n success: false,\n error: `Upload failed: ${err.message}`,\n };\n }\n },\n });\n}\n","import 
{ generateText, type ModelMessage as AIModelMessage } from 'ai';\nimport { resolveModel } from './model.js';\nimport { messageQueries, ModelMessage } from '../db/index.js';\nimport { calculateContextSize } from '../utils/truncate.js';\nimport { estimateTokens, estimateMessageTokens } from '../utils/tokens.js';\nimport { createSummaryPrompt } from './prompts.js';\nimport { getConfig } from '../config/index.js';\nimport { sanitizeModelMessages } from '../utils/sanitize-messages.js';\nimport {\n getModelLimits,\n SUMMARIZATION_MODEL,\n SUMMARY_CHUNK_TOKENS,\n SUMMARY_BUDGET_RATIO,\n} from './model-limits.js';\n\nexport interface ContextManagerOptions {\n sessionId: string;\n modelId: string;\n maxContextChars: number;\n keepRecentMessages: number;\n autoSummarize: boolean;\n}\n\nconst TOOL_OUTPUT_TRIM_CHARS = 400;\n\nconst COMPACTABLE_TOOLS = new Set([\n 'read_file',\n 'bash',\n 'explore_agent',\n 'code_graph',\n]);\n\n/**\n * Manages conversation context with a three-phase rolling window:\n *\n * Phase 1 – Compact: strip todo tool calls, trim large tool outputs,\n * and remove thinking blocks from older messages (no LLM cost).\n * Phase 2 – Chunk-summarize: when compacted context still exceeds the\n * model's rolling target, summarize the oldest ~30 K-token chunk\n * via a cheap/fast model (gemini-3-flash-preview).\n * Phase 3 – Roll summaries: when accumulated summaries exceed their budget\n * (~15 % of rolling target), re-summarize them into one.\n *\n * A final repair pass ensures all tool-call / tool-result pairs are matched\n * before the messages leave the context manager.\n */\nexport class ContextManager {\n private sessionId: string;\n private modelId: string;\n private maxContextChars: number;\n private keepRecentMessages: number;\n private autoSummarize: boolean;\n private summaries: string[] = [];\n\n constructor(options: ContextManagerOptions) {\n this.sessionId = options.sessionId;\n this.modelId = options.modelId;\n this.maxContextChars = 
options.maxContextChars;\n this.keepRecentMessages = options.keepRecentMessages;\n this.autoSummarize = options.autoSummarize;\n }\n\n /**\n * Get messages for the current context, applying the three-phase pipeline.\n */\n async getMessages(): Promise<AIModelMessage[]> {\n let messages = (await messageQueries.getModelMessages(this.sessionId)) as AIModelMessage[];\n messages = sanitizeModelMessages(messages) as AIModelMessage[];\n\n // Phase 1: compact older messages (always, free)\n messages = this.compactOlderMessages(messages, this.keepRecentMessages);\n\n if (this.autoSummarize) {\n const { rollingTarget } = getModelLimits(this.modelId);\n const summaryBudget = Math.floor(rollingTarget * SUMMARY_BUDGET_RATIO);\n\n // Phase 2: chunk-summarize until we're under the rolling target\n messages = await this.chunkSummarize(messages, rollingTarget);\n\n // Phase 3: roll summaries if they've grown too large\n await this.rollSummaries(summaryBudget);\n }\n\n // Prepend accumulated summaries\n if (this.summaries.length > 0) {\n const summaryContent = this.summaries.join('\\n\\n---\\n\\n');\n messages = [\n {\n role: 'system' as const,\n content: `[Previous conversation summary]\\n${summaryContent}`,\n },\n ...messages,\n ];\n }\n\n // Final safety net: repair any broken tool-call / tool-result pairing\n messages = repairToolPairing(messages);\n\n return messages;\n }\n\n // ---------------------------------------------------------------------------\n // Phase 1 – Compact\n // ---------------------------------------------------------------------------\n\n /**\n * Strip non-essential content from messages older than the most recent\n * `recentCount`. 
Operates in-memory only — does not touch the DB.\n *\n * Tracks removed tool-call IDs so matching tool-results are also removed,\n * preventing orphaned tool_result blocks that providers reject.\n */\n compactOlderMessages(\n messages: AIModelMessage[],\n recentCount: number,\n ): AIModelMessage[] {\n if (messages.length <= recentCount) return messages;\n\n const boundary = messages.length - recentCount;\n const olderMessages = messages.slice(0, boundary);\n const recentMessages = messages.slice(boundary);\n\n // First pass: compact and collect removed tool-call IDs\n const removedToolCallIds = new Set<string>();\n const compacted: AIModelMessage[] = [];\n\n for (const msg of olderMessages) {\n const processed = this.compactMessage(msg, removedToolCallIds);\n if (processed) compacted.push(processed);\n }\n\n // Second pass: strip orphaned tool-results whose tool-calls were removed\n if (removedToolCallIds.size > 0) {\n const cleaned: AIModelMessage[] = [];\n for (const msg of compacted) {\n const result = stripOrphanedToolResults(msg, removedToolCallIds);\n if (result) cleaned.push(result);\n }\n return [...cleaned, ...recentMessages];\n }\n\n return [...compacted, ...recentMessages];\n }\n\n private compactMessage(\n msg: AIModelMessage,\n removedToolCallIds: Set<string>,\n ): AIModelMessage | null {\n if (!Array.isArray(msg.content)) return msg;\n\n const parts: any[] = [];\n for (const part of msg.content as any[]) {\n // Remove todo tool calls and track their IDs\n if (part.type === 'tool-call' && part.toolName === 'todo') {\n if (part.toolCallId) removedToolCallIds.add(part.toolCallId);\n continue;\n }\n\n // Remove todo tool results and track their IDs\n if (part.type === 'tool-result' && part.toolName === 'todo') {\n if (part.toolCallId) removedToolCallIds.add(part.toolCallId);\n continue;\n }\n\n // Remove thinking/reasoning blocks\n if (part.type === 'reasoning' || part.type === 'thinking') continue;\n\n // Trim large tool-result outputs for known heavy 
tools\n if (part.type === 'tool-result' && COMPACTABLE_TOOLS.has(part.toolName)) {\n parts.push(this.trimToolResult(part));\n continue;\n }\n\n parts.push(part);\n }\n\n if (parts.length === 0) return null;\n return { ...msg, content: parts } as AIModelMessage;\n }\n\n private trimToolResult(part: any): any {\n const results = Array.isArray(part.result) ? part.result : [part.result];\n const trimmedResults = results.map((r: any) => {\n if (typeof r === 'string' && r.length > TOOL_OUTPUT_TRIM_CHARS) {\n const half = Math.floor(TOOL_OUTPUT_TRIM_CHARS / 2);\n return (\n r.slice(0, half) +\n `\\n...[trimmed ${r.length - TOOL_OUTPUT_TRIM_CHARS} chars]...\\n` +\n r.slice(-half)\n );\n }\n if (r && typeof r === 'object' && typeof r.text === 'string' && r.text.length > TOOL_OUTPUT_TRIM_CHARS) {\n const half = Math.floor(TOOL_OUTPUT_TRIM_CHARS / 2);\n return {\n ...r,\n text:\n r.text.slice(0, half) +\n `\\n...[trimmed ${r.text.length - TOOL_OUTPUT_TRIM_CHARS} chars]...\\n` +\n r.text.slice(-half),\n };\n }\n return r;\n });\n\n return {\n ...part,\n result: Array.isArray(part.result) ? 
trimmedResults : trimmedResults[0],\n };\n }\n\n // ---------------------------------------------------------------------------\n // Phase 2 – Chunk-summarize\n // ---------------------------------------------------------------------------\n\n /**\n * While estimated tokens exceed `rollingTarget`, peel off the oldest\n * ~SUMMARY_CHUNK_TOKENS worth of messages, summarize them via the cheap\n * model, and prepend the summary.\n */\n private async chunkSummarize(\n messages: AIModelMessage[],\n rollingTarget: number,\n ): Promise<AIModelMessage[]> {\n let totalTokens = estimateMessageTokens(messages);\n\n while (totalTokens > rollingTarget && messages.length > this.keepRecentMessages) {\n // Collect the oldest chunk of ~SUMMARY_CHUNK_TOKENS\n let chunkTokens = 0;\n let chunkEnd = 0;\n const maxChunkable = messages.length - this.keepRecentMessages;\n\n for (let i = 0; i < maxChunkable; i++) {\n const msgTokens = this.messageTokens(messages[i]);\n chunkTokens += msgTokens;\n chunkEnd = i + 1;\n if (chunkTokens >= SUMMARY_CHUNK_TOKENS) break;\n }\n\n if (chunkEnd === 0) break;\n\n const chunk = messages.slice(0, chunkEnd);\n const remaining = messages.slice(chunkEnd);\n\n const summary = await this.summarizeChunk(chunk);\n if (summary) {\n this.summaries.push(summary);\n console.log(\n `[Context] Summarized ${chunk.length} messages (~${chunkTokens} tokens) into ${estimateTokens(summary)} tokens`,\n );\n }\n\n messages = remaining;\n totalTokens = estimateMessageTokens(messages);\n }\n\n return messages;\n }\n\n private async summarizeChunk(chunk: AIModelMessage[]): Promise<string | null> {\n const historyText = chunk\n .map((msg) => {\n const content = typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content);\n return `[${msg.role}]: ${content}`;\n })\n .join('\\n\\n');\n\n try {\n const result = await generateText({\n model: resolveModel(SUMMARIZATION_MODEL) as any,\n prompt: createSummaryPrompt(historyText),\n });\n return result.text;\n } catch (error) {\n console.error('[Context] Chunk summarization failed:', error);\n return null;\n }\n }\n\n // ---------------------------------------------------------------------------\n // Phase 3 – Roll summaries\n // ---------------------------------------------------------------------------\n\n /**\n * If accumulated summaries exceed `budget` tokens, re-summarize them\n * into a single condensed summary.\n */\n private async rollSummaries(budget: number): Promise<void> {\n if (this.summaries.length <= 1) return;\n\n const totalSummaryTokens = this.summaries.reduce(\n (t, s) => t + estimateTokens(s),\n 0,\n );\n\n if (totalSummaryTokens <= budget) return;\n\n const combined = this.summaries.join('\\n\\n---\\n\\n');\n\n try {\n const result = await generateText({\n model: resolveModel(SUMMARIZATION_MODEL) as any,\n prompt: createSummaryPrompt(combined),\n });\n\n console.log(\n `[Context] Rolled ${this.summaries.length} summaries (${totalSummaryTokens} tokens) into ${estimateTokens(result.text)} tokens`,\n );\n\n this.summaries = [result.text];\n } catch (error) {\n console.error('[Context] Summary rolling failed:', error);\n }\n }\n\n // ---------------------------------------------------------------------------\n // Helpers\n // ---------------------------------------------------------------------------\n\n private messageTokens(msg: AIModelMessage): number {\n const content = typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content);\n return estimateTokens(content) + 4;\n }\n\n // ---------------------------------------------------------------------------\n // Public API (unchanged)\n // ---------------------------------------------------------------------------\n\n async addUserMessage(\n content:\n | string\n | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string }>,\n ): Promise<void> {\n const userMessage: ModelMessage = {\n role: 'user',\n content: content as any,\n };\n await messageQueries.create(this.sessionId, userMessage);\n }\n\n async addResponseMessages(messages: AIModelMessage[]): Promise<void> {\n await messageQueries.addMany(this.sessionId, messages as ModelMessage[]);\n }\n\n async getStats(): Promise<{\n messageCount: number;\n contextChars: number;\n estimatedTokens: number;\n hasSummary: boolean;\n summaryCount: number;\n }> {\n const messages = (await messageQueries.getModelMessages(this.sessionId)) as AIModelMessage[];\n\n return {\n messageCount: messages.length,\n contextChars: calculateContextSize(messages),\n estimatedTokens: estimateMessageTokens(messages),\n hasSummary: this.summaries.length > 0,\n summaryCount: this.summaries.length,\n };\n }\n\n async clear(): Promise<void> {\n await messageQueries.deleteBySession(this.sessionId);\n this.summaries = [];\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool pairing repair (used by both ContextManager and remote server)\n// ---------------------------------------------------------------------------\n\n/**\n * Remove tool-result parts whose matching tool-call was removed.\n */\nfunction stripOrphanedToolResults(\n msg: AIModelMessage,\n removedIds: Set<string>,\n): AIModelMessage | null {\n if (!Array.isArray(msg.content)) return msg;\n\n const parts = (msg.content as any[]).filter((part) => {\n if (part.type === 'tool-result' && removedIds.has(part.toolCallId)) return false;\n if (part.type === 
'tool-call' && removedIds.has(part.toolCallId)) return false;\n return true;\n });\n\n if (parts.length === 0) return null;\n return { ...msg, content: parts } as AIModelMessage;\n}\n\n/**\n * Ensure every tool-call has a matching tool-result and vice versa.\n * Removes orphans from either side so the prompt is always structurally valid.\n *\n * Exported so the remote server can use it as a last-resort safety net.\n */\nexport function repairToolPairing(messages: AIModelMessage[]): AIModelMessage[] {\n // Collect all tool-call IDs and tool-result IDs\n const toolCallIds = new Set<string>();\n const toolResultIds = new Set<string>();\n\n for (const msg of messages) {\n if (!Array.isArray(msg.content)) continue;\n for (const part of msg.content as any[]) {\n if (part.type === 'tool-call' && part.toolCallId) toolCallIds.add(part.toolCallId);\n if (part.type === 'tool-result' && part.toolCallId) toolResultIds.add(part.toolCallId);\n }\n }\n\n // IDs that exist on one side but not the other\n const orphanedCalls = new Set([...toolCallIds].filter((id) => !toolResultIds.has(id)));\n const orphanedResults = new Set([...toolResultIds].filter((id) => !toolCallIds.has(id)));\n\n if (orphanedCalls.size === 0 && orphanedResults.size === 0) return messages;\n\n if (orphanedCalls.size > 0) {\n console.warn(`[tool-repair] Removing ${orphanedCalls.size} orphaned tool-call(s) with no matching result`);\n }\n if (orphanedResults.size > 0) {\n console.warn(`[tool-repair] Removing ${orphanedResults.size} orphaned tool-result(s) with no matching call`);\n }\n\n const repaired: AIModelMessage[] = [];\n\n for (const msg of messages) {\n if (!Array.isArray(msg.content)) {\n repaired.push(msg);\n continue;\n }\n\n const parts = (msg.content as any[]).filter((part) => {\n if (part.type === 'tool-call' && orphanedCalls.has(part.toolCallId)) return false;\n if (part.type === 'tool-result' && orphanedResults.has(part.toolCallId)) return false;\n return true;\n });\n\n if (parts.length === 0) 
continue;\n repaired.push({ ...msg, content: parts } as AIModelMessage);\n }\n\n return repaired;\n}\n","import os from 'node:os';\nimport {\n loadAllSkillsFromDiscovered,\n getGlobMatchedSkills,\n loadAgentsMd,\n formatSkillsForContext,\n formatAlwaysLoadedSkills,\n formatGlobMatchedSkills,\n formatAgentsMdContent,\n} from '../skills/index.js';\nimport { todoQueries, TodoItem } from '../db/index.js';\nimport { DiscoveredSkills } from '../config/types.js';\n\n/**\n * Get platform-specific search instructions\n */\nfunction getSearchInstructions(): string {\n const platform = process.platform;\n \n const common = `- **Prefer \\`read_file\\` over shell commands** for reading files - don't use \\`cat\\`, \\`head\\`, or \\`tail\\` when \\`read_file\\` is available\n- **Avoid unbounded searches** - always scope searches with glob patterns and directory paths to prevent overwhelming output\n- **Search strategically**: Start with specific patterns and directories, then broaden only if needed`;\n\n if (platform === 'win32') {\n return `${common}\n- **Find files**: \\`dir /s /b *.ts\\` or PowerShell: \\`Get-ChildItem -Recurse -Filter *.ts\\`\n- **Search content**: \\`findstr /s /n \"pattern\" *.ts\\` or PowerShell: \\`Select-String -Pattern \"pattern\" -Path *.ts -Recurse\\`\n- **If ripgrep (\\`rg\\`) is installed**: \\`rg \"pattern\" -t ts src/\\` - faster and respects .gitignore`;\n }\n \n // Unix-like (darwin, linux, etc.)\n return `${common}\n- **Find files**: \\`find . 
-name \"*.ts\"\\` or \\`find src/ -type f -name \"*.tsx\"\\`\n- **Search content**: \\`grep -rn \"pattern\" --include=\"*.ts\" src/\\` - use \\`-l\\` for filenames only, \\`-c\\` for counts\n- **If ripgrep (\\`rg\\`) is installed**: \\`rg \"pattern\" -t ts src/\\` - faster and respects .gitignore`;\n}\n\n/**\n * Build the system prompt for the coding agent\n */\nexport async function buildSystemPrompt(options: {\n workingDirectory: string;\n skillsDirectories: string[];\n sessionId: string;\n discoveredSkills?: DiscoveredSkills;\n activeFiles?: string[];\n customInstructions?: string;\n}): Promise<string> {\n const {\n workingDirectory,\n skillsDirectories,\n sessionId,\n discoveredSkills,\n activeFiles = [],\n customInstructions,\n } = options;\n\n // Load skills using the enhanced system if discoveredSkills is provided\n let alwaysLoadedContent = '';\n let globMatchedContent = '';\n let agentsMdContent = '';\n let onDemandSkillsContext = '';\n\n if (discoveredSkills) {\n // Use the new enhanced skill loading\n const { always, onDemand, all } = await loadAllSkillsFromDiscovered(discoveredSkills);\n\n // Format always-loaded skills\n alwaysLoadedContent = formatAlwaysLoadedSkills(always);\n\n // Format on-demand skills for context\n onDemandSkillsContext = formatSkillsForContext(onDemand);\n\n // Load AGENTS.md if present\n const agentsMd = await loadAgentsMd(discoveredSkills.agentsMdPath);\n agentsMdContent = formatAgentsMdContent(agentsMd);\n\n // Load glob-matched skills based on active files\n if (activeFiles.length > 0) {\n const globMatched = await getGlobMatchedSkills(all, activeFiles, workingDirectory);\n globMatchedContent = formatGlobMatchedSkills(globMatched);\n }\n } else {\n // Legacy fallback: just load skills from directories\n const { loadAllSkills } = await import('../skills/index.js');\n const skills = await loadAllSkills(skillsDirectories);\n onDemandSkillsContext = formatSkillsForContext(skills);\n }\n\n // Load current todos\n const todos = 
await todoQueries.getBySession(sessionId);\n const todosContext = formatTodosForContext(todos);\n\n // Get environment info\n const platform = process.platform === 'win32' ? 'Windows' : process.platform === 'darwin' ? 'macOS' : 'Linux';\n const currentDate = new Date().toLocaleDateString('en-US', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });\n const searchInstructions = getSearchInstructions();\n\n const systemPrompt = `You are SparkECoder, an expert AI coding assistant. You help developers write, debug, and improve code.\n\n## Environment\n- **Platform**: ${platform} (${os.release()})\n- **Date**: ${currentDate}\n- **Working Directory**: ${workingDirectory}\n\n## Core Capabilities\nYou have access to powerful tools for:\n- **bash**: Execute commands in the terminal (see below for details)\n- **read_file**: Read file contents to understand code and context\n- **write_file**: Create new files or edit existing ones (supports targeted string replacement)\n- **linter**: Check files for type errors and lint issues (TypeScript, JavaScript, TSX, JSX)\n- **todo**: Manage your task list to track progress on complex operations\n- **load_skill**: Load specialized knowledge documents for specific tasks\n- **explore_agent**: Explore agent for semantic discovery - for exploratory questions and finding code by meaning\n- **code_graph**: Inspect a symbol's type hierarchy and usage graph via the TypeScript language server\n- **upload_file**: Upload a file to cloud storage and get a shareable download URL (available when remote storage is configured)\n\n\nIMPORTANT: If you have zero context of where you are working, always explore it first to understand the structure before doing things for the user.\n\nUse the TODO tool to manage your task list to track progress on complex operations. Always ask the user what they want to do specifically before doing it, and make a plan. 
\nStep 1 of the plan should be researching files and understanding the components/structure of what you're working on (if you don't already have context), then after u have done that, plan out the rest of the tasks u need to do. \nYou can clear the todo and restart it, and do multiple things inside of one session.\n\n### bash Tool\nThe bash tool runs commands in the terminal. Every command runs in its own session with logs saved to disk.\n\n**Run a command (default - waits for completion):**\n\\`\\`\\`\nbash({ command: \"npm install\" })\nbash({ command: \"git status\" })\n\\`\\`\\`\n\n**Run in background (for dev servers, watchers):**\n\\`\\`\\`\nbash({ command: \"npm run dev\", background: true })\n→ Returns { id: \"abc123\" } - save this ID to check logs or stop it later\n\\`\\`\\`\n\n**Check on a background process:**\n\\`\\`\\`\nbash({ id: \"abc123\" }) // get full output\nbash({ id: \"abc123\", tail: 50 }) // last 50 lines only\n\\`\\`\\`\n\n**Stop a background process:**\n\\`\\`\\`\nbash({ id: \"abc123\", kill: true })\n\\`\\`\\`\n\n**Respond to interactive prompts (for yes/no questions, etc.):**\n\\`\\`\\`\nbash({ id: \"abc123\", key: \"y\" }) // send 'y' for yes\nbash({ id: \"abc123\", key: \"n\" }) // send 'n' for no \nbash({ id: \"abc123\", key: \"Enter\" }) // press Enter\nbash({ id: \"abc123\", input: \"my text\" }) // send text input\n\\`\\`\\`\n\n**IMPORTANT - Handling Interactive Commands:**\n- ALWAYS prefer non-interactive flags when available:\n - \\`npm init --yes\\` or \\`npm install --yes\\`\n - \\`npx create-next-app --yes\\` (accepts all defaults)\n - \\`npx create-react-app --yes\\`\n - \\`git commit --no-edit\\`\n - \\`apt-get install -y\\`\n- If a command might prompt for input, run it in background mode first\n- Check the output to see if it's waiting for input\n- Use \\`key: \"y\"\\` or \\`key: \"n\"\\` for yes/no prompts\n- Use \\`input: \"text\"\\` for text input prompts\n\nTerminal output is stored in the global SparkECoder data 
directory. Use the \\`tail\\` option to read recent output.\n\n## Guidelines\n\n### Code Quality\n- Write clean, maintainable, well-documented code\n- Follow existing code style and conventions in the project\n- Use meaningful variable and function names\n- Add comments for complex logic\n\n### Problem Solving\n- Before making changes, understand the existing code structure\n- Break complex tasks into smaller, manageable steps using the todo tool\n- Test changes when possible using the bash tool\n- Handle errors gracefully and provide helpful error messages\n\n### File Operations\n- Use \\`read_file\\` to understand code before modifying\n- Use \\`write_file\\` with mode \"str_replace\" for targeted edits to existing files\n- Use \\`write_file\\` with mode \"full\" only for new files or complete rewrites\n- After making changes, use the \\`linter\\` tool to check for type errors and lint issues\n- The \\`write_file\\` tool automatically shows lint errors in its output for TypeScript/JavaScript files\n- If the user asks to write/create a file, always use \\`write_file\\` rather than printing the full contents\n- If the user requests a file but does not provide a path, choose a sensible default (e.g. 
\\`index.html\\`) and proceed\n- For large content (hundreds of lines), avoid placing it in chat output; write to a file instead\n\n### Linter Tool\nThe linter tool uses Language Server Protocol (LSP) to detect type errors and lint issues:\n\\`\\`\\`\nlinter({}) // Check all recently edited files\nlinter({ paths: [\"src/app.ts\"] }) // Check specific files\nlinter({ paths: [\"src/\"] }) // Check all files in a directory\n\\`\\`\\`\nUse this proactively after making code changes to catch errors early.\n\n### Code Graph Tool\nThe code_graph tool uses the TypeScript language server to inspect a symbol's type hierarchy and usage graph:\n\\`\\`\\`\ncode_graph({ symbol: \"UserCard\" }) // Search workspace for symbol\ncode_graph({ symbol: \"UserCard\", filePath: \"src/components.tsx\" }) // Look up in a specific file\ncode_graph({ symbol: \"formatUser\", filePath: \"utils.ts\", depth: 2 }) // Traverse 2 levels up the reference tree\n\\`\\`\\`\n\n**What it returns:**\n- The symbol's full type signature (parameters, return type)\n- All files/functions/components that reference it (grouped into pages vs components)\n- Which Next.js pages/routes contain it in their component tree\n- Level-2 transitive usages (who uses the things that use this symbol)\n- The file's symbol structure for surrounding context\n\n**When to use code_graph:**\n- **To locate a component/function by name** when you don't have the file path — e.g. a user mentions a component from devtools but the path is missing or mangled. 
Just pass the symbol name and it will find the definition.\n- **Before making changes** to a function/component — understand what depends on it and what will break\n- **To understand component hierarchies** — what renders what, which pages are affected across the *entire* codebase (not just the current page)\n- **To get type signatures** (props, params, return types) without reading entire files\n- **After a devtools selection** when the task involves refactoring, changing props, or anything that could impact other consumers\n\n**When NOT to use code_graph:**\n- For exploratory \"how does X work?\" questions — use \\`explore_agent\\` instead\n- For exact string searches — use grep/rg directly\n- For non-TypeScript/JavaScript files — code_graph only supports TS/JS/TSX/JSX\n\n### Searching and Exploration\n\n**Choose the right search approach:**\n\n0. **Use paths to your advantage — skip searching if you already have what you need.**\n - If the user selected a component via devtools and you can see the component name, file path, and/or line number, you ALREADY know where the code is. Just use \\`read_file\\` to read that file directly — do NOT call \\`explore_agent\\` to \"find\" something you already have the location of.\n - If you received a **page path** (e.g. \\`/dashboard\\`, \\`/settings/profile\\`), map it to the corresponding file in the project structure. In Next.js this means \\`app/dashboard/page.tsx\\`, \\`app/settings/profile/page.tsx\\`, etc. In other frameworks, check the routing convention (e.g. \\`pages/\\`, \\`src/routes/\\`). Use \\`read_file\\` on the mapped path directly.\n - **If the file path is missing, truncated, or doesn't exist** (common with devtools — webpack paths can be mangled), use \\`code_graph({ symbol: \"ComponentName\" })\\` to locate the component. 
This searches the workspace for the symbol definition AND returns its type info, references, and page locations in one call — much better than raw grep for components.\n - **After reading a devtools-selected component**, if the task involves changes that could affect other consumers (refactoring, changing props, renaming), use \\`code_graph\\` to see ALL files and pages that depend on it — the devtools component stack only shows the current page's hierarchy, not the full picture.\n - Read up and down component trees when you have the file path or page path to find what you're looking for.\n1. **Use the \\`explore_agent\\` tool (Explore agent)** for:\n - Semantic/exploratory questions: \"How does authentication work?\", \"Where is user data processed?\"\n - Finding code by meaning or concept, not exact text\n - Understanding how features are implemented across multiple files\n - Exploring unfamiliar parts of the codebase\n - Questions like \"where\", \"how\", \"what does X do\"\n \n The Explore agent is a mini-agent that intelligently explores the codebase, reads relevant files, and returns a summary of what it found. It's best for understanding and discovery.\n\n **MANDATORY — The Explore agent has ZERO context.** It cannot see the conversation, the user's messages, devtools data, or anything else. The \\`query\\` and \\`context\\` fields are the ONLY information it receives. 
Every time you call \\`explore_agent\\`, you MUST populate the \\`context\\` field with ALL relevant information:\n - Copy the ENTIRE \\`<devtools-context>\\` block (page URL, path, viewport) if one exists in the user's message.\n - Copy ALL selected component info (component name, file path, line number, HTML snippet, component stack) if the user selected something via devtools.\n - Include any file paths, component names, or page names the user mentioned.\n - If you skip the \\`context\\` field, the explore agent is searching completely blind and will waste time guessing.\n - NEVER call \\`explore_agent\\` with only a \\`query\\` and no \\`context\\` when the user's message contains devtools or component information.\n\n2. **Use the \\`code_graph\\` tool** for:\n - Understanding what depends on a specific symbol before changing it\n - Tracing component/function usage up to page-level routes\n - Getting type signatures (params, return types) without reading full files\n - Finding exact components usages in the codebase\n - Answering \"what will break if I change this?\" or \"which pages use this component?\"\n\n3. **Use direct commands (grep/rg, find)** for:\n - Exact string matches: \\`rg \"functionName\"\\`, \\`rg \"class MyClass\"\\`\n - Finding files by name: \\`find . -name \"*.config.ts\"\\`\n - Simple pattern matching when you know exactly what you're looking for\n - Counting occurrences or listing all matches\n\n**Examples:**\n- User selected \\`<LandingButton>\\` at \\`src/components/LandingButton.tsx:12\\` → Just \\`read_file(\"src/components/LandingButton.tsx\")\\`. 
Do NOT call explore_agent.\n- User selected \\`<PricingCard>\\` but no file path in the component stack → Use \\`code_graph({ symbol: \"PricingCard\" })\\` to find its definition, type info, and all usages at once.\n- User selected \\`<UserCard>\\` and says \"refactor the props\" → First \\`read_file\\` the component, then \\`code_graph({ symbol: \"UserCard\" })\\` to see every file/page that depends on it before changing the interface.\n- \"Where is the API authentication handled?\" (no file path given) → Use \\`explore_agent\\` tool\n- \"What pages use the UserCard component?\" → Use \\`code_graph({ symbol: \"UserCard\" })\\`\n- \"What's the type signature of formatUser?\" → Use \\`code_graph({ symbol: \"formatUser\", filePath: \"utils.ts\" })\\`\n- \"Find all usages of getUserById\" → Use \\`rg \"getUserById\"\\`\n- \"How does the payment flow work?\" → Use \\`explore_agent\\` tool\n- \"Find files named config\" → Use \\`find . -name \"*config*\"\\`\n\n${searchInstructions}\n\n###Follow these principles when designing and implementing software:\n\n1. **Modularity** — Write simple parts connected by clean interfaces\n2. **Clarity** — Clarity is better than cleverness\n3. **Composition** — Design programs to be connected to other programs\n4. **Separation** — Separate policy from mechanism; separate interfaces from engines\n5. **Simplicity** — Design for simplicity; add complexity only where you must\n6. **Parsimony** — Write a big program only when it is clear by demonstration that nothing else will do\n7. **Transparency** — Design for visibility to make inspection and debugging easier\n8. **Robustness** — Robustness is the child of transparency and simplicity\n9. **Representation** — Fold knowledge into data so program logic can be stupid and robust\n10. **Least Surprise** — In interface design, always do the least surprising thing\n11. **Silence** — When a program has nothing surprising to say, it should say nothing\n12. 
**Repair** — When you must fail, fail noisily and as soon as possible\n13. **Economy** — Programmer time is expensive; conserve it in preference to machine time\n14. **Generation** — Avoid hand-hacking; write programs to write programs when you can\n15. **Optimization** — Prototype before polishing. Get it working before you optimize it\n16. **Diversity** — Distrust all claims for \"one true way\"\n17. **Extensibility** — Design for the future, because it will be here sooner than you think\n\n### Follow these design rules for any user interfaces or experiences you write (DESIGN LIKE APPLE):\n\n1. **Simplicity** — Simplicity is the ultimate sophistication. Remove everything unnecessary.\n2. **Focus** — Say no to 1,000 things to say yes to the few that matter most.\n3. **Clarity** — Make the interface so clear that it becomes invisible.\n4. **Delight** — Sweat the details. The smallest touches create the biggest impact.\n5. **Intuitive** — It should just work. No manual required.\n6. **Human-Centered** — Design for people, not technology. Technology should adapt to humans, not the other way around.\n7. **Consistency** — Familiar patterns reduce cognitive load. Be predictable where it matters.\n8. **Feedback** — Every action deserves a response. Make interactions feel alive.\n9. **Forgiveness** — Make it easy to undo. Never punish exploration.\n10. **Beauty** — Aesthetics are not superficial. Beautiful things work better because people care about them.\n\n### Follow these rules to be a good agent for the user:\n\n1. Understand first - Read relevant files before making any changes. Use the \\`explore_agent\\` tool for exploratory questions about how things work, and direct searches (grep/rg) for finding exact strings or file names.\n2. Plan for complexity - If the task involves 3+ steps or has meaningful trade-offs, create a todo list to track progress before implementing.\n3. 
Use the right tools - Have specialized tools for reading files, editing code, semantic search via subagents, and running terminal commands. Prefer these over raw shell commands.\n4. Work efficiently - When need to do multiple independent things (like reading several files), do them in parallel rather than one at a time.\n5. Be direct - Focus on technical accuracy rather than validation. If see issues with an approach or need clarification, say so.\n6. Verify my work - After making changes, check for linter errors and fix any introduced.\n7. Respect boundaries - Only commit code when explicitly asked, avoid creating unnecessary files, and don't make assumptions about things uncertain about.\n\n\n### Communication\n- Explain your reasoning and approach\n- Be concise but thorough\n- Ask clarifying questions when requirements are ambiguous\n- Report progress on multi-step tasks\n\n${agentsMdContent}\n\n${alwaysLoadedContent}\n\n${globMatchedContent}\n\n## On-Demand Skills\n${onDemandSkillsContext}\n\n## Current Task List\n${todosContext}\n\n${customInstructions ? `## Custom Instructions\\n${customInstructions}` : ''}\n\nRemember: You are a helpful, capable coding assistant. Take initiative, be thorough, and deliver high-quality results.`;\n\n return systemPrompt;\n}\n\n/**\n * Format todos for system prompt context\n */\nfunction formatTodosForContext(todos: TodoItem[]): string {\n if (todos.length === 0) {\n return 'No active tasks. 
Use the todo tool to create a plan for complex operations.';\n }\n\n const statusEmoji: Record<string, string> = {\n pending: '⬜',\n in_progress: '🔄',\n completed: '✅',\n cancelled: '❌',\n };\n\n const lines = ['Current tasks:'];\n for (const todo of todos) {\n const emoji = statusEmoji[todo.status] || '•';\n lines.push(`${emoji} [${todo.id}] ${todo.content}`);\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Build an addendum for the system prompt when running in task mode.\n * Describes the output schema and the two completion tools the agent must use.\n */\nexport function buildTaskPromptAddendum(outputSchema: Record<string, unknown>): string {\n return `\n## Task Mode\n\nYou are running in **task mode**. You have been given a specific task to complete autonomously.\nYou have access to ALL the same tools as a normal session — bash, read_file, write_file, linter, todo, load_skill, explore_agent, code_graph, upload_file, and more. Use them all. This is not a limited session.\nIf you need to give the user a downloadable file (report, image, export, etc.), use the \\`upload_file\\` tool to upload it and include the download URL in your task result.\n\n### Rules\n1. Work independently — no human will approve tool calls. All tools run without approval.\n2. Keep working until the task is fully complete — and then VERIFY it is complete before finishing.\n3. When done, call the \\`complete_task\\` tool with a JSON result matching the output schema below.\n4. If you determine the task is impossible or encounter an unrecoverable error, call the \\`task_failed\\` tool with a clear reason.\n5. Do NOT stop without calling one of these two tools.\n\n### Verification — BE EXTREMELY THOROUGH\nBefore calling \\`complete_task\\`, you MUST verify your work completely. Do not just assume it worked. Actually check.\n\n**After making code changes:**\n- Run the **linter** on every file you touched to catch type errors and lint issues. 
Fix any you introduced.\n- **Read back the files you edited** to confirm the changes are correct and complete — don't rely on memory.\n- If there are **tests**, run them (\\`npm test\\`, \\`pytest\\`, etc.) and ensure they pass.\n- If you created new files, verify they exist and contain what you expect.\n\n**For UI / web changes:**\n- Start the dev server if it isn't already running (it might be so double check ur context)\n- **Open the browser** to verify the changes visually: using your agent-browser tool read the skill\n- Check the dev server logs for errors or warnings.\n- If the app crashes or shows errors, fix them before completing.\n\n**For backend / API changes:**\n- Test the endpoint with curl or a quick script to confirm it works as expected.\n- Check server logs for errors.\n\n**For search and exploration tasks:**\n- Actually search in the RIGHT directories — don't just search the root if the relevant code is in \\`src/\\`, \\`app/\\`, \\`lib/\\`, etc.\n- Use \\`explore_agent\\` for semantic/conceptual questions and \\`grep\\`/\\`code_graph\\` for exact lookups.\n- Cross-reference findings — if you find something in one place, verify related files are consistent.\n- Don't stop at the first match — make sure you've found ALL relevant occurrences.\n\n**General verification checklist:**\n- Re-read the original task prompt and confirm every requirement has been addressed.\n- If the task asked for multiple things, verify EACH one individually.\n- If something doesn't look right, fix it — don't complete with known issues.\n\n**Screenshot your completed work:**\n- After completing a task, take a screenshot of the result when it makes sense (UI changes browser pages, etc.). 
You can use the browser skill to do this.\n- Save screenshots and generated artifacts under \\`.sparkecode-artifacts/\\` (git-ignored) instead of the repo root.\n- Ensure \\`.sparkecode-artifacts/\\` is listed in \\`.gitignore\\`; if it is missing, add it before continuing.\n- Use \\`upload_file\\` to upload the screenshot and include the download URL in your task result so the user can see proof of what was done.\n- In task results, never return local filesystem paths for screenshots/reports; return uploaded \\`downloadUrl\\` links only.\n- This is especially valuable for UI/visual changes, successful test runs, and browser verification — show, don't just tell.\n\n### Use All Available Tools\n- **load_skill**: Load specialized skills/knowledge relevant to the task. Check what skills are available and use them.\n- **explore_agent**: Use for codebase exploration and understanding before making changes.\n- **code_graph**: Use to understand type hierarchies, references, and impact before refactoring.\n- **todo**: Track your progress on multi-step tasks so you don't miss steps.\n- **bash**: Full shell access — run builds, tests, dev servers, open browsers, curl endpoints, anything.\n- **upload_file**: Upload files (screenshots, reports, exports) to cloud storage. Use this to include screenshots of completed work in your task result — visual proof is very helpful.\n\n### Output Schema\nThe \\`complete_task\\` tool expects a \\`result\\` object matching this JSON Schema:\n\\`\\`\\`json\n${JSON.stringify(outputSchema, null, 2)}\n\\`\\`\\`\n\n### Completion Tools\n- **\\`complete_task({ result: ... })\\`** — Call ONLY after thorough verification. The result is validated against the schema above. 
If validation fails you will get errors back — fix and retry.\n- **\\`task_failed({ reason: \"...\" })\\`** — Call only if the task truly cannot be completed.\n`;\n}\n\n/**\n * Create a summary prompt for context compression\n */\nexport function createSummaryPrompt(conversationHistory: string): string {\n return `Please provide a concise summary of the following conversation history. Focus on:\n1. The main task or goal being worked on\n2. Key decisions made\n3. Important code changes or file operations performed\n4. Current state and any pending actions\n\nKeep the summary under 2000 characters while preserving essential context for continuing the work.\n\nConversation to summarize:\n${conversationHistory}\n\nSummary:`;\n}\n","/**\n * Message sanitization for AI SDK ModelMessage compatibility.\n * \n * Ensures messages retrieved from the database conform to the AI SDK's\n * ModelMessage[] schema before being passed to streamText()/generateText().\n * \n * Handles two classes of issues:\n * \n * 1. SCHEMA CORRUPTION: The remote database client's `parseDates()` function\n * recursively converts ISO date strings (like `createdAt`) inside tool result\n * outputs to JavaScript Date objects. The AI SDK's jsonValueSchema only accepts\n * JSON primitives — Date objects are rejected, causing AI_InvalidPromptError.\n * \n * 2. CONSECUTIVE SAME-ROLE MESSAGES: If multiple user messages are saved to the\n * database without an assistant response between them (e.g., user sends two\n * messages quickly, or the previous stream errored before producing a response),\n * the Anthropic API rejects consecutive same-role messages. 
This module merges\n * them into a single message.\n * \n * This module provides a safety net that catches and repairs any corrupted\n * messages so the agent can self-heal even if the database layer returns\n * unexpected data.\n */\n\nimport { modelMessageSchema, type ModelMessage } from 'ai';\n\n/**\n * Recursively convert Date objects to ISO strings within a value.\n * This reverses any accidental Date conversions from parseDates.\n */\nfunction convertDatesToStrings(value: unknown): unknown {\n if (value === null || value === undefined) return value;\n if (value instanceof Date) return value.toISOString();\n if (Array.isArray(value)) return value.map(convertDatesToStrings);\n if (typeof value === 'object') {\n const result: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(value as Record<string, unknown>)) {\n result[k] = convertDatesToStrings(v);\n }\n return result;\n }\n return value;\n}\n\n/**\n * Sanitize a single ModelMessage so it passes AI SDK schema validation.\n * \n * Fixes known corruption patterns:\n * - Date objects inside tool result output (from parseDates)\n * - Date objects inside tool call input\n * - Date objects inside user/assistant content parts\n */\nfunction sanitizeMessage(msg: unknown): unknown {\n if (msg === null || msg === undefined || typeof msg !== 'object') return msg;\n \n const message = msg as Record<string, unknown>;\n \n // Only process messages with a recognized role\n if (!message.role || typeof message.role !== 'string') return msg;\n \n // Deep-convert any Date objects back to ISO strings\n return convertDatesToStrings(message);\n}\n\n/**\n * Merge the content of two user messages into a single content value.\n * \n * Rules:\n * - string + string → joined with \"\\n\\n\"\n * - array + array → concatenated (deduplicating text parts)\n * - string + array → string converted to text part, then concatenated\n * - array + string → string converted to text part, then appended\n */\nfunction 
mergeUserContent(\n a: unknown,\n b: unknown,\n): string | Array<Record<string, unknown>> {\n const aIsString = typeof a === 'string';\n const bIsString = typeof b === 'string';\n\n // Both strings: simple join\n if (aIsString && bIsString) {\n return `${a}\\n\\n${b}`;\n }\n\n // Normalise both to arrays\n const aParts: Array<Record<string, unknown>> = aIsString\n ? [{ type: 'text', text: a }]\n : Array.isArray(a)\n ? (a as Array<Record<string, unknown>>)\n : [];\n\n const bParts: Array<Record<string, unknown>> = bIsString\n ? [{ type: 'text', text: b }]\n : Array.isArray(b)\n ? (b as Array<Record<string, unknown>>)\n : [];\n\n return [...aParts, ...bParts];\n}\n\n/**\n * Merge consecutive same-role messages in the array.\n * \n * The Anthropic API (and some other providers) reject message arrays where\n * two consecutive messages share the same role. This can happen when:\n * - The user sends two messages before the agent responds\n * - The previous agent stream errored/aborted before producing a response\n * - Network issues cause duplicate message saves\n * \n * For user messages: merges content (text joined with newlines, parts concatenated)\n * For assistant messages: merges content parts into a single array\n * For tool messages: concatenates content arrays\n */\nfunction mergeConsecutiveSameRole(messages: ModelMessage[]): ModelMessage[] {\n if (messages.length <= 1) return messages;\n\n const merged: ModelMessage[] = [];\n\n for (const msg of messages) {\n const prev = merged[merged.length - 1];\n\n if (!prev || (prev as any).role !== (msg as any).role) {\n // Different role or first message — keep as-is\n merged.push(msg);\n continue;\n }\n\n // Same role as previous — merge\n const role = (msg as any).role as string;\n\n if (role === 'user') {\n const mergedContent = mergeUserContent((prev as any).content, (msg as any).content);\n merged[merged.length - 1] = { role: 'user', content: mergedContent } as any as ModelMessage;\n 
console.warn('[sanitize-messages] Merged consecutive user messages');\n } else if (role === 'assistant') {\n // Normalise both to arrays and concatenate\n const prevParts = typeof (prev as any).content === 'string'\n ? [{ type: 'text', text: (prev as any).content }]\n : Array.isArray((prev as any).content)\n ? (prev as any).content\n : [];\n const curParts = typeof (msg as any).content === 'string'\n ? [{ type: 'text', text: (msg as any).content }]\n : Array.isArray((msg as any).content)\n ? (msg as any).content\n : [];\n merged[merged.length - 1] = { role: 'assistant', content: [...prevParts, ...curParts] } as any as ModelMessage;\n console.warn('[sanitize-messages] Merged consecutive assistant messages');\n } else if (role === 'tool') {\n // Tool messages always have array content — concatenate\n const prevContent = Array.isArray((prev as any).content) ? (prev as any).content : [];\n const curContent = Array.isArray((msg as any).content) ? (msg as any).content : [];\n merged[merged.length - 1] = { role: 'tool', content: [...prevContent, ...curContent] } as any as ModelMessage;\n console.warn('[sanitize-messages] Merged consecutive tool messages');\n } else {\n // Unknown role — just push, don't try to merge\n merged.push(msg);\n }\n }\n\n return merged;\n}\n\n/**\n * Validate and sanitize an array of ModelMessage objects.\n * \n * Performs two passes:\n * 1. Schema repair — fixes individual messages that fail AI SDK validation\n * (Date objects, missing type wrappers, etc.)\n * 2. Sequence repair — merges consecutive same-role messages that providers\n * like Anthropic would reject\n * \n * Returns the original messages if they're all valid and properly sequenced,\n * or repaired copies if any had issues. 
Logs warnings for any messages that\n * needed repair so the issue is visible in server logs.\n */\nexport function sanitizeModelMessages(messages: ModelMessage[]): ModelMessage[] {\n // === Pass 1: Schema repair ===\n // Fast path: try validating the whole array first\n let allValid = true;\n for (const msg of messages) {\n try {\n modelMessageSchema.parse(msg);\n } catch {\n allValid = false;\n break;\n }\n }\n \n let result: ModelMessage[];\n \n if (allValid) {\n result = messages;\n } else {\n // Slow path: sanitize each message individually\n console.warn('[sanitize-messages] Detected invalid messages, attempting self-repair...');\n \n const sanitized: ModelMessage[] = [];\n let repairCount = 0;\n \n for (let i = 0; i < messages.length; i++) {\n const msg = messages[i];\n \n // Check if this specific message is valid\n try {\n modelMessageSchema.parse(msg);\n sanitized.push(msg);\n continue;\n } catch {\n // Needs repair\n }\n \n // Strategy 1: Convert Date objects to ISO strings\n const fixed = sanitizeMessage(msg) as ModelMessage;\n try {\n modelMessageSchema.parse(fixed);\n sanitized.push(fixed);\n repairCount++;\n console.warn(`[sanitize-messages] Repaired message ${i} (role=${(msg as any).role}) - converted Date objects to strings`);\n continue;\n } catch {\n // Strategy 1 failed\n }\n \n // Strategy 2: For tool messages, try wrapping raw output in { type: 'json', value: ... 
}\n // This handles cases where tool output was stored in legacy format\n if ((msg as any).role === 'tool' && Array.isArray((msg as any).content)) {\n const fixedContent = ((msg as any).content as any[]).map((part: any) => {\n if (part.type === 'tool-result' && part.output !== undefined) {\n const output = convertDatesToStrings(part.output);\n // If output doesn't have a recognized type discriminator, wrap it\n if (output && typeof output === 'object' && !(output as any).type) {\n return { ...part, output: { type: 'json', value: output } };\n }\n // If output has a type but it's not a recognized discriminator, wrap it\n const knownTypes = ['text', 'json', 'execution-denied', 'error-text', 'error-json', 'content'];\n if (output && typeof output === 'object' && !knownTypes.includes((output as any).type)) {\n return { ...part, output: { type: 'json', value: output } };\n }\n return { ...part, output };\n }\n return convertDatesToStrings(part);\n });\n \n const wrappedMsg = { ...(msg as any), content: fixedContent } as ModelMessage;\n try {\n modelMessageSchema.parse(wrappedMsg);\n sanitized.push(wrappedMsg);\n repairCount++;\n console.warn(`[sanitize-messages] Repaired message ${i} (role=tool) - wrapped raw output in json type`);\n continue;\n } catch {\n // Strategy 2 failed\n }\n }\n \n // Strategy 3: Last resort - include the message as-is and let it fail\n // downstream with a better error context. This is better than silently\n // dropping messages which could corrupt conversation state.\n console.error(\n `[sanitize-messages] Could not repair message ${i} (role=${(msg as any).role}). ` +\n `Message will be included as-is. 
Content keys: ${JSON.stringify(Object.keys(msg as any))}`,\n );\n sanitized.push(msg);\n }\n \n if (repairCount > 0) {\n console.warn(`[sanitize-messages] Self-repair complete: fixed ${repairCount}/${messages.length} messages`);\n }\n \n result = sanitized;\n }\n\n // === Pass 2: Sequence repair ===\n // Merge consecutive same-role messages (Anthropic rejects these)\n result = mergeConsecutiveSameRole(result);\n\n return result;\n}\n","export interface ModelLimits {\n contextWindow: number;\n rollingTarget: number;\n}\n\nconst MODEL_LIMITS: Record<string, ModelLimits> = {\n 'anthropic/claude-opus-4-6': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'anthropic/claude-sonnet-4': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'anthropic/claude-3.5-sonnet': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'anthropic/claude-3-haiku': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'google/gemini-3-flash-preview': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'google/gemini-2.5-pro': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'google/gemini-2.5-flash': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'openai/gpt-4o': { contextWindow: 128_000, rollingTarget: 78_000 },\n 'openai/gpt-4.1': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'openai/o3': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'xai/grok-3': { contextWindow: 131_072, rollingTarget: 80_000 },\n};\n\nconst DEFAULT_LIMITS: ModelLimits = { contextWindow: 200_000, rollingTarget: 150_000 };\n\nconst PREFIX_DEFAULTS: Record<string, ModelLimits> = {\n 'anthropic/': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'google/': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'openai/': { contextWindow: 128_000, rollingTarget: 78_000 },\n 'xai/': { contextWindow: 131_072, rollingTarget: 80_000 },\n};\n\n/**\n * Look up context window limits for a model.\n * Tries exact match, then prefix match, then falls back to conservative defaults.\n 
*/\nexport function getModelLimits(modelId: string): ModelLimits {\n const normalized = modelId.trim().toLowerCase();\n\n const exact = MODEL_LIMITS[normalized];\n if (exact) return exact;\n\n for (const [prefix, limits] of Object.entries(PREFIX_DEFAULTS)) {\n if (normalized.startsWith(prefix)) return limits;\n }\n\n return DEFAULT_LIMITS;\n}\n\nexport const SUMMARIZATION_MODEL = 'google/gemini-3-flash-preview';\n\n/** Summarize in chunks of roughly this many tokens. */\nexport const SUMMARY_CHUNK_TOKENS = 30_000;\n\n/**\n * Fraction of the rolling target that accumulated summaries may consume\n * before they themselves get re-summarized.\n */\nexport const SUMMARY_BUDGET_RATIO = 0.15;\n","/**\n * Shared in-memory store for devtools context (current page user is viewing).\n * Extracted to a shared module so both server routes and the agent can access it.\n */\n\nexport interface DevtoolsContext {\n url: string;\n path: string;\n pageName: string;\n screenWidth?: number;\n screenHeight?: number;\n devicePixelRatio?: number;\n lastHeartbeat: Date;\n}\n\nconst devtoolsContextStore = new Map<string, DevtoolsContext>();\n\n/**\n * Set/update devtools context for a session\n */\nexport function setDevtoolsContext(sessionId: string, context: DevtoolsContext): void {\n devtoolsContextStore.set(sessionId, context);\n}\n\n/**\n * Get devtools context for a session (returns null if not connected or stale)\n */\nexport function getDevtoolsContext(sessionId: string): DevtoolsContext | null {\n cleanupStaleContexts();\n return devtoolsContextStore.get(sessionId) || null;\n}\n\n/**\n * Clean up stale devtools contexts (older than 30 seconds = disconnected)\n */\nexport function cleanupStaleContexts(): void {\n const now = Date.now();\n for (const [sessionId, ctx] of devtoolsContextStore) {\n if (now - ctx.lastHeartbeat.getTime() > 30 * 1000) {\n devtoolsContextStore.delete(sessionId);\n }\n }\n}\n","import { Hono } from 'hono';\nimport { zValidator } from 
'@hono/zod-validator';\nimport { z } from 'zod';\nimport { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { sessionQueries, toolExecutionQueries, activeStreamQueries, messageQueries } from '../../db/index.js';\nimport { Agent } from '../../agent/index.js';\nimport { getConfig, getAppDataDirectory } from '../../config/index.js';\nimport { streamContext } from '../resumable-stream.js';\nimport { nanoid } from 'nanoid';\nimport { createCheckpoint, getCheckpointManager } from '../../checkpoints/index.js';\nimport { getDevtoolsContext } from '../devtools-store.js';\nimport { getOrCreateProxy, getProxy, destroyProxy, type BrowserFrame, type BrowserStatus, type BrowserInputEvent } from '../../browser/stream-proxy.js';\nimport { FrameRecorder } from '../../browser/recorder.js';\nimport { isRemoteConfigured } from '../../db/remote.js';\nimport { resizeImageIfNeeded } from '../../utils/resize-image.js';\n\n// Active recorders per session\nconst sessionRecorders = new Map<string, FrameRecorder>();\n\nconst MAX_TOOL_INPUT_LENGTH = 8 * 1024;\nconst MAX_TOOL_INPUT_PREVIEW = 2 * 1024;\nconst MAX_TOOL_ARGS_CHUNK = 2 * 1024;\n\nfunction sanitizeToolInput(toolName: string, input: unknown): unknown {\n if (toolName !== 'write_file' || !input || typeof input !== 'object') {\n return input;\n }\n\n const data = input as Record<string, unknown>;\n let changed = false;\n const next: Record<string, unknown> = { ...data };\n\n const content = typeof data.content === 'string' ? data.content : undefined;\n if (content && content.length > MAX_TOOL_INPUT_LENGTH) {\n next.content = `${content.slice(0, MAX_TOOL_INPUT_PREVIEW)}\\n... (truncated)`;\n next.contentLength = content.length;\n next.contentTruncated = true;\n changed = true;\n }\n\n const oldString = typeof data.old_string === 'string' ? 
data.old_string : undefined;\n if (oldString && oldString.length > MAX_TOOL_INPUT_LENGTH) {\n next.old_string = `${oldString.slice(0, MAX_TOOL_INPUT_PREVIEW)}\\n... (truncated)`;\n next.oldStringLength = oldString.length;\n next.oldStringTruncated = true;\n changed = true;\n }\n\n const newString = typeof data.new_string === 'string' ? data.new_string : undefined;\n if (newString && newString.length > MAX_TOOL_INPUT_LENGTH) {\n next.new_string = `${newString.slice(0, MAX_TOOL_INPUT_PREVIEW)}\\n... (truncated)`;\n next.newStringLength = newString.length;\n next.newStringTruncated = true;\n changed = true;\n }\n\n if (changed) {\n console.log('[TOOL-INPUT] Truncated write_file input for streaming payload size');\n }\n\n return changed ? next : input;\n}\n\nfunction buildToolArgsText(input: unknown): string {\n try {\n return JSON.stringify(input ?? {});\n } catch {\n return '{}';\n }\n}\n\nasync function emitSyntheticToolStreaming(\n writeSSE: (data: string) => Promise<void>,\n toolCallStarts: Set<string>,\n toolCallId: string,\n toolName: string,\n input: unknown\n): Promise<void> {\n if (toolCallStarts.has(toolCallId)) return;\n\n toolCallStarts.add(toolCallId);\n await writeSSE(JSON.stringify({\n type: 'tool-input-start',\n toolCallId,\n toolName,\n }));\n\n if (toolName !== 'write_file') return;\n\n const argsText = buildToolArgsText(input);\n for (let i = 0; i < argsText.length; i += MAX_TOOL_ARGS_CHUNK) {\n const chunk = argsText.slice(i, i + MAX_TOOL_ARGS_CHUNK);\n await writeSSE(JSON.stringify({\n type: 'tool-input-delta',\n toolCallId,\n argsTextDelta: chunk,\n }));\n await new Promise((resolve) => setTimeout(resolve, 0));\n }\n}\n\n/**\n * Build a devtools context XML string to attach to user messages.\n * Wrapped in <devtools-context> tags so the frontend can regex it out for display.\n * The LLM still sees it as part of the user message for full context.\n */\nfunction buildDevtoolsContextXml(sessionId: string): string {\n const ctx = 
getDevtoolsContext(sessionId);\n if (!ctx) return '';\n\n const parts: string[] = [];\n parts.push(`<devtools-context>`);\n parts.push(` <page url=\"${ctx.url}\" path=\"${ctx.path}\" name=\"${ctx.pageName}\" />`);\n if (ctx.screenWidth && ctx.screenHeight) {\n parts.push(` <viewport width=\"${ctx.screenWidth}\" height=\"${ctx.screenHeight}\"${ctx.devicePixelRatio ? ` dpr=\"${ctx.devicePixelRatio}\"` : ''} />`);\n }\n parts.push(`</devtools-context>`);\n return parts.join('\\n');\n}\n\n/**\n * Prepend devtools context XML to a user prompt string.\n * The XML block comes first so the LLM always has screen context.\n */\nfunction enrichPromptWithDevtoolsContext(sessionId: string, prompt: string): string {\n const xml = buildDevtoolsContextXml(sessionId);\n if (!xml) return prompt;\n return `${xml}\\n\\n${prompt}`;\n}\n\n/**\n * Encode browser recording frames to MP4 and upload to GCS via remote server.\n * Runs in background -- errors are logged but not propagated.\n */\nasync function encodAndUploadRecording(sessionId: string, recorder: FrameRecorder): Promise<void> {\n if (!isRemoteConfigured()) {\n console.log('[RECORDING] Remote server not configured, skipping upload');\n recorder.clear();\n return;\n }\n\n console.log(`[RECORDING] Encoding ${recorder.frameCount} frames for session ${sessionId}...`);\n const result = await recorder.encode();\n recorder.clear();\n\n if (!result) {\n console.log('[RECORDING] Encoding failed or produced no output');\n return;\n }\n\n try {\n const { storageQueries } = await import('../../db/remote.js');\n const { readFile, unlink } = await import('node:fs/promises');\n\n // Get presigned upload URL\n const uploadInfo = await storageQueries.getUploadUrl(\n sessionId,\n `browser-recording-${Date.now()}.mp4`,\n 'video/mp4',\n 'browser-recording'\n );\n\n // Upload to GCS\n const fileData = await readFile(result.path);\n await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': 'video/mp4' },\n body: fileData,\n 
});\n\n // Update file size\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: result.sizeBytes });\n\n console.log(`[RECORDING] Uploaded recording for session ${sessionId} (${result.sizeBytes} bytes)`);\n\n // Clean up temp file\n await unlink(result.path).catch(() => {});\n } catch (err: any) {\n console.error('[RECORDING] Upload failed:', err.message);\n }\n}\n\nconst agents = new Hono();\n\n// Schemas\nconst attachmentSchema = z.object({\n type: z.enum(['image', 'file']),\n data: z.string(), // base64 data URL or raw base64\n mediaType: z.string().optional(),\n filename: z.string().optional(),\n});\n\nconst runPromptSchema = z.object({\n prompt: z.string(), // Can be empty if attachments are provided\n attachments: z.array(attachmentSchema).optional(),\n}).refine(\n (data) => data.prompt.trim().length > 0 || (data.attachments && data.attachments.length > 0),\n { message: 'Either prompt or attachments must be provided' }\n);\n\nconst quickStartSchema = z.object({\n prompt: z.string().min(1),\n name: z.string().optional(),\n workingDirectory: z.string().optional(),\n model: z.string().optional(),\n toolApprovals: z.record(z.string(), z.boolean()).optional(),\n});\n\nconst rejectSchema = z.object({\n reason: z.string().optional(),\n}).optional();\n\n// Store for abort controllers - keyed by streamId\nconst streamAbortControllers = new Map<string, AbortController>();\n\n// Attachment type for the stream producer\ninterface StreamAttachment {\n type: 'image' | 'file';\n data: string;\n mediaType?: string;\n filename?: string;\n /** Path where the attachment was saved on disk */\n savedPath?: string;\n}\n\n/**\n * Get the attachments directory for a session\n */\nfunction getAttachmentsDirectory(sessionId: string): string {\n const appDataDir = getAppDataDirectory();\n return join(appDataDir, 'attachments', sessionId);\n}\n\n/**\n * Save an attachment to disk and return the file path.\n * Images are automatically resized if they exceed provider 
dimension limits.\n */\nasync function saveAttachmentToDisk(\n sessionId: string, \n attachment: { type: 'image' | 'file'; data: string; mediaType?: string; filename?: string },\n index: number\n): Promise<string> {\n const attachmentsDir = getAttachmentsDirectory(sessionId);\n \n if (!existsSync(attachmentsDir)) {\n mkdirSync(attachmentsDir, { recursive: true });\n }\n \n let filename = attachment.filename;\n if (!filename) {\n const ext = getExtensionFromMediaType(attachment.mediaType, attachment.type);\n filename = `attachment_${index + 1}${ext}`;\n }\n \n let base64Data = attachment.data;\n if (base64Data.includes(',')) {\n base64Data = base64Data.split(',')[1];\n }\n \n let buffer: Buffer = Buffer.from(base64Data, 'base64');\n\n if (attachment.type === 'image') {\n buffer = await resizeImageIfNeeded(buffer, attachment.mediaType);\n const prefix = attachment.data.includes(',') ? attachment.data.split(',')[0] + ',' : '';\n attachment.data = prefix + buffer.toString('base64');\n }\n\n const filePath = join(attachmentsDir, filename);\n writeFileSync(filePath, buffer);\n \n return filePath;\n}\n\n/**\n * Get file extension from media type\n */\nfunction getExtensionFromMediaType(mediaType?: string, type?: 'image' | 'file'): string {\n if (!mediaType) {\n return type === 'image' ? 
'.png' : '.bin';
  }

  const mimeToExt: Record<string, string> = {
    'image/png': '.png',
    'image/jpeg': '.jpg',
    'image/jpg': '.jpg',
    'image/gif': '.gif',
    'image/webp': '.webp',
    'image/svg+xml': '.svg',
    'application/pdf': '.pdf',
    'text/plain': '.txt',
    'text/markdown': '.md',
    'application/json': '.json',
    'application/javascript': '.js',
    'text/javascript': '.js',
    'text/typescript': '.ts',
    'text/html': '.html',
    'text/css': '.css',
  };

  // Unknown media types fall back to a generic binary extension.
  return mimeToExt[mediaType] || '.bin';
}

/**
 * Creates a stream producer function for the agent.
 * This function returns a ReadableStream that emits SSE-formatted events.
 *
 * IMPORTANT: The agent runs independently of client connections. Client disconnection
 * does NOT abort the agent - only explicit abort requests do. This enables:
 * - Session switching without stopping the agent
 * - Browser tab closing without losing work
 * - Multiple clients watching the same stream
 * - Reconnection to ongoing streams
 */
function createAgentStreamProducer(
  sessionId: string,
  prompt: string,
  streamId: string,
  attachments?: StreamAttachment[]
): () => ReadableStream<string> {
  return () => {
    // Create a TransformStream to convert events to SSE format
    const { readable, writable } = new TransformStream<string, string>();
    const writer = writable.getWriter();
    let writerClosed = false;
    // Tracks toolCallIds that already emitted 'tool-input-start' so synthetic
    // streaming is only emitted for calls that never streamed.
    const toolCallStarts = new Set<string>();

    // Create an abort controller for this specific stream (for explicit stops only)
    const abortController = new AbortController();
    streamAbortControllers.set(streamId, abortController);

    let sseEventCount = 0;
    let sseBrowserFrameCount = 0;
    let sseWriteErrors = 0;

    // Helper to write SSE event - handles client disconnection gracefully
    const writeSSE = async (data: string) => {
      if (writerClosed) return;
      try {
        sseEventCount++;
        await writer.write(`data: ${data}\n\n`);
      } catch (err: any) {
        sseWriteErrors++;
        if (sseWriteErrors === 1) {
          console.log(`[SSE:${streamId}] Writer closed (client disconnected). Total events sent: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}`);
        }
        writerClosed = true;
      }
    };

    // Safe close helper
    const safeClose = async () => {
      if (writerClosed) return;
      try {
        console.log(`[SSE:${streamId}] Stream closing. Total events: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}, write errors: ${sseWriteErrors}`);
        writerClosed = true;
        await writer.close();
      } catch {
        // Already closed, ignore
      }
    };

    // Cleanup abort controller
    const cleanupAbortController = () => {
      streamAbortControllers.delete(streamId);
    };

    // Run the agent in the background
    (async () => {
      let isAborted = false;

      try {
        const agent = await Agent.create({ sessionId });

        // Send stream ID for clients to use for reconnection
        await writeSSE(JSON.stringify({ type: 'data-stream-id', streamId }));

        // Build user message content for broadcast (includes attachments)
        // Note: Attachments are already saved to disk before this point (in the route handler)
        // so they already have savedPath set
        let broadcastContent: string | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }>;
        if (attachments && attachments.length > 0) {
          const contentParts: Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> = [];

          // IMPORTANT: Put file location info FIRST so the model knows where files are saved
          const attachmentDescriptions = attachments
            .map((a, i) => {
              const name = a.filename || `attachment_${i + 1}`;
              const typeLabel = a.type === 'image' ? 'Image' : 'File';
              const location = a.savedPath || '(path unknown)';
              return `${i + 1}. ${typeLabel}: "${name}" saved at: ${location}`;
            })
            .join('\n');
          contentParts.push({
            type: 'text',
            text: `[FILE ATTACHMENTS - The user has attached the following files which are saved on disk]\n${attachmentDescriptions}\n\nYou can reference these files by their paths above. The file contents are also shown inline below.`
          });

          // Add user's text prompt
          if (prompt) {
            contentParts.push({ type: 'text', text: `\n[USER MESSAGE]\n${prompt}` });
          }

          // Add file/image parts with filename and path metadata
          for (const attachment of attachments) {
            if (attachment.type === 'image') {
              contentParts.push({
                type: 'image',
                image: attachment.data,
                mediaType: attachment.mediaType,
                filename: attachment.filename,
                savedPath: attachment.savedPath,
              });
            } else {
              contentParts.push({
                type: 'file',
                data: attachment.data,
                mediaType: attachment.mediaType || 'application/octet-stream',
                filename: attachment.filename,
                savedPath: attachment.savedPath,
              });
            }
          }
          broadcastContent = contentParts;
        } else {
          broadcastContent = prompt;
        }

        // Send user message so other clients can see it (includes attachments)
        await writeSSE(JSON.stringify({
          type: 'data-user-message',
          data: { id: `user_${Date.now()}`, content: broadcastContent },
        }));

        // Send message start
        const messageId = `msg_${Date.now()}`;
        await writeSSE(JSON.stringify({ type: 'start', messageId }));

        let textId = `text_${Date.now()}`;
        let textStarted = false;

        const result = await agent.stream({
          prompt,
          attachments, // Pass attachments to agent
          abortSignal: abortController.signal, // Use our managed abort controller, NOT client signal
          skipSaveUserMessage: true, // User message is saved in the route before streaming
          // Note: tool-input-start/available events are sent from the stream loop
          // when we see tool-call-streaming-start and tool-call events.
          // We only use onToolCall/onToolResult for non-streaming scenarios or
          // tools that need special handling (like approval requests).
          onToolCall: async () => {
            // Events sent from stream loop
          },
          onToolResult: async () => {
            // Events sent from stream loop
          },
          onApprovalRequired: async (execution) => {
            await writeSSE(JSON.stringify({
              type: 'data-approval-required',
              data: {
                id: execution.id,
                toolCallId: execution.toolCallId,
                toolName: execution.toolName,
                input: execution.input,
              },
            }));
          },
          onToolProgress: async (progress) => {
            const status = (progress.data as any)?.status || 'no-status';
            const contentLength = typeof (progress.data as any)?.content === 'string'
              ? (progress.data as any).content.length
              : undefined;
            const chunkIndex = (progress.data as any)?.chunkIndex;
            const chunkCount = (progress.data as any)?.chunkCount;
            console.log(
              '[TOOL-PROGRESS] Sending:',
              progress.toolName,
              status,
              contentLength !== undefined ? `contentLength=${contentLength}` : '',
              chunkIndex !== undefined || chunkCount !== undefined ? `chunk=${chunkIndex}/${chunkCount}` : ''
            );
            await writeSSE(JSON.stringify({
              type: 'tool-progress',
              toolName: progress.toolName,
              data: progress.data,
            }));
            if (progress.toolName === 'write_file' && status === 'content') {
              await writeSSE(JSON.stringify({
                type: 'debug',
                label: 'write-file-progress',
                contentLength,
                chunkIndex,
                chunkCount,
              }));
              // Yield to the event loop so the write is flushed promptly.
              await new Promise((resolve) => setTimeout(resolve, 0));
            }

            // Browser stream: connect proxy on any agent-browser command, destroy on close
            const browserPort = (progress.data as any)?.browserStreamPort;
            const browserClosed = (progress.data as any)?.browserClosed;

            if (progress.toolName === 'bash' && browserClosed) {
              console.log(`[BROWSER-STREAM:${streamId}] agent-browser close detected, destroying proxy for session ${sessionId}`);
              destroyProxy(sessionId);
            } else if (progress.toolName === 'bash' && browserPort) {
              console.log(`[BROWSER-STREAM:${streamId}] agent-browser command detected, ensuring proxy on port ${browserPort} for session ${sessionId}`);
              const proxy = getOrCreateProxy(sessionId, browserPort);
              console.log(`[BROWSER-STREAM:${streamId}] Proxy state: connected=${proxy.connected}, frameListeners=${proxy.listenerCount('frame')}, statusListeners=${proxy.listenerCount('status')}`);

              if (!sessionRecorders.has(sessionId)) {
                const recorder = new FrameRecorder(sessionId);
                recorder.start();
                sessionRecorders.set(sessionId, recorder);
              }

              // Always replace listeners so they reference the CURRENT stream's writeSSE.
              // Old listeners from a previous stream would silently drop frames because
              // their writeSSE closure has writerClosed=true.
              const oldFrameListeners = proxy.listenerCount('frame');
              if (oldFrameListeners > 0) {
                console.log(`[BROWSER-STREAM:${streamId}] Replacing ${oldFrameListeners} stale frame listener(s) from previous stream`);
                proxy.removeAllListeners('frame');
                proxy.removeAllListeners('status');
              }
              console.log(`[BROWSER-STREAM:${streamId}] Attaching frame+status listeners to proxy`);
              proxy.on('frame', (frame: BrowserFrame) => {
                sseBrowserFrameCount++;
                if (sseBrowserFrameCount === 1) {
                  console.log(`[BROWSER-STREAM:${streamId}] First browser frame received! dataSize=${frame.data?.length ?? 0} writerClosed=${writerClosed}`);
                } else if (sseBrowserFrameCount % 50 === 0) {
                  console.log(`[BROWSER-STREAM:${streamId}] Browser frame #${sseBrowserFrameCount} (writerClosed=${writerClosed})`);
                }
                const rec = sessionRecorders.get(sessionId);
                rec?.addFrame(frame);
                // Fire-and-forget: frame delivery must not block the proxy event loop.
                writeSSE(JSON.stringify({
                  type: 'browser-frame',
                  data: frame.data,
                  metadata: frame.metadata,
                })).catch((err) => {
                  console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-frame via SSE:`, err);
                });
              });
              proxy.on('status', (s: BrowserStatus) => {
                console.log(`[BROWSER-STREAM:${streamId}] Browser status event: connected=${s.connected} screencasting=${s.screencasting} viewport=${s.viewportWidth}x${s.viewportHeight}`);
                writeSSE(JSON.stringify({
                  type: 'browser-status',
                  ...s,
                })).catch((err) => {
                  console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-status via SSE:`, err);
                });
              });
              // Request fresh status so the new listeners get the current state
              proxy.requestStatus();
            }
          },
          onStepFinish: async () => {
            await writeSSE(JSON.stringify({ type: 'finish-step' }));
            if (textStarted) {
              await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));
              textStarted = false;
              textId = `text_${Date.now()}`;
            }
          },
          onAbort: async ({ steps }) => {
            isAborted = true;
            console.log(`Stream aborted after ${steps.length} steps`);
          },
        });

        // Consume the stream
        let reasoningId = `reasoning_${Date.now()}`;
        let reasoningStarted = false;

        for await (const part of result.stream.fullStream) {
          if (part.type === 'text-delta') {
            if (!textStarted) {
              await writeSSE(JSON.stringify({ type: 'text-start', id: textId }));
              textStarted = true;
            }
            await writeSSE(JSON.stringify({ type: 'text-delta', id: textId, delta: part.text }));
          } else if (part.type === 'reasoning-start') {
            await writeSSE(JSON.stringify({ type: 'reasoning-start', id: reasoningId }));
            reasoningStarted = true;
          } else if (part.type === 'reasoning-delta') {
            await writeSSE(JSON.stringify({ type: 'reasoning-delta', id: reasoningId, delta: part.text }));
          } else if (part.type === 'reasoning-end') {
            if (reasoningStarted) {
              await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));
              reasoningStarted = false;
              reasoningId = `reasoning_${Date.now()}`;
            }
          } else if ((part as any).type === 'tool-call-streaming-start') {
            // Tool call starting to stream - send the tool name
            const p = part as any;
            await writeSSE(JSON.stringify({
              type: 'tool-input-start',
              toolCallId: p.toolCallId,
              toolName: p.toolName,
            }));
            toolCallStarts.add(p.toolCallId);
          } else if ((part as any).type === 'tool-call-delta') {
            // Streaming tool args delta
            const p = part as any;
            await writeSSE(JSON.stringify({
              type: 'tool-input-delta',
              toolCallId: p.toolCallId,
              argsTextDelta: p.argsTextDelta,
            }));
          } else if (part.type === 'tool-call') {
            await emitSyntheticToolStreaming(
              writeSSE,
              toolCallStarts,
              part.toolCallId,
              part.toolName,
              part.input
            );
            await writeSSE(JSON.stringify({
              type: 'tool-input-available',
              toolCallId: part.toolCallId,
              toolName: part.toolName,
              input: sanitizeToolInput(part.toolName, part.input),
            }));
            await writeSSE(JSON.stringify({
              type: 'debug',
              label: 'tool-input-available',
              toolName: part.toolName,
            }));
          } else if (part.type === 'tool-result') {
            await writeSSE(JSON.stringify({
              type: 'tool-output-available',
              toolCallId: part.toolCallId,
              output: part.output,
            }));
          } else if (part.type === 'error') {
            console.error('Stream error:', part.error);
            await writeSSE(JSON.stringify({ type: 'error', errorText: String(part.error) }));
          }
        }

        // End text if started
        if (textStarted) {
          await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));
        }

        // End reasoning if still open
        if (reasoningStarted) {
          await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));
        }

        // Save response messages
        if (!isAborted) {
          await result.saveResponseMessages();
        }

        // Send finish or abort message
        if (isAborted) {
          await writeSSE(JSON.stringify({ type: 'abort' }));
        } else {
          await writeSSE(JSON.stringify({ type: 'finish' }));
        }

        // Mark stream as finished
        try {
          await activeStreamQueries.finish(streamId);
        } catch {
          // Database may be closed during shutdown - ignore
        }
      } catch (error: any) {
        if (error.name === 'AbortError' || error.message?.includes('aborted')) {
          // User explicitly stopped the stream
          // NOTE(review): this path does not mark the active-stream row finished.
          // The normal abort flow (onAbort) exits the loop and reaches finish()
          // above — confirm an escaped AbortError cannot leave a stale 'active' row.
          await writeSSE(JSON.stringify({ type: 'abort' }));
        } else {
          // Actual error in agent processing
          console.error('Agent error:', error);
          await writeSSE(JSON.stringify({ type: 'error', errorText: error.message }));
          try {
            await activeStreamQueries.markError(streamId);
          } catch {
            // Database may be closed during shutdown - ignore
          }
        }
      } finally {
        cleanupAbortController();
        // NOTE: Do NOT destroy the browser proxy here -- the browser may still
        // be open across turns. It gets cleaned up on agent-browser close or session delete.

        await writeSSE('[DONE]');
        await safeClose();
      }
    })();

    return readable;
  };
}

// Run agent with resumable streaming response
agents.post(
  '/:id/run',
  zValidator('json', runPromptSchema),
  async (c) => {
    const id = c.req.param('id');
    const { prompt: rawPrompt, attachments } = c.req.valid('json');

    // Enrich user prompt with devtools context (page URL, viewport, etc.)
    const prompt = enrichPromptWithDevtoolsContext(id, rawPrompt);

    const session = await sessionQueries.getById(id);
    if (!session) {
      return c.json({ error: 'Session not found' }, 404);
    }

    // Get the next message sequence number (this will be the user message's sequence)
    const nextSequence = await messageQueries.getNextSequence(id);

    // Create a checkpoint BEFORE processing this user message
    // This allows reverting to the state before this message was sent
    await createCheckpoint(id, session.workingDirectory,
nextSequence);\n\n // Build user message content - can be string or array with file parts\n let userMessageContent: string | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }>;\n \n // Cast attachments to StreamAttachment[] so we can add savedPath\n const streamAttachments: StreamAttachment[] | undefined = attachments as StreamAttachment[] | undefined;\n \n if (streamAttachments && streamAttachments.length > 0) {\n // Save attachments to disk FIRST so we have paths for the database\n for (let i = 0; i < streamAttachments.length; i++) {\n const attachment = streamAttachments[i];\n try {\n const savedPath = await saveAttachmentToDisk(id, attachment, i);\n attachment.savedPath = savedPath;\n } catch (err) {\n console.error(`Failed to save attachment ${i}:`, err);\n }\n }\n \n // Build content array with text and file parts\n const contentParts: Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> = [];\n \n // IMPORTANT: Put file location info FIRST so the model knows where files are saved\n const attachmentDescriptions = streamAttachments\n .map((a, i) => {\n const name = a.filename || `attachment_${i + 1}`;\n const typeLabel = a.type === 'image' ? 'Image' : 'File';\n const location = a.savedPath || '(path unknown)';\n return `${i + 1}. ${typeLabel}: \"${name}\" saved at: ${location}`;\n })\n .join('\\n');\n contentParts.push({ \n type: 'text', \n text: `[FILE ATTACHMENTS - The user has attached the following files which are saved on disk]\\n${attachmentDescriptions}\\n\\nYou can reference these files by their paths above. 
The file contents are also shown inline below.` \n });\n \n // Add user's actual message\n if (prompt) {\n contentParts.push({ type: 'text', text: `\\n[USER MESSAGE]\\n${prompt}` });\n }\n \n // Add file/image parts with all metadata\n for (const attachment of streamAttachments) {\n if (attachment.type === 'image') {\n contentParts.push({\n type: 'image',\n image: attachment.data, // base64 data URL or raw base64\n mediaType: attachment.mediaType,\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n } else {\n contentParts.push({\n type: 'file',\n data: attachment.data,\n mediaType: attachment.mediaType || 'application/octet-stream',\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n }\n }\n \n userMessageContent = contentParts;\n } else {\n userMessageContent = prompt;\n }\n\n // Save user message to database FIRST, before creating the active stream\n // This ensures other clients can see the user message when they detect the new stream\n await messageQueries.create(id, { role: 'user', content: userMessageContent as any });\n\n // Generate unique stream ID for this run\n const streamId = `stream_${id}_${nanoid(10)}`;\n console.log(`[STREAM] Creating stream ${streamId} for session ${id}`);\n\n // Record the active stream\n await activeStreamQueries.create(id, streamId);\n\n // Create the resumable stream\n // Note: We don't pass c.req.raw.signal - the agent runs independently of client connections\n const stream = await streamContext.resumableStream(\n streamId,\n createAgentStreamProducer(id, prompt, streamId, streamAttachments),\n );\n\n if (!stream) {\n console.error(`[STREAM] Failed to create resumable stream ${streamId}`);\n return c.json({ error: 'Failed to create stream' }, 500);\n }\n console.log(`[STREAM] Stream ${streamId} created successfully`);\n\n // Return SSE response - pipe through TextEncoder for proper streaming\n const encodedStream = stream.pipeThrough(new TextEncoderStream());\n \n return new 
Response(encodedStream as unknown as ReadableStream<Uint8Array>, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'x-vercel-ai-ui-message-stream': 'v1',\n 'x-stream-id': streamId,\n },\n });\n }\n);\n\n// Watch/subscribe to an existing stream (for additional clients)\nagents.get('/:id/watch', async (c) => {\n const sessionId = c.req.param('id');\n const resumeAt = c.req.query('resumeAt');\n const explicitStreamId = c.req.query('streamId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Find the active stream for this session\n let streamId: string | undefined = explicitStreamId;\n if (!streamId) {\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n if (!activeStream) {\n return c.json({ error: 'No active stream for this session', hint: 'Start a new run with POST /agents/:id/run' }, 404);\n }\n streamId = activeStream.streamId;\n }\n\n console.log(`[STREAM] Watch request for session ${sessionId}, streamId=${streamId}, resumeAt=${resumeAt || 'none'}`);\n\n // Try to resume/join the existing stream\n const stream = await streamContext.resumeExistingStream(\n streamId!,\n resumeAt ? parseInt(resumeAt, 10) : undefined,\n );\n\n if (!stream) {\n console.log(`[STREAM] Watch failed — stream ${streamId} is no longer active`);\n return c.json({ \n error: 'Stream is no longer active', \n streamId,\n hint: 'The stream may have finished. Check /agents/:id/approvals or start a new run.' 
\n }, 422);\n }\n console.log(`[STREAM] Client watching stream ${streamId}`);\n\n const encodedStream = stream.pipeThrough(new TextEncoderStream());\n \n return new Response(encodedStream as unknown as ReadableStream<Uint8Array>, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'x-vercel-ai-ui-message-stream': 'v1',\n 'x-stream-id': streamId ?? '',\n },\n });\n});\n\n// Get active stream info for a session\nagents.get('/:id/stream', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n\n return c.json({\n sessionId,\n hasActiveStream: !!activeStream,\n stream: activeStream ? {\n id: activeStream.id,\n streamId: activeStream.streamId,\n status: activeStream.status,\n createdAt: activeStream.createdAt.toISOString(),\n } : null,\n });\n});\n\n// Run agent without streaming (for simple integrations)\nagents.post(\n '/:id/generate',\n zValidator('json', runPromptSchema),\n async (c) => {\n const id = c.req.param('id');\n const { prompt: rawPrompt } = c.req.valid('json');\n\n // Enrich user prompt with devtools context (page URL, viewport, etc.)\n const prompt = enrichPromptWithDevtoolsContext(id, rawPrompt);\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n try {\n const agent = await Agent.create({ sessionId: id });\n const result = await agent.run({ prompt });\n\n return c.json({\n sessionId: id,\n text: result.text,\n stepCount: result.steps.length,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 500);\n }\n }\n);\n\n// Approve a tool execution\nagents.post('/:id/approve/:toolCallId', async (c) => {\n const sessionId = c.req.param('id');\n const toolCallId = 
c.req.param('toolCallId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n try {\n const agent = await Agent.create({ sessionId });\n const result = await agent.approve(toolCallId);\n\n return c.json({\n success: true,\n toolCallId,\n result,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n});\n\n// Reject a tool execution\nagents.post(\n '/:id/reject/:toolCallId',\n zValidator('json', rejectSchema),\n async (c) => {\n const sessionId = c.req.param('id');\n const toolCallId = c.req.param('toolCallId');\n const body = c.req.valid('json');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n try {\n const agent = await Agent.create({ sessionId });\n agent.reject(toolCallId, body?.reason);\n\n return c.json({\n success: true,\n toolCallId,\n rejected: true,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n }\n);\n\n// Get pending approvals for a session\nagents.get('/:id/approvals', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const pendingApprovals = await toolExecutionQueries.getPendingApprovals(sessionId);\n\n return c.json({\n sessionId,\n pendingApprovals: pendingApprovals.map((p) => ({\n id: p.id,\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n input: p.input,\n startedAt: p.startedAt.toISOString(),\n })),\n count: pendingApprovals.length,\n });\n});\n\n// Abort/stop an active stream for a session\n// This is used when the user explicitly clicks the Stop button\nagents.post('/:id/abort', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' 
}, 404);\n }\n\n // Find the active stream for this session\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n if (!activeStream) {\n return c.json({ error: 'No active stream for this session' }, 404);\n }\n\n // Get the abort controller for this stream\n const abortController = streamAbortControllers.get(activeStream.streamId);\n if (abortController) {\n abortController.abort();\n streamAbortControllers.delete(activeStream.streamId);\n return c.json({ success: true, streamId: activeStream.streamId, aborted: true });\n }\n\n // Stream might have already finished\n return c.json({ \n success: false, \n streamId: activeStream.streamId, \n message: 'Stream may have already finished or was not found' \n });\n});\n\n// Quick start: create session and run in one request (also resumable)\nagents.post(\n '/quick',\n zValidator('json', quickStartSchema),\n async (c) => {\n const body = c.req.valid('json');\n const config = getConfig();\n\n // Create new session\n const agent = await Agent.create({\n name: body.name,\n workingDirectory: body.workingDirectory || config.resolvedWorkingDirectory,\n model: body.model || config.defaultModel,\n sessionConfig: body.toolApprovals ? 
{ toolApprovals: body.toolApprovals } : undefined,
    });

    const session = agent.getSession();

    // Enrich user prompt with devtools context (page URL, viewport, etc.)
    const enrichedPrompt = enrichPromptWithDevtoolsContext(session.id, body.prompt);

    const streamId = `stream_${session.id}_${nanoid(10)}`;

    // Create a checkpoint BEFORE processing the first user message
    // For new sessions, the first user message will be at sequence 0
    await createCheckpoint(session.id, session.workingDirectory, 0);

    // Record the active stream
    await activeStreamQueries.create(session.id, streamId);

    // Create a custom stream producer that includes session info
    // The agent runs independently of client connections for resumable streams
    const createQuickStreamProducer = (): ReadableStream<string> => {
      const { readable, writable } = new TransformStream<string, string>();
      const writer = writable.getWriter();
      let writerClosed = false;
      // Tracks toolCallIds that already emitted 'tool-input-start'
      const toolCallStarts = new Set<string>();

      // Create an abort controller for this specific stream (for explicit stops only)
      const abortController = new AbortController();
      streamAbortControllers.set(streamId, abortController);

      let sseEventCount = 0;
      let sseBrowserFrameCount = 0;
      let sseWriteErrors = 0;

      // Helper to write SSE event - handles client disconnection gracefully
      const writeSSE = async (data: string) => {
        if (writerClosed) return;
        try {
          sseEventCount++;
          await writer.write(`data: ${data}\n\n`);
        } catch (err: any) {
          sseWriteErrors++;
          if (sseWriteErrors === 1) {
            console.log(`[SSE:${streamId}] Writer closed (client disconnected). Total events sent: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}`);
          }
          writerClosed = true;
        }
      };

      const safeClose = async () => {
        if (writerClosed) return;
        try {
          console.log(`[SSE:${streamId}] Stream closing. Total events: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}, write errors: ${sseWriteErrors}`);
          writerClosed = true;
          await writer.close();
        } catch {
          // Already closed
        }
      };

      // Cleanup abort controller
      const cleanupAbortController = () => {
        streamAbortControllers.delete(streamId);
      };

      (async () => {
        let isAborted = false;

        try {
          // Send stream ID and session info
          await writeSSE(JSON.stringify({ type: 'data-stream-id', streamId }));
          await writeSSE(JSON.stringify({
            type: 'data-session',
            data: {
              id: session.id,
              name: session.name,
              workingDirectory: session.workingDirectory,
              model: session.model,
            },
          }));

          const messageId = `msg_${Date.now()}`;
          await writeSSE(JSON.stringify({ type: 'start', messageId }));

          let textId = `text_${Date.now()}`;
          let textStarted = false;

          const result = await agent.stream({
            prompt: enrichedPrompt,
            abortSignal: abortController.signal, // Use our managed abort controller, NOT client signal
            onToolProgress: async (progress) => {
              const status = (progress.data as any)?.status || 'no-status';
              const contentLength = typeof (progress.data as any)?.content === 'string'
                ? (progress.data as any).content.length
                : undefined;
              const chunkIndex = (progress.data as any)?.chunkIndex;
              const chunkCount = (progress.data as any)?.chunkCount;
              console.log(
                '[TOOL-PROGRESS] Sending:',
                progress.toolName,
                status,
                contentLength !== undefined ? `contentLength=${contentLength}` : '',
                chunkIndex !== undefined || chunkCount !== undefined ? `chunk=${chunkIndex}/${chunkCount}` : ''
              );
              await writeSSE(JSON.stringify({
                type: 'tool-progress',
                toolName: progress.toolName,
                data: progress.data,
              }));
              if (progress.toolName === 'write_file' && status === 'content') {
                await writeSSE(JSON.stringify({
                  type: 'debug',
                  label: 'write-file-progress',
                  contentLength,
                  chunkIndex,
                  chunkCount,
                }));
                // Yield to the event loop so the write is flushed promptly.
                await new Promise((resolve) => setTimeout(resolve, 0));
              }

              const browserPort = (progress.data as any)?.browserStreamPort;
              const browserClosed = (progress.data as any)?.browserClosed;

              if (progress.toolName === 'bash' && browserClosed) {
                console.log(`[BROWSER-STREAM:${streamId}] agent-browser close detected, destroying proxy for session ${session.id}`);
                destroyProxy(session.id);
              } else if (progress.toolName === 'bash' && browserPort) {
                console.log(`[BROWSER-STREAM:${streamId}] agent-browser command detected, port ${browserPort} for session ${session.id}`);
                const proxy = getOrCreateProxy(session.id, browserPort);
                console.log(`[BROWSER-STREAM:${streamId}] Proxy state: connected=${proxy.connected}, frameListeners=${proxy.listenerCount('frame')}, statusListeners=${proxy.listenerCount('status')}`);

                if (!sessionRecorders.has(session.id)) {
                  const recorder = new FrameRecorder(session.id);
                  recorder.start();
                  sessionRecorders.set(session.id, recorder);
                }

                // Always replace listeners so they reference the CURRENT stream's writeSSE.
                const oldFrameListeners = proxy.listenerCount('frame');
                if (oldFrameListeners > 0) {
                  console.log(`[BROWSER-STREAM:${streamId}] Replacing ${oldFrameListeners} stale frame listener(s) from previous stream`);
                  proxy.removeAllListeners('frame');
                  proxy.removeAllListeners('status');
                }
                console.log(`[BROWSER-STREAM:${streamId}] Attaching frame+status listeners to proxy`);
                proxy.on('frame', (frame: BrowserFrame) => {
                  sseBrowserFrameCount++;
                  if (sseBrowserFrameCount === 1) {
                    console.log(`[BROWSER-STREAM:${streamId}] First browser frame received! dataSize=${frame.data?.length ?? 0} writerClosed=${writerClosed}`);
                  } else if (sseBrowserFrameCount % 50 === 0) {
                    console.log(`[BROWSER-STREAM:${streamId}] Browser frame #${sseBrowserFrameCount} (writerClosed=${writerClosed})`);
                  }
                  const rec = sessionRecorders.get(session.id);
                  rec?.addFrame(frame);
                  // Fire-and-forget: frame delivery must not block the proxy event loop.
                  writeSSE(JSON.stringify({
                    type: 'browser-frame',
                    data: frame.data,
                    metadata: frame.metadata,
                  })).catch((err) => {
                    console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-frame via SSE:`, err);
                  });
                });
                proxy.on('status', (s: BrowserStatus) => {
                  console.log(`[BROWSER-STREAM:${streamId}] Browser status event: connected=${s.connected} screencasting=${s.screencasting} viewport=${s.viewportWidth}x${s.viewportHeight}`);
                  writeSSE(JSON.stringify({
                    type: 'browser-status',
                    ...s,
                  })).catch((err) => {
                    console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-status via SSE:`, err);
                  });
                });
                proxy.requestStatus();
              }
            },
            onStepFinish: async () => {
              await writeSSE(JSON.stringify({ type: 'finish-step' }));
              if (textStarted) {
                await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));
                textStarted = false;
                textId = `text_${Date.now()}`;
              }
            },
            onAbort: async ({ steps }) => {
              isAborted = true;
              console.log(`Stream aborted after ${steps.length} steps`);
            },
          });

          let reasoningId = `reasoning_${Date.now()}`;
          let reasoningStarted = false;

          for await (const part of result.stream.fullStream) {
            if (part.type === 'text-delta') {
              if (!textStarted) {
                await writeSSE(JSON.stringify({ type: 'text-start', id: textId }));
                textStarted = true;
              }
              await writeSSE(JSON.stringify({ type: 'text-delta', id: textId, delta: part.text }));
            } else if (part.type === 'reasoning-start') {
              await writeSSE(JSON.stringify({ type: 'reasoning-start', id: reasoningId }));
              reasoningStarted = true;
            } else if (part.type === 'reasoning-delta') {
              await writeSSE(JSON.stringify({ type: 'reasoning-delta', id: reasoningId, delta: part.text }));
            } else if (part.type === 'reasoning-end') {
              if (reasoningStarted) {
                await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));
                reasoningStarted = false;
                reasoningId = `reasoning_${Date.now()}`;
              }
            } else if ((part as any).type === 'tool-call-streaming-start') {
              // Tool call starting to stream
              const p = part as any;
              await writeSSE(JSON.stringify({
                type: 'tool-input-start',
                toolCallId: p.toolCallId,
                toolName: p.toolName,
              }));
              toolCallStarts.add(p.toolCallId);
            } else if ((part as any).type === 'tool-call-delta') {
              // Streaming tool args delta
              const p = part as any;
              await writeSSE(JSON.stringify({
                type: 'tool-input-delta',
                toolCallId: p.toolCallId,
                argsTextDelta: p.argsTextDelta,
              }));
            } else if (part.type === 'tool-call') {
              // Tool call complete
              await emitSyntheticToolStreaming(
                writeSSE,
                toolCallStarts,
                part.toolCallId,
                part.toolName,
                part.input
              );
              await writeSSE(JSON.stringify({
                type: 'tool-input-available',
                toolCallId: part.toolCallId,
                toolName: part.toolName,
                input: sanitizeToolInput(part.toolName, part.input),
              }));
              await writeSSE(JSON.stringify({
                type: 'debug',
                label: 'tool-input-available',
                toolName: part.toolName,
              }));
            } else if (part.type === 'tool-result') {
              await writeSSE(JSON.stringify({
                type: 'tool-output-available',
                toolCallId: part.toolCallId,
                output: part.output,
              }));
            } else if (part.type === 'error') {
              console.error('Stream error:', part.error);
              await writeSSE(JSON.stringify({ type: 'error', errorText: String(part.error) }));
            }
          }

          if (textStarted) {
            await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));
          }

          if (reasoningStarted) {
            await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));
          }

          if (!isAborted) {
            await result.saveResponseMessages();
          }

          if (isAborted) {
            await writeSSE(JSON.stringify({ type: 'abort' }));
          } else {
            await writeSSE(JSON.stringify({ type: 'finish' }));
          }

          // NOTE(review): unlike createAgentStreamProducer, finish()/markError()
          // here are not wrapped in try/catch for the shutdown case — confirm
          // whether a closed database during shutdown can reject this producer.
          await activeStreamQueries.finish(streamId);
        } catch (error: any) {
          if (error.name === 'AbortError' || error.message?.includes('aborted')) {
            // User explicitly stopped the stream
            await writeSSE(JSON.stringify({ type: 'abort' }));
          } else {
            // Actual error in agent processing
            console.error('Agent error:', error);
            await writeSSE(JSON.stringify({ type: 'error', errorText: error.message }));
            await activeStreamQueries.markError(streamId);
          }
        } finally {
          cleanupAbortController();

          await writeSSE('[DONE]');
          await safeClose();
        }
      })();

      return readable;
    };

    const stream = await streamContext.resumableStream(
      streamId,
      createQuickStreamProducer,
    );

    if (!stream) {
      return c.json({ error: 'Failed to create stream' }, 500);
    }

    const encodedStream = stream.pipeThrough(new TextEncoderStream());

    return new Response(encodedStream as unknown as ReadableStream<Uint8Array>, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'x-vercel-ai-ui-message-stream': 'v1',
        'x-stream-id': streamId,
        'x-session-id': session.id,
      },
    });
  }
);

// Browser input injection for pair-browsing
const browserInputSchema = z.object({
  type: z.enum(['input_mouse', 'input_keyboard', 'input_touch']),
  eventType: z.string(),
  x: z.number().optional(),
  y: z.number().optional(),
  button: z.string().optional(),
  clickCount: z.number().optional(),
  deltaX: z.number().optional(),
  deltaY: z.number().optional(),
  key: z.string().optional(),
  code: z.string().optional(),
  text: z.string().optional(),
  modifiers: z.number().optional(),
  touchPoints: z.array(z.object({
    x: z.number(),
    y: z.number(),
    id: z.number().optional(),
  })).optional(),
});

agents.post(
  '/:id/browser-input',
  zValidator('json', browserInputSchema),
  async (c) => {
    const sessionId = c.req.param('id');
    const event =
c.req.valid('json') as BrowserInputEvent;\n\n const proxy = getProxy(sessionId);\n if (!proxy || !proxy.connected) {\n return c.json({ error: 'No active browser stream for this session' }, 404);\n }\n\n proxy.injectInput(event);\n return c.json({ success: true });\n }\n);\n\n// Browser stream status — check if a browser is active and get its stream port\nagents.get('/:id/browser-stream', async (c) => {\n const sessionId = c.req.param('id');\n const proxy = getProxy(sessionId);\n\n return c.json({\n sessionId,\n active: !!proxy?.connected,\n hasProxy: !!proxy,\n latestFrame: proxy?.latestFrame ? {\n metadata: proxy.latestFrame.metadata,\n timestamp: proxy.latestFrame.timestamp,\n } : null,\n });\n});\n\nexport { agents };\n","/**\n * In-memory pub/sub adapter for resumable-stream library.\n * This enables multiple clients to follow the same stream without Redis.\n * \n * Limitations:\n * - State lost on server restart\n * - Only works for single-server deployment (no load balancing)\n * \n * For production with multiple servers, use Redis.\n */\n\nimport { createResumableStreamContext } from 'resumable-stream/generic';\nimport type { Publisher, Subscriber } from 'resumable-stream/generic';\n\n// In-memory key-value store\nconst store = new Map<string, { value: string; expiresAt?: number }>();\n\n// In-memory pub/sub channels\nconst channels = new Map<string, Set<(message: string) => void>>();\n\n// Cleanup expired keys periodically\n// Use .unref() so this doesn't keep the process alive when it's the only thing running\nconst cleanupInterval = setInterval(() => {\n const now = Date.now();\n for (const [key, data] of store.entries()) {\n if (data.expiresAt && data.expiresAt < now) {\n store.delete(key);\n }\n }\n}, 60000); // Clean up every minute\ncleanupInterval.unref();\n\nlet publishCount = 0;\nlet lastPublishLog = 0;\n\n/**\n * In-memory Publisher implementation\n */\nconst publisher: Publisher = {\n connect: async () => {\n // No-op for in-memory\n },\n\n 
publish: async (channel: string, message: string) => {\n const subscribers = channels.get(channel);\n publishCount++;\n const now = Date.now();\n if (now - lastPublishLog > 10000) {\n console.log(`[ResumableStream] Publish stats: total=${publishCount}, channels=${channels.size}, store=${store.size}`);\n lastPublishLog = now;\n }\n if (subscribers) {\n for (const callback of subscribers) {\n setImmediate(() => callback(message));\n }\n }\n },\n\n set: async (key: string, value: string, options?: { EX?: number }) => {\n const expiresAt = options?.EX ? Date.now() + options.EX * 1000 : undefined;\n store.set(key, { value, expiresAt });\n if (options?.EX) {\n setTimeout(() => store.delete(key), options.EX * 1000);\n }\n },\n\n get: async (key: string) => {\n const data = store.get(key);\n if (!data) return null;\n \n if (data.expiresAt && data.expiresAt < Date.now()) {\n store.delete(key);\n return null;\n }\n \n return data.value;\n },\n\n incr: async (key: string) => {\n const data = store.get(key);\n const current = data ? parseInt(data.value, 10) : 0;\n const next = (isNaN(current) ? 0 : current) + 1;\n store.set(key, { value: String(next), expiresAt: data?.expiresAt });\n return next;\n },\n};\n\n/**\n * In-memory Subscriber implementation\n */\nconst subscriber: Subscriber = {\n connect: async () => {\n // No-op for in-memory\n },\n\n subscribe: async (channel: string, callback: (message: string) => void) => {\n if (!channels.has(channel)) {\n channels.set(channel, new Set());\n }\n channels.get(channel)!.add(callback);\n console.log(`[ResumableStream] Subscribe to channel \"${channel}\" (total subscribers: ${channels.get(channel)!.size})`);\n },\n\n unsubscribe: async (channel: string) => {\n const count = channels.get(channel)?.size ?? 
0;\n channels.delete(channel);\n console.log(`[ResumableStream] Unsubscribe from channel \"${channel}\" (removed ${count} subscribers)`);\n },\n};\n\n/**\n * Create the resumable stream context with in-memory pub/sub\n */\nexport const streamContext = createResumableStreamContext({\n // Background task handler - just let promises run and log errors\n waitUntil: (promise: Promise<unknown>) => {\n promise.catch((err) => {\n console.error('[ResumableStream] Background task error:', err);\n });\n },\n publisher,\n subscriber,\n});\n\n/**\n * Get statistics about the in-memory store (for debugging)\n */\nexport function getStreamStats() {\n return {\n storeSize: store.size,\n activeChannels: channels.size,\n channelSubscribers: Array.from(channels.entries()).map(([channel, subs]) => ({\n channel,\n subscriberCount: subs.size,\n })),\n };\n}\n\n/**\n * Clear all in-memory state (useful for testing)\n */\nexport function clearStreamState() {\n store.clear();\n channels.clear();\n}\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport { readFileSync } from 'node:fs';\nimport { fileURLToPath } from 'node:url';\nimport { dirname, join } from 'node:path';\nimport { getConfig, getApiKeyStatus, setApiKey, removeApiKey, SUPPORTED_PROVIDERS } from '../../config/index.js';\n\n// Read package.json to get the current version\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\n// Try multiple possible paths to package.json:\n// When bundled by tsup, all code ends up in dist/server/index.js\n// The import.meta.url will reflect the bundled file location\nconst possiblePaths = [\n join(__dirname, '../package.json'), // From dist/server -> dist/../package.json\n join(__dirname, '../../package.json'), // From dist/server (if nested differently)\n join(__dirname, '../../../package.json'), // From src/server/routes (development)\n join(process.cwd(), 'package.json'), // From current 
working directory\n];\n\nlet currentVersion = '0.0.0';\nlet packageName = 'sparkecoder';\n\nfor (const packageJsonPath of possiblePaths) {\n try {\n const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));\n // Match sparkecoder package (but not web/ or remote-server/ subpackages)\n if (packageJson.name === 'sparkecoder') {\n currentVersion = packageJson.version || '0.0.0';\n packageName = packageJson.name || 'sparkecoder';\n break;\n }\n } catch {\n // Try next path\n }\n}\n\nconst health = new Hono();\n\nhealth.get('/', async (c) => {\n const config = getConfig();\n const apiKeyStatus = getApiKeyStatus();\n \n // Check if the AI Gateway key is configured (required for the default model)\n const gatewayKey = apiKeyStatus.find(s => s.provider === 'ai-gateway');\n const hasApiKey = gatewayKey?.configured ?? false;\n\n return c.json({\n status: 'ok',\n version: currentVersion,\n uptime: process.uptime(),\n apiKeyConfigured: hasApiKey,\n config: {\n workingDirectory: config.resolvedWorkingDirectory,\n defaultModel: config.defaultModel,\n defaultToolApprovals: config.toolApprovals || {},\n port: config.server.port,\n },\n timestamp: new Date().toISOString(),\n });\n});\n\n// Check for updates - compare current version with npm registry\nhealth.get('/version', async (c) => {\n let latestVersion = currentVersion;\n let updateAvailable = false;\n let error: string | undefined;\n \n try {\n // Fetch latest version from npm registry\n const npmResponse = await fetch(`https://registry.npmjs.org/${packageName}/latest`, {\n headers: { 'Accept': 'application/json' },\n signal: AbortSignal.timeout(5000), // 5 second timeout\n });\n \n if (npmResponse.ok) {\n const npmData = await npmResponse.json() as { version?: string };\n latestVersion = npmData.version || currentVersion;\n \n // Compare versions (simple semver comparison)\n const parseVersion = (v: string) => {\n const parts = v.replace(/^v/, '').split('.').map(Number);\n return { major: parts[0] || 0, minor: 
parts[1] || 0, patch: parts[2] || 0 };\n };\n \n const current = parseVersion(currentVersion);\n const latest = parseVersion(latestVersion);\n \n updateAvailable = \n latest.major > current.major ||\n (latest.major === current.major && latest.minor > current.minor) ||\n (latest.major === current.major && latest.minor === current.minor && latest.patch > current.patch);\n } else {\n error = `npm registry returned ${npmResponse.status}`;\n }\n } catch (err) {\n error = err instanceof Error ? err.message : 'Failed to check for updates';\n }\n \n return c.json({\n packageName,\n currentVersion,\n latestVersion,\n updateAvailable,\n updateCommand: updateAvailable ? `npm install -g ${packageName}@latest` : null,\n error,\n timestamp: new Date().toISOString(),\n });\n});\n\nhealth.get('/ready', async (c) => {\n try {\n // Check if config is loaded\n getConfig();\n\n return c.json({\n status: 'ready',\n timestamp: new Date().toISOString(),\n });\n } catch (error: any) {\n return c.json(\n {\n status: 'not_ready',\n error: error.message,\n timestamp: new Date().toISOString(),\n },\n 503\n );\n }\n});\n\n// ============================================\n// API Key Management Endpoints\n// ============================================\n\n// Get status of all API keys (masked)\nhealth.get('/api-keys', async (c) => {\n const status = getApiKeyStatus();\n \n return c.json({\n providers: status,\n supportedProviders: SUPPORTED_PROVIDERS,\n });\n});\n\n// Set an API key\nconst setApiKeySchema = z.object({\n provider: z.string(),\n apiKey: z.string().min(1),\n});\n\nhealth.post(\n '/api-keys',\n zValidator('json', setApiKeySchema),\n async (c) => {\n const { provider, apiKey } = c.req.valid('json');\n \n try {\n setApiKey(provider, apiKey);\n const status = getApiKeyStatus();\n const providerStatus = status.find(s => s.provider === provider.toLowerCase());\n \n return c.json({\n success: true,\n provider: provider.toLowerCase(),\n maskedKey: providerStatus?.maskedKey,\n message: `API 
key for ${provider} saved successfully`,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n }\n);\n\n// Remove an API key\nhealth.delete('/api-keys/:provider', async (c) => {\n const provider = c.req.param('provider');\n \n try {\n removeApiKey(provider);\n \n return c.json({\n success: true,\n provider: provider.toLowerCase(),\n message: `API key for ${provider} removed`,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n});\n\nexport { health };\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport * as tmux from '../../terminal/tmux.js';\nimport { sessionQueries } from '../../db/index.js';\n\nexport const terminals = new Hono();\n\n// Spawn a new terminal\nconst spawnSchema = z.object({\n command: z.string(),\n cwd: z.string().optional(),\n name: z.string().optional(),\n});\n\nterminals.post(\n '/:sessionId/terminals',\n zValidator('json', spawnSchema),\n async (c) => {\n const sessionId = c.req.param('sessionId');\n const body = c.req.valid('json');\n\n // Verify session exists\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Check if tmux is available\n const hasTmux = await tmux.isTmuxAvailable();\n if (!hasTmux) {\n return c.json({ error: 'tmux is not installed. Background terminals require tmux.' 
}, 400);\n }\n\n const workingDirectory = body.cwd || session.workingDirectory;\n const result = await tmux.runBackground(body.command, workingDirectory, { \n sessionId, \n name: body.name \n });\n\n return c.json({\n id: result.id,\n name: body.name || null,\n command: body.command,\n cwd: workingDirectory,\n status: result.status,\n pid: null, // tmux doesn't expose PID directly\n }, 201);\n }\n);\n\n// List terminals for a session\nterminals.get('/:sessionId/terminals', async (c) => {\n const sessionId = c.req.param('sessionId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // List all terminals for this session from the file system\n const sessionTerminals = await tmux.listSessionTerminals(sessionId, session.workingDirectory);\n \n // Get running status for each terminal\n const terminalList = await Promise.all(\n sessionTerminals.map(async (meta) => {\n const running = await tmux.isRunning(meta.id);\n return {\n id: meta.id,\n name: meta.name || null,\n command: meta.command,\n cwd: meta.cwd,\n status: running ? 
'running' : 'stopped',\n createdAt: meta.createdAt,\n };\n })\n );\n\n return c.json({\n sessionId,\n terminals: terminalList,\n count: terminalList.length,\n running: terminalList.filter(t => t.status === 'running').length,\n });\n});\n\n// Get terminal status\nterminals.get('/:sessionId/terminals/:terminalId', async (c) => {\n const sessionId = c.req.param('sessionId');\n const terminalId = c.req.param('terminalId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const meta = await tmux.getMeta(terminalId, session.workingDirectory, sessionId);\n if (!meta) {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n const running = await tmux.isRunning(terminalId);\n\n return c.json({\n id: terminalId,\n command: meta.command,\n cwd: meta.cwd,\n status: running ? 'running' : 'stopped',\n createdAt: meta.createdAt,\n exitCode: running ? null : 0, // We don't track exit codes in tmux mode\n });\n});\n\n// Get terminal logs\nconst logsQuerySchema = z.object({\n tail: z.string().optional().transform(v => v ? 
parseInt(v, 10) : undefined),\n});\n\nterminals.get(\n '/:sessionId/terminals/:terminalId/logs',\n zValidator('query', logsQuerySchema),\n async (c) => {\n const sessionId = c.req.param('sessionId');\n const terminalId = c.req.param('terminalId');\n const query = c.req.valid('query');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const result = await tmux.getLogs(terminalId, session.workingDirectory, { tail: query.tail, sessionId });\n\n if (result.status === 'unknown') {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n return c.json({\n terminalId,\n logs: result.output,\n lineCount: result.output.split('\\n').length,\n });\n }\n);\n\n// Kill a terminal\nconst killSchema = z.object({\n signal: z.enum(['SIGTERM', 'SIGKILL']).optional(),\n});\n\nterminals.post(\n '/:sessionId/terminals/:terminalId/kill',\n zValidator('json', killSchema.optional()),\n async (c) => {\n const terminalId = c.req.param('terminalId');\n\n const success = await tmux.killTerminal(terminalId);\n\n if (!success) {\n return c.json({ error: 'Failed to kill terminal (may already be stopped)' }, 400);\n }\n\n return c.json({ success: true, message: 'Terminal killed' });\n }\n);\n\n// Write to terminal stdin\nconst writeSchema = z.object({\n input: z.string(),\n});\n\nterminals.post(\n '/:sessionId/terminals/:terminalId/write',\n zValidator('json', writeSchema),\n async (c) => {\n const terminalId = c.req.param('terminalId');\n const body = c.req.valid('json');\n\n // Check if terminal is running\n const isRunning = await tmux.isRunning(terminalId);\n if (!isRunning) {\n return c.json({ error: 'Terminal is not running' }, 400);\n }\n\n // Use sendInput to write to the terminal (uses tmux send-keys internally)\n // Don't press Enter automatically - the input should include newlines if needed\n const success = await tmux.sendInput(terminalId, body.input, { pressEnter: false });\n \n if 
(!success) {\n return c.json({ error: 'Failed to write to terminal' }, 500);\n }\n\n return c.json({ success: true, written: body.input.length });\n }\n);\n\n// Kill all terminals for a session (cleanup endpoint)\nterminals.post('/:sessionId/terminals/kill-all', async (c) => {\n const sessionId = c.req.param('sessionId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // List all terminals for this session and kill them\n const sessionTerminals = await tmux.listSessionTerminals(sessionId, session.workingDirectory);\n let killed = 0;\n\n for (const terminal of sessionTerminals) {\n const isRunning = await tmux.isRunning(terminal.id);\n if (isRunning) {\n const success = await tmux.killTerminal(terminal.id);\n if (success) killed++;\n }\n }\n\n return c.json({ success: true, killed });\n});\n\n// Direct terminal stream - simpler endpoint that just needs terminal ID\n// Used by web UI to subscribe to terminal output without needing session context\nterminals.get('/stream/:terminalId', async (c) => {\n const terminalId = c.req.param('terminalId');\n\n // We need to find the terminal - search all sessions\n const sessions = await sessionQueries.list();\n let terminalMeta: Awaited<ReturnType<typeof tmux.getMeta>> = null;\n let workingDirectory = process.cwd();\n let foundSessionId: string | undefined;\n\n // Search in session-scoped directories first (new format)\n for (const session of sessions) {\n terminalMeta = await tmux.getMeta(terminalId, session.workingDirectory, session.id);\n if (terminalMeta) {\n workingDirectory = session.workingDirectory;\n foundSessionId = session.id;\n break;\n }\n }\n\n // Fallback: search in legacy format (no session scope)\n if (!terminalMeta) {\n for (const session of sessions) {\n terminalMeta = await tmux.getMeta(terminalId, session.workingDirectory);\n if (terminalMeta) {\n workingDirectory = session.workingDirectory;\n foundSessionId = 
terminalMeta.sessionId;\n break;\n }\n }\n }\n\n // Check if terminal session exists in tmux (even if no meta file)\n const isActive = await tmux.isRunning(terminalId);\n if (!terminalMeta && !isActive) {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n // Set up SSE with polling\n return new Response(\n new ReadableStream({\n async start(controller) {\n const encoder = new TextEncoder();\n let lastOutput = '';\n let isRunning = true;\n let pollCount = 0;\n const maxPolls = 600; // 5 minutes at 200ms intervals\n\n // Send initial status\n controller.enqueue(\n encoder.encode(`event: status\\ndata: ${JSON.stringify({ terminalId, status: 'connected' })}\\n\\n`)\n );\n\n while (isRunning && pollCount < maxPolls) {\n try {\n const result = await tmux.getLogs(terminalId, workingDirectory, { sessionId: foundSessionId });\n \n // Send new output only\n if (result.output !== lastOutput) {\n const newContent = result.output.slice(lastOutput.length);\n if (newContent) {\n controller.enqueue(\n encoder.encode(`event: stdout\\ndata: ${JSON.stringify({ data: newContent })}\\n\\n`)\n );\n }\n lastOutput = result.output;\n }\n\n isRunning = result.status === 'running';\n \n if (!isRunning) {\n controller.enqueue(\n encoder.encode(`event: exit\\ndata: ${JSON.stringify({ status: 'stopped' })}\\n\\n`)\n );\n break;\n }\n\n // Poll every 200ms for more responsive output\n await new Promise(r => setTimeout(r, 200));\n pollCount++;\n } catch {\n break;\n }\n }\n\n controller.close();\n },\n }),\n {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n },\n }\n );\n});\n\n// Stream terminal logs (SSE) - version with session context\nterminals.get('/:sessionId/terminals/:terminalId/stream', async (c) => {\n const sessionId = c.req.param('sessionId');\n const terminalId = c.req.param('terminalId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not 
found' }, 404);\n }\n\n const meta = await tmux.getMeta(terminalId, session.workingDirectory, sessionId);\n if (!meta) {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n // Set up SSE with polling (simpler than event-based with tmux)\n return new Response(\n new ReadableStream({\n async start(controller) {\n const encoder = new TextEncoder();\n let lastOutput = '';\n let isRunning = true;\n\n while (isRunning) {\n try {\n const result = await tmux.getLogs(terminalId, session.workingDirectory, { sessionId });\n \n // Send new output only\n if (result.output !== lastOutput) {\n const newContent = result.output.slice(lastOutput.length);\n if (newContent) {\n controller.enqueue(\n encoder.encode(`event: stdout\\ndata: ${JSON.stringify({ data: newContent })}\\n\\n`)\n );\n }\n lastOutput = result.output;\n }\n\n isRunning = result.status === 'running';\n \n if (!isRunning) {\n controller.enqueue(\n encoder.encode(`event: exit\\ndata: ${JSON.stringify({ status: 'stopped' })}\\n\\n`)\n );\n break;\n }\n\n // Poll every 500ms\n await new Promise(r => setTimeout(r, 500));\n } catch {\n break;\n }\n }\n\n controller.close();\n },\n }),\n {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n },\n }\n );\n});\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport { nanoid } from 'nanoid';\nimport { sessionQueries, activeStreamQueries, type TaskConfig } from '../../db/index.js';\nimport { Agent } from '../../agent/index.js';\nimport { getConfig } from '../../config/index.js';\nimport { streamContext } from '../resumable-stream.js';\n\nconst tasks = new Hono();\n\n// Store abort controllers for running tasks, keyed by session/task ID\nconst taskAbortControllers = new Map<string, AbortController>();\n\nconst createTaskSchema = z.object({\n prompt: z.string().min(1),\n outputSchema: z.record(z.string(), z.unknown()),\n webhookUrl: 
z.string().url().optional(),\n model: z.string().optional(),\n workingDirectory: z.string().optional(),\n name: z.string().optional(),\n maxIterations: z.number().int().min(1).max(500).optional(),\n});\n\n// POST /tasks — create and start a task (returns immediately)\ntasks.post(\n '/',\n zValidator('json', createTaskSchema),\n async (c) => {\n const body = c.req.valid('json');\n const config = getConfig();\n\n const taskConfig: TaskConfig = {\n enabled: true,\n outputSchema: body.outputSchema,\n webhookUrl: body.webhookUrl,\n maxIterations: body.maxIterations ?? 50,\n status: 'running',\n };\n\n // Create a session with task metadata and all tools auto-approved\n const agent = await Agent.create({\n name: body.name || 'Task',\n workingDirectory: body.workingDirectory || config.resolvedWorkingDirectory,\n model: body.model || config.defaultModel,\n sessionConfig: {\n toolApprovals: { bash: false, write_file: false, read_file: false },\n task: taskConfig,\n },\n });\n\n const taskId = agent.sessionId;\n const abortController = new AbortController();\n taskAbortControllers.set(taskId, abortController);\n\n // Set up a resumable stream so the dashboard can show live updates\n // (tasks are just sessions — they use the same stream infrastructure)\n const streamId = `stream_${taskId}_${nanoid(10)}`;\n await activeStreamQueries.create(taskId, streamId);\n\n const taskStreamProducer = () => {\n const { readable, writable } = new TransformStream<string, string>();\n const writer = writable.getWriter();\n let writerClosed = false;\n\n const writeSSE = async (data: string) => {\n if (writerClosed) return;\n try {\n await writer.write(`data: ${data}\\n\\n`);\n } catch {\n writerClosed = true;\n }\n };\n\n (async () => {\n await writeSSE(JSON.stringify({ type: 'data-stream-id', streamId }));\n try {\n await agent.runTask({\n prompt: body.prompt,\n taskConfig,\n abortSignal: abortController.signal,\n writeSSE,\n });\n await writeSSE(JSON.stringify({ type: 'finish' }));\n } 
catch (err: any) {\n if (err.name === 'AbortError' || abortController.signal.aborted) {\n console.log(`[TASK] Task ${taskId} was cancelled`);\n await writeSSE(JSON.stringify({ type: 'abort' }));\n } else {\n console.error(`[TASK] Error in task ${taskId}:`, err.message);\n const errorMsg = err.message || 'Unknown error';\n await writeSSE(JSON.stringify({ type: 'error', errorText: errorMsg }));\n const failedTask: TaskConfig = {\n ...taskConfig,\n status: 'failed',\n error: errorMsg,\n };\n await sessionQueries.update(taskId, {\n config: {\n toolApprovals: { bash: false, write_file: false, read_file: false },\n task: failedTask,\n },\n });\n if (taskConfig.webhookUrl) {\n const { sendWebhook } = await import('../../utils/webhook.js');\n sendWebhook(taskConfig.webhookUrl, {\n type: 'task.failed',\n taskId,\n sessionId: taskId,\n timestamp: new Date().toISOString(),\n data: { status: 'failed', error: errorMsg },\n });\n }\n }\n } finally {\n await writeSSE('[DONE]');\n writer.close().catch(() => {});\n await activeStreamQueries.finish(streamId).catch(() => {});\n taskAbortControllers.delete(taskId);\n }\n })();\n\n return readable;\n };\n\n await streamContext.resumableStream(streamId, taskStreamProducer);\n\n return c.json({ taskId, status: 'running' }, 201);\n }\n);\n\n// GET /tasks/:id — get task status and result\ntasks.get('/:id', async (c) => {\n const id = c.req.param('id');\n const session = await sessionQueries.getById(id);\n\n if (!session) {\n return c.json({ error: 'Task not found' }, 404);\n }\n\n const task = session.config?.task;\n if (!task?.enabled) {\n return c.json({ error: 'Session is not a task' }, 400);\n }\n\n // Fetch browser recordings for this task session\n let browserRecordings: Array<{ fileName: string; downloadUrl: string | null; sizeBytes: number | null }> = [];\n try {\n const { isRemoteConfigured, storageQueries } = await import('../../db/remote.js');\n if (isRemoteConfigured()) {\n const files = await 
storageQueries.getSessionFiles(id);\n browserRecordings = files\n .filter((f) => f.category === 'browser-recording')\n .map((f) => ({ fileName: f.fileName, downloadUrl: f.downloadUrl, sizeBytes: f.sizeBytes }));\n }\n } catch {}\n\n return c.json({\n taskId: id,\n status: task.status,\n result: task.result,\n error: task.error,\n iterations: task.iterations,\n model: session.model,\n name: session.name,\n createdAt: session.createdAt.toISOString(),\n updatedAt: session.updatedAt.toISOString(),\n browserRecordings: browserRecordings.length > 0 ? browserRecordings : undefined,\n });\n});\n\n// POST /tasks/:id/cancel — cancel a running task\ntasks.post('/:id/cancel', async (c) => {\n const id = c.req.param('id');\n const session = await sessionQueries.getById(id);\n\n if (!session) {\n return c.json({ error: 'Task not found' }, 404);\n }\n\n const task = session.config?.task;\n if (!task?.enabled) {\n return c.json({ error: 'Session is not a task' }, 400);\n }\n\n if (task.status !== 'running') {\n return c.json({ error: `Task is already ${task.status}` }, 400);\n }\n\n const abortController = taskAbortControllers.get(id);\n if (abortController) {\n abortController.abort();\n taskAbortControllers.delete(id);\n }\n\n const cancelledTask: TaskConfig = {\n ...task,\n status: 'failed',\n error: 'Task cancelled by user',\n };\n await sessionQueries.update(id, {\n config: { ...session.config, task: cancelledTask },\n });\n\n if (task.webhookUrl) {\n const { sendWebhook } = await import('../../utils/webhook.js');\n sendWebhook(task.webhookUrl, {\n type: 'task.failed',\n taskId: id,\n sessionId: id,\n timestamp: new Date().toISOString(),\n data: { status: 'failed', error: 'Task cancelled by user' },\n });\n }\n\n return c.json({ taskId: id, status: 'failed', error: 'Task cancelled by user' });\n});\n\nexport default tasks;\n","/**\n * System dependency checks for SparkECoder\n * \n * Checks for required external tools (tmux, agent-browser) and provides\n * installation 
instructions or auto-installs when possible.\n */\n\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { platform } from 'node:os';\n\nconst execAsync = promisify(exec);\n\nexport interface DependencyCheckResult {\n available: boolean;\n version?: string;\n error?: string;\n installInstructions?: string;\n}\n\n/**\n * Detect the current platform and package manager\n */\nfunction getInstallInstructions(): string {\n const os = platform();\n \n if (os === 'darwin') {\n return `\nInstall tmux on macOS:\n brew install tmux\n\nIf you don't have Homebrew, install it first:\n /bin/bash -c \"$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)\"\n`.trim();\n }\n \n if (os === 'linux') {\n return `\nInstall tmux on Linux:\n # Ubuntu/Debian\n sudo apt-get update && sudo apt-get install -y tmux\n\n # Fedora/RHEL\n sudo dnf install -y tmux\n\n # Arch Linux\n sudo pacman -S tmux\n`.trim();\n }\n \n // Fallback for other platforms\n return `\nInstall tmux:\n Please install tmux for your operating system.\n Visit: https://github.com/tmux/tmux/wiki/Installing\n`.trim();\n}\n\n/**\n * Check if tmux is installed and get its version\n */\nexport async function checkTmux(): Promise<DependencyCheckResult> {\n try {\n const { stdout } = await execAsync('tmux -V', { timeout: 5000 });\n const version = stdout.trim(); // e.g., \"tmux 3.4\"\n \n return {\n available: true,\n version,\n };\n } catch (error: any) {\n return {\n available: false,\n error: 'tmux is not installed or not in PATH',\n installInstructions: getInstallInstructions(),\n };\n }\n}\n\n/**\n * Check all required dependencies\n * Returns true if all dependencies are available, false otherwise\n */\nexport async function checkDependencies(options: {\n quiet?: boolean;\n exitOnFailure?: boolean;\n} = {}): Promise<boolean> {\n const { quiet = false, exitOnFailure = true } = options;\n \n const tmuxCheck = await checkTmux();\n \n if (!tmuxCheck.available) 
{\n if (!quiet) {\n console.error('\\n❌ Missing required dependency: tmux');\n console.error('');\n console.error('SparkECoder requires tmux for terminal session management.');\n console.error('');\n if (tmuxCheck.installInstructions) {\n console.error(tmuxCheck.installInstructions);\n }\n console.error('');\n console.error('After installing tmux, run sparkecoder again.');\n console.error('');\n }\n \n if (exitOnFailure) {\n process.exit(1);\n }\n \n return false;\n }\n \n if (!quiet) {\n // Only show version in verbose mode or when explicitly requested\n }\n \n return true;\n}\n\n/**\n * Check if agent-browser is installed globally and available in PATH\n */\nexport async function checkAgentBrowser(): Promise<DependencyCheckResult> {\n try {\n const { stdout } = await execAsync('agent-browser --version', { timeout: 10000 });\n const version = stdout.trim();\n return { available: true, version };\n } catch {\n return {\n available: false,\n error: 'agent-browser is not installed globally',\n installInstructions: 'Install agent-browser globally:\\n npm install -g agent-browser\\n agent-browser install',\n };\n }\n}\n\n/**\n * Install agent-browser globally via npm.\n * Returns true if installation succeeded.\n */\nexport async function tryInstallAgentBrowser(options: { quiet?: boolean } = {}): Promise<boolean> {\n try {\n if (!options.quiet) {\n console.log('📦 Installing agent-browser globally...');\n }\n await execAsync('npm install -g agent-browser', { timeout: 120000 });\n\n // Install Chromium for playwright\n try {\n if (!options.quiet) {\n console.log('📦 Installing Chromium for browser automation...');\n }\n await execAsync('agent-browser install', { timeout: 120000 });\n } catch {\n // Non-fatal — Chromium can be installed later via `agent-browser install`\n }\n\n if (!options.quiet) {\n console.log('✅ agent-browser installed successfully');\n }\n return true;\n } catch (error: any) {\n if (!options.quiet) {\n console.error(`Failed to install agent-browser: 
${error.message}`);\n }\n return false;\n }\n}\n\n/**\n * Attempt to auto-install tmux (best effort, may require sudo)\n * Returns true if installation succeeded\n */\nexport async function tryAutoInstallTmux(): Promise<boolean> {\n const os = platform();\n \n try {\n if (os === 'darwin') {\n // Check if brew is available\n try {\n await execAsync('which brew', { timeout: 5000 });\n } catch {\n // Homebrew not installed, can't auto-install\n return false;\n }\n \n console.log('📦 Installing tmux via Homebrew...');\n await execAsync('brew install tmux', { timeout: 300000 }); // 5 min timeout\n console.log('✅ tmux installed successfully');\n return true;\n }\n \n if (os === 'linux') {\n // Try apt-get (Debian/Ubuntu)\n try {\n await execAsync('which apt-get', { timeout: 5000 });\n console.log('📦 Installing tmux via apt-get...');\n console.log(' (This may require sudo password)');\n await execAsync('sudo apt-get update && sudo apt-get install -y tmux', { \n timeout: 300000 \n });\n console.log('✅ tmux installed successfully');\n return true;\n } catch {\n // apt-get not available or failed\n }\n \n // Try dnf (Fedora/RHEL)\n try {\n await execAsync('which dnf', { timeout: 5000 });\n console.log('📦 Installing tmux via dnf...');\n await execAsync('sudo dnf install -y tmux', { timeout: 300000 });\n console.log('✅ tmux installed successfully');\n return true;\n } catch {\n // dnf not available or failed\n }\n }\n \n return false;\n } catch (error: any) {\n console.error(`Failed to auto-install tmux: ${error.message}`);\n return false;\n }\n}\n\n/**\n * Ensure dependencies are available, with optional auto-install attempt\n */\nexport async function ensureDependencies(options: {\n autoInstall?: boolean;\n quiet?: boolean;\n} = {}): Promise<void> {\n const { autoInstall = false, quiet = false } = options;\n \n const tmuxCheck = await checkTmux();\n \n if (!tmuxCheck.available) {\n if (autoInstall) {\n const installed = await tryAutoInstallTmux();\n if (!installed) {\n await 
checkDependencies({ quiet, exitOnFailure: true });\n }\n } else {\n await checkDependencies({ quiet, exitOnFailure: true });\n }\n }\n \n // Auto-install agent-browser globally if not found (non-blocking)\n const browserCheck = await checkAgentBrowser();\n if (!browserCheck.available) {\n await tryInstallAgentBrowser({ quiet });\n }\n}\n"],"mappings":";;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA8BO,SAAS,mBAAmB,WAAmB,KAAa;AACjE,oBAAkB,UAAU,QAAQ,OAAO,EAAE;AAC7C,YAAU;AACZ;AAKO,SAAS,sBAAsB;AACpC,oBAAkB;AAClB,YAAU;AACZ;AAKO,SAAS,qBAA8B;AAC5C,SAAO,CAAC,CAAC,mBAAmB,CAAC,CAAC;AAChC;AA2BA,SAAS,WAAW,KAAe;AACjC,MAAI,QAAQ,QAAQ,QAAQ,OAAW,QAAO;AAC9C,MAAI,MAAM,QAAQ,GAAG,EAAG,QAAO,IAAI,IAAI,UAAU;AACjD,MAAI,OAAO,QAAQ,YAAY,eAAe,KAAM,QAAO;AAE3D,QAAM,SAAS,EAAE,GAAG,IAAI;AACxB,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AAErC,QAAI,qBAAqB,SAAS,GAAG,GAAG;AACtC;AAAA,IACF;AACA,QAAI,YAAY,SAAS,GAAG,KAAK,OAAO,OAAO,GAAG,MAAM,UAAU;AAChE,aAAO,GAAG,IAAI,IAAI,KAAK,OAAO,GAAG,CAAC;AAAA,IACpC,WAAW,OAAO,OAAO,GAAG,MAAM,UAAU;AAC1C,aAAO,GAAG,IAAI,WAAW,OAAO,GAAG,CAAC;AAAA,IACtC;AAAA,EACF;AACA,SAAO;AACT;AAQA,eAAe,IACb,MACA,UAAyE,CAAC,GAC9D;AACZ,MAAI,CAAC,mBAAmB,CAAC,SAAS;AAChC,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,MAAM,GAAG,eAAe,MAAM,IAAI;AACxC,QAAM,OAAoB;AAAA,IACxB,QAAQ,QAAQ,UAAU;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB,UAAU,OAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,SAAK,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,EACzC;AAEA,QAAM,WAAW,MAAM,MAAM,KAAK,IAAI;AAEtC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,UAAM,IAAI,MAAM,MAAM,SAAS,QAAQ,SAAS,MAAM,EAAE;AAAA,EAC1D;AAEA,QAAM,OAAO,MAAM,SAAS,KAAK;AACjC,MAAI,CAAC,QAAQ,SAAS,QAAQ;AAC5B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,KAAK,MAAM,IAAI;AAG9B,MAAI,QAAQ,gBAAgB;AAC1B,WAAO;AAAA,EACT;AAGA,SAAO,WAAW,MAAM;AAC1B;AAkhBA,eAAe,WACb,MACA,UAA+C,CAAC,GACpC;AACZ,MAAI,CAAC,mBAAmB,CAAC,SAAS;AAChC,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,MAAM,GAAG,eAAe,WAAW,IAAI;AAC7C,QAAM,OAAoB;A
AAA,IACxB,QAAQ,QAAQ,UAAU;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB,UAAU,OAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,SAAK,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,EACzC;AAEA,QAAM,WAAW,MAAM,MAAM,KAAK,IAAI;AACtC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AACnE,UAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,KAAK,SAAS,EAAE;AAAA,EACtE;AACA,SAAO,SAAS,KAAK;AACvB;AA5rBA,IAwBI,iBACA,SA8BE,aASA,sBAqFO,sBAkCA,sBAuDA,4BA2DA,mBAgCA,oBAmBA,uBAwCA,2BA+BA,yBAuCA,yBAwCA,uBAiDA,2BAqEA,0BAsFA;AA9rBb;AAAA;AAAA;AAwBA,IAAI,kBAAiC;AACrC,IAAI,UAAyB;AA8B7B,IAAM,cAAc,CAAC,aAAa,aAAa,aAAa,eAAe,aAAa,cAAc,YAAY,aAAa,iBAAiB,sBAAsB;AAStK,IAAM,uBAAuB,CAAC,gBAAgB,eAAe;AAqFtD,IAAM,uBAAuB;AAAA,MAClC,OAAO,MAAkG;AACvG,eAAO,IAAa,aAAa,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACjE;AAAA,MAEA,QAAQ,IAA0C;AAChD,eAAO,IAAyB,aAAa,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC1E;AAAA,MAEA,KAAK,QAAQ,IAAI,SAAS,GAAuB;AAC/C,eAAO,IAAe,mBAAmB,KAAK,WAAW,MAAM,EAAE;AAAA,MACnE;AAAA,MAEA,aAAa,IAAY,QAAyD;AAChF,eAAO,IAAyB,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,OAAO,EAAE,CAAC;AAAA,MAC1F;AAAA,MAEA,YAAY,IAAY,OAA6C;AACnE,eAAO,IAAyB,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,MAAM,EAAE,CAAC;AAAA,MACzF;AAAA,MAEA,OAAO,IAAY,SAAwF;AACzG,eAAO,IAAyB,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,QAAQ,CAAC;AAAA,MACvF;AAAA,MAEA,OAAO,IAA8B;AACnC,eAAO,IAA0B,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,CAAC,EAAE,KAAK,OAAK,GAAG,WAAW,KAAK;AAAA,MACzG;AAAA,IACF;AAMO,IAAM,uBAAuB;AAAA,MAClC,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAA8B,qBAAqB,SAAS,gBAAgB;AACjG,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,OAAO,WAAmB,cAA8C;AACtE,eAAO,IAAa,aAAa,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,aAAa,EAAE,CAAC;AAAA,MACxF;AAAA,MAEA,QAAQ,WAAmB,eAAmD;AAC5E,eAAO,IAAe,mBAAmB,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,cAAc,EAAE,CAAC;AAAA,MACjG;AAAA,MAEA,aAAa,WAAuC;AAClD,eAAO,IAAe,qBAAqB,SAAS,EAAE;AAAA,MACxD;AAAA,MAEA,iBAAiB,WAA4C;AAM3D,eAAO,IAAoB,qBAAqB,SAAS,mBAAmB,EAAE,gBAAgB,KAAK,CAAC;AAAA,MACtG;AAAA,MAEA,MAAM,mBAAmB,WAAmB,QAAQ,IAAwB;AAC1E,cAAM,WAAW,MAAM,IAAe,qBAAqB,SAAS,EAAE;AACtE,eAAO,SAAS,MAAM,CAAC,KAAK;AAAA,MAC9B;AAAA,MAEA,MA
AM,eAAe,WAAoC;AACvD,cAAM,SAAS,MAAM,IAAuB,qBAAqB,SAAS,QAAQ;AAClF,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,qBAAqB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACpG,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,mBAAmB,WAAmB,cAAuC;AACjF,cAAM,SAAS,MAAM;AAAA,UACnB,qBAAqB,SAAS,kBAAkB,YAAY;AAAA,UAC5D,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,6BAA6B;AAAA,MACxC,OAAO,MAQoB;AACzB,eAAO,IAAmB,oBAAoB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC9E;AAAA,MAEA,QAAQ,IAAgD;AACtD,eAAO,IAA+B,oBAAoB,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACvF;AAAA,MAEA,gBAAgB,YAAwD;AACtE,eAAO,IAA+B,oCAAoC,UAAU,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC/G;AAAA,MAEA,oBAAoB,WAA6C;AAC/D,eAAO,IAAqB,4BAA4B,SAAS,UAAU;AAAA,MAC7E;AAAA,MAEA,QAAQ,IAAgD;AACtD,eAAO,IAA+B,oBAAoB,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MACnH;AAAA,MAEA,OAAO,IAAgD;AACrD,eAAO,IAA+B,oBAAoB,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MACnH;AAAA,MAEA,SAAS,IAAY,QAAiB,OAAoD;AACxF,eAAO,IAA+B,oBAAoB,EAAE,IAAI;AAAA,UAC9D,QAAQ;AAAA,UACR,MAAM,EAAE,QAAQ,QAAQ,UAAU,aAAa,QAAQ,MAAM;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,MAEA,aAAa,WAA6C;AACxD,eAAO,IAAqB,4BAA4B,SAAS,EAAE;AAAA,MACrE;AAAA,MAEA,MAAM,gBAAgB,WAAmB,WAA2C;AAElF,cAAM,YAAY,qBAAqB,OAAO,UAAU,QAAQ,IAAI,IAAI,KAAK,SAAS,EAAE,QAAQ;AAChG,cAAM,SAAS,MAAM;AAAA,UACnB,4BAA4B,SAAS,UAAU,SAAS;AAAA,UACxD,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,oBAAoB;AAAA,MAC/B,OAAO,MAAiF;AACtF,eAAO,IAAc,UAAU,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC/D;AAAA,MAEA,WAAW,WAAmB,OAAwE;AACpG,eAAO,IAAgB,gBAAgB,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,MAAM,EAAE,CAAC;AAAA,MACvF;AAAA,MAEA,aAAa,WAAwC;AACnD,eAAO,IAAgB,kBAAkB,SAAS,EAAE;AAAA,MACtD;AAAA,MAEA,aAAa,IAAY,QAA2D;AAClF,eAAO,IAA0B,UAAU,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,OAAO,EAAE,CAAC;AAAA,MACxF;AAAA,MAEA,MAAM,OAAO,IAA8B;AACzC,cAAM,SAAS,MAAM,IAA0B,UAAU,EAAE,IAAI,EAAE,QAAQ,SAAS,CAAC;AACnF,eAAO,QAAQ,WAAW;AAAA,MAC5B;AAAA,MAEA,MAAM,aAAa,WAAoC;AACrD,cAAM,SAAS,MAAM,IAAyB,kBAAkB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACjG,eAAO,OAAO;AAA
A,MAChB;AAAA,IACF;AAMO,IAAM,qBAAqB;AAAA,MAChC,KAAK,WAAmB,WAAyC;AAC/D,eAAO,IAAiB,WAAW,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,UAAU,EAAE,CAAC;AAAA,MACvF;AAAA,MAEA,aAAa,WAA2C;AACtD,eAAO,IAAmB,mBAAmB,SAAS,EAAE;AAAA,MAC1D;AAAA,MAEA,MAAM,SAAS,WAAmB,WAAqC;AACrE,cAAM,SAAS,MAAM,IAA2B,mBAAmB,SAAS,cAAc,SAAS,EAAE;AACrG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,wBAAwB;AAAA,MACnC,OAAO,MAA6F;AAClG,eAAO,IAAc,cAAc,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACnE;AAAA,MAEA,QAAQ,IAA2C;AACjD,eAAO,IAA0B,cAAc,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC5E;AAAA,MAEA,aAAa,WAAwC;AACnD,eAAO,IAAgB,sBAAsB,SAAS,EAAE;AAAA,MAC1D;AAAA,MAEA,WAAW,WAAwC;AACjD,eAAO,IAAgB,sBAAsB,SAAS,UAAU;AAAA,MAClE;AAAA,MAEA,aAAa,IAAY,QAA4B,UAAmB,OAA+C;AACrH,eAAO,IAA0B,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,UAAU,MAAM,EAAE,CAAC;AAAA,MAC7G;AAAA,MAEA,UAAU,IAAY,KAA4C;AAChE,eAAO,IAA0B,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,IAAI,EAAE,CAAC;AAAA,MACzF;AAAA,MAEA,MAAM,OAAO,IAA8B;AACzC,cAAM,SAAS,MAAM,IAA0B,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,CAAC;AACvF,eAAO,QAAQ,WAAW;AAAA,MAC5B;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,sBAAsB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACrG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,4BAA4B;AAAA,MACvC,OAAO,WAAmB,UAAyC;AACjE,eAAO,IAAkB,YAAY,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,SAAS,EAAE,CAAC;AAAA,MACxF;AAAA,MAEA,eAAe,WAAsD;AACnE,eAAO,IAAyB,oBAAoB,SAAS,EAAE,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MAC3F;AAAA,MAEA,cAAc,UAAqD;AACjE,eAAO,IAA8B,yBAAyB,QAAQ,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACjG;AAAA,MAEA,OAAO,UAAqD;AAC1D,eAAO,IAA8B,yBAAyB,QAAQ,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MAC7H;AAAA,MAEA,UAAU,UAAqD;AAC7D,eAAO,IAA8B,yBAAyB,QAAQ,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,QAAQ,EAAE,CAAC;AAAA,MAC1H;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,oBAAoB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACnG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,0BAA0B;AAAA,MACrC,OAAO,MAA6F;AAClG,eAAO,IAAgB,gBAAgB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACvE;AAAA,MAEA,QAAQ,IAA6C;AACnD,eAAO,IAA4B,gBAAgB,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAChF;AAAA,MAEA,aAAa,WAA0C;AA
CrD,eAAO,IAAkB,wBAAwB,SAAS,EAAE;AAAA,MAC9D;AAAA,MAEA,qBAAqB,WAAmB,iBAA0D;AAChG,eAAO,IAAuB,wBAAwB,SAAS,gBAAgB,eAAe,EAAE,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MAC5H;AAAA,MAEA,UAAU,WAAoD;AAC5D,eAAO,IAAuB,wBAAwB,SAAS,SAAS,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MACpG;AAAA,MAEA,MAAM,oBAAoB,WAAmB,iBAA0C;AACrF,cAAM,SAAS,MAAM;AAAA,UACnB,wBAAwB,SAAS,mBAAmB,eAAe;AAAA,UACnE,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,wBAAwB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACvG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,0BAA0B;AAAA,MACrC,OAAO,MAMiB;AACtB,eAAO,IAAgB,iBAAiB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACxE;AAAA,MAEA,gBAAgB,cAA6C;AAC3D,eAAO,IAAkB,4BAA4B,YAAY,EAAE;AAAA,MACrE;AAAA,MAEA,aAAa,WAA0C;AACrD,eAAO,IAAkB,yBAAyB,SAAS,EAAE;AAAA,MAC/D;AAAA,MAEA,gBAAgB,WAAmB,iBAAgD;AACjF,eAAO,IAAkB,yBAAyB,SAAS,kBAAkB,eAAe,EAAE;AAAA,MAChG;AAAA,MAEA,MAAM,UAAU,cAAsB,UAAoC;AACxE,cAAM,SAAS,MAAM;AAAA,UACnB,4BAA4B,YAAY,eAAe,mBAAmB,QAAQ,CAAC;AAAA,QACrF;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,yBAAyB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACxG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,wBAAwB;AAAA,MACnC,OAAO,MAMwB;AAC7B,eAAO,IAAuB,cAAc,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC5E;AAAA,MAEA,QAAQ,IAAoD;AAC1D,eAAO,IAAmC,cAAc,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACrF;AAAA,MAEA,gBAAgB,YAA4D;AAC1E,eAAO,IAAmC,8BAA8B,UAAU,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC7G;AAAA,MAEA,aAAa,WAAiD;AAC5D,eAAO,IAAyB,sBAAsB,SAAS,EAAE;AAAA,MACnE;AAAA,MAEA,QAAQ,IAAY,MAA4D;AAC9E,eAAO,IAAmC,cAAc,EAAE,aAAa,EAAE,QAAQ,QAAQ,MAAM,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MAClI;AAAA,MAEA,SAAS,IAAY,QAAyD;AAC5E,eAAO,IAAmC,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,aAAa,OAAO,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MACjJ;AAAA,MAEA,UAAU,IAAY,OAAuD;AAC3E,eAAO,IAAmC,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,SAAS,MAAM,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MAC5I;AAAA,MAEA,OAAO,IAAoD;AACzD,eAAO,IAAmC,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,YAAY,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MACzI;AAAA,M
AEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,sBAAsB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACrG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,4BAA4B;AAAA,MACvC,OACE,KACA,MASuB;AACvB,eAAO,IAAkB,mBAAmB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC5E;AAAA,MAEA,YACE,KACA,QAS+C;AAC/C,eAAO,IAA0C,yBAAyB;AAAA,UACxE,QAAQ;AAAA,UACR,MAAM,EAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,QAAQ,KAAU,IAA+C;AAC/D,eAAO,IAA8B,mBAAmB,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACrF;AAAA,MAEA,eAAe,KAAU,WAA4C;AACnE,eAAO,IAAoB,6BAA6B,SAAS,EAAE;AAAA,MACrE;AAAA,MAEA,cAAc,KAAU,WAAmB,UAA2C;AACpF,eAAO,IAAoB,6BAA6B,SAAS,SAAS,mBAAmB,QAAQ,CAAC,EAAE;AAAA,MAC1G;AAAA,MAEA,MAAM,kBAAkB,KAAU,WAAoC;AACpE,cAAM,SAAS,MAAM,IAAyB,6BAA6B,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AAC5G,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,iBAAiB,KAAU,WAAmB,UAAmC;AACrF,cAAM,SAAS,MAAM;AAAA,UACnB,6BAA6B,SAAS,SAAS,mBAAmB,QAAQ,CAAC;AAAA,UAC3E,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,iBAAiB,KAAU,WAAoC;AACnE,cAAM,SAAS,MAAM,IAAuB,6BAA6B,SAAS,QAAQ;AAC1F,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,2BAA2B;AAAA,MACtC,OACE,KACA,MAO4B;AAC5B,eAAO,IAAuB,iBAAiB;AAAA,UAC7C,QAAQ;AAAA,UACR,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,eAAe,KAAK,eAAe,YAAY;AAAA,YAC/C,sBAAsB,KAAK,sBAAsB,YAAY;AAAA,UAC/D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,IAAI,KAAU,WAA2D;AACvE,eAAO,IAA8B,2BAA2B,SAAS,EAAE,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MACvG;AAAA,MAEA,MAAM,OAAO,KAAU,WAAqC;AAC1D,cAAM,SAAS,MAAM,IAA0B,2BAA2B,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AAC3G,eAAO,QAAQ,WAAW;AAAA,MAC5B;AAAA,MAEA,KAAK,KAAwC;AAC3C,eAAO,IAAyB,eAAe;AAAA,MACjD;AAAA,IACF;AAqDO,IAAM,iBAAiB;AAAA,MAC5B,MAAM,aACJ,WACA,UACA,aACA,UAC4B;AAC5B,eAAO,WAA8B,eAAe;AAAA,UAClD,QAAQ;AAAA,UACR,MAAM,EAAE,WAAW,UAAU,aAAa,SAAS;AAAA,QACrD,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,gBAAgB,WAA2C;AAC/D,cAAM,SAAS,MAAM,WAAqC,UAAU,SAAS,EAAE;AAC/E,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,eAAe,QAAqE;AACxF,eAAO,WAAuD,aAAa,MAAM,EAAE;AAAA,MACrF;AAAA,MAEA,MAAM,WAAW,QAA+B;AAC9C,cAAM,WAAW,UAAU,MAAM,IAAI,EAAE,QAAQ,SAAS,CAAC;AAAA,MAC3D;AAAA,MAEA,MAAM,WAAW,QAAgB,MAA6C;AAC5E,cAAM,WAAW,UAAU,MAAM,IAAI,E
AAE,QAAQ,SAAS,MAAM,KAAK,CAAC;AAAA,MACtE;AAAA,IACF;AAAA;AAAA;;;AC1pBO,SAAS,aAAa,QAA0C;AACrE,qBAAmB,OAAO,KAAK,OAAO,OAAO;AAC7C,gBAAc;AAChB;AAMO,SAAS,QAAQ;AACtB,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AAEA,SAAO,CAAC;AACV;AAYO,SAAS,gBAAgB;AAC9B,sBAAoB;AACpB,gBAAc;AAChB;AA/FA,IA2DI,aAuCS,gBACA,gBACA,sBACA,aACA,cAEA,qBACA,mBACA,mBACA,iBAEA;AA7Gb;AAAA;AAAA;AAMA;AAqDA,IAAI,cAAc;AAuCX,IAAM,iBAAiB;AACvB,IAAM,iBAAiB;AACvB,IAAM,uBAAuB;AAC7B,IAAM,cAAc;AACpB,IAAM,eAAe;AAErB,IAAM,sBAAsB;AAC5B,IAAM,oBAAoB;AAC1B,IAAM,oBAAoB;AAC1B,IAAM,kBAAkB;AAExB,IAAM,qBAAqB;AAAA;AAAA;;;AC7GlC,SAAS,SAAS;AAAlB,IAGa,0BASA,qBAaA,kBAYA,qBASA,2BAkDA,0BAWA;AA3Gb;AAAA;AAAA;AAGO,IAAM,2BAA2B,EAAE,OAAO;AAAA,MAC/C,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA,MACzC,YAAY,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,MAChD,WAAW,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,MAC/C,YAAY,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,MAChD,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,IAC5C,CAAC;AAGM,IAAM,sBAAsB,EAAE,OAAO;AAAA,MAC1C,MAAM,EAAE,OAAO;AAAA,MACf,aAAa,EAAE,OAAO;AAAA;AAAA,MAEtB,aAAa,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA;AAAA,MAEjD,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAC;AAAA,IAClD,CAAC;AAMM,IAAM,mBAAmB,EAAE,OAAO;AAAA,MACvC,SAAS,EAAE,QAAQ;AAAA,MACnB,cAAc,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,QAAQ,CAAC;AAAA,MAC9C,YAAY,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,MACtC,eAAe,EAAE,OAAO,EAAE,SAAS;AAAA,MACnC,QAAQ,EAAE,KAAK,CAAC,WAAW,aAAa,QAAQ,CAAC;AAAA,MACjD,QAAQ,EAAE,QAAQ,EAAE,SAAS;AAAA,MAC7B,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,MAC3B,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,IAClC,CAAC;AAGM,IAAM,sBAAsB,EAAE,OAAO;AAAA,MAC1C,eAAe,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,QAAQ,CAAC,EAAE,SAAS;AAAA,MAC1D,iBAAiB,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,MAC3C,iBAAiB,EAAE,OAAO,EAAE,SAAS;AAAA,MACrC,iBAAiB,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAO;AAAA,MACtD,MAAM,iBAAiB,SAAS;AAAA,IAClC,CAAC;AAGM,IAAM,4BAA4B,EACtC,OAAO;AAAA;AAAA,MAEN,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAE9B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAE7B,gBAAgB,EAAE,OAAO,EAAE,QAAQ,sBAAsB;AAAA;AAAA,MAEzD,WAAW
,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAE/B,SAAS,EACN,MAAM,EAAE,OAAO,CAAC,EAChB,SAAS,EACT,QAAQ;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA;AAAA,MAEH,SAAS,EACN,MAAM,EAAE,OAAO,CAAC,EAChB,SAAS,EACT,QAAQ;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACL,CAAC,EACA,SAAS;AAGL,IAAM,2BAA2B,EACrC,OAAO;AAAA;AAAA,MAEN,KAAK,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA;AAAA,MAG/B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA,IAC/B,CAAC,EACA,SAAS;AAGL,IAAM,yBAAyB,EAAE,OAAO;AAAA;AAAA,MAE7C,cAAc,EAAE,OAAO,EAAE,QAAQ,2BAA2B;AAAA;AAAA,MAG5D,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAGtC,eAAe,yBAAyB,SAAS,EAAE,QAAQ,CAAC,CAAC;AAAA;AAAA,MAG7D,iBAAiB,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,MAG3C,QAAQ,EACL,OAAO;AAAA;AAAA,QAEN,WAAW,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,UAAU;AAAA;AAAA,QAEnD,uBAAuB,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAC;AAAA,MAClE,CAAC,EACA,SAAS,EACT,QAAQ,CAAC,CAAC;AAAA;AAAA,MAGb,SAAS,EACN,OAAO;AAAA;AAAA,QAEN,UAAU,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAO;AAAA;AAAA,QAE/C,eAAe,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA;AAAA,QAElD,oBAAoB,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE;AAAA,MACtD,CAAC,EACA,SAAS,EACT,QAAQ,CAAC,CAAC;AAAA;AAAA,MAGb,QAAQ,EACL,OAAO;AAAA,QACN,MAAM,EAAE,OAAO,EAAE,QAAQ,IAAI;AAAA,QAC7B,MAAM,EAAE,OAAO,EAAE,QAAQ,WAAW;AAAA;AAAA;AAAA,QAGpC,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,MACvC,CAAC,EACA,QAAQ,EAAE,MAAM,MAAM,MAAM,YAAY,CAAC;AAAA;AAAA,MAG5C,cAAc,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,kBAAkB;AAAA;AAAA;AAAA,MAI9D,cAAc;AAAA;AAAA,MAGd,eAAe;AAAA,IACjB,CAAC;AAAA;AAAA;;;ACpKD,SAAS,YAAY,cAAc,WAAW,qBAAqB;AACnE,SAAS,SAAS,SAAS,YAAY;AACvC,SAAS,SAAS,gBAAgB;AA0B3B,SAAS,yBAAyB,YAAsC;AAC7E,QAAM,mBAA8D,CAAC;AACrE,QAAM,eAA0D,CAAC;AACjE,QAAM,iBAA2B,CAAC;AAClC,MAAI,eAA8B;AAGlC,QAAM,gBAAgB,KAAK,YAAY,gBAAgB,OAAO;AAC9D,MAAI,WAAW,aAAa,GAAG;AAC7B,qBAAiB,KAAK,EAAE,MAAM,eAAe,UAAU,EAAE,CAAC;AAC1D,mBAAe,KAAK,aAAa;AAAA,EACnC;AAGA,QAAM,iBAAiB,KAAK,YAAY,g
BAAgB,QAAQ;AAChE,MAAI,WAAW,cAAc,GAAG;AAC9B,iBAAa,KAAK,EAAE,MAAM,gBAAgB,UAAU,EAAE,CAAC;AACvD,mBAAe,KAAK,cAAc;AAAA,EACpC;AAGA,QAAM,iBAAiB,KAAK,YAAY,WAAW,OAAO;AAC1D,MAAI,WAAW,cAAc,GAAG;AAE9B,iBAAa,KAAK,EAAE,MAAM,gBAAgB,UAAU,EAAE,CAAC;AACvD,mBAAe,KAAK,cAAc;AAAA,EACpC;AAGA,QAAM,kBAAkB,KAAK,YAAY,WAAW,QAAQ;AAC5D,MAAI,WAAW,eAAe,GAAG;AAC/B,iBAAa,KAAK,EAAE,MAAM,iBAAiB,UAAU,EAAE,CAAC;AACxD,mBAAe,KAAK,eAAe;AAAA,EACrC;AAGA,QAAM,kBAAkB,KAAK,YAAY,QAAQ;AACjD,MAAI,WAAW,eAAe,GAAG;AAC/B,iBAAa,KAAK,EAAE,MAAM,iBAAiB,UAAU,EAAE,CAAC;AACxD,mBAAe,KAAK,eAAe;AAAA,EACrC;AAGA,QAAM,WAAW,KAAK,YAAY,WAAW;AAC7C,MAAI,WAAW,QAAQ,GAAG;AACxB,mBAAe;AAAA,EACjB;AAIA,QAAM,UAAU,QAAQ,YAAY,IAAI,QAAQ,WAAW,EAAE,CAAC;AAC9D,QAAM,oBAAoB;AAAA,IACxB,QAAQ,SAAS,mBAAmB;AAAA;AAAA,IACpC,QAAQ,SAAS,kBAAkB;AAAA;AAAA,EACrC;AACA,QAAM,mBAAmB,kBAAkB,KAAK,OAAK,WAAW,CAAC,CAAC;AAClE,MAAI,kBAAkB;AACpB,iBAAa,KAAK,EAAE,MAAM,kBAAkB,UAAU,IAAI,CAAC;AAC3D,mBAAe,KAAK,gBAAgB;AAAA,EACtC;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQO,SAAS,sBAA8B;AAC5C,QAAM,UAAU;AAEhB,UAAQ,SAAS,GAAG;AAAA,IAClB,KAAK;AACH,aAAO,KAAK,QAAQ,GAAG,WAAW,uBAAuB,OAAO;AAAA,IAClE,KAAK;AACH,aAAO,KAAK,QAAQ,IAAI,WAAW,KAAK,QAAQ,GAAG,WAAW,SAAS,GAAG,OAAO;AAAA,IACnF;AAEE,aAAO,KAAK,QAAQ,IAAI,iBAAiB,KAAK,QAAQ,GAAG,UAAU,OAAO,GAAG,OAAO;AAAA,EACxF;AACF;AAKO,SAAS,yBAAiC;AAC/C,QAAM,MAAM,oBAAoB;AAChC,MAAI,CAAC,WAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AASA,SAAS,eAAe,UAAiC;AAEvD,MAAI,aAAa;AAEjB,SAAO,eAAe,QAAQ,UAAU,GAAG;AACzC,eAAW,YAAY,mBAAmB;AACxC,YAAM,aAAa,QAAQ,YAAY,QAAQ;AAC/C,UAAI,WAAW,UAAU,GAAG;AAC1B,eAAO;AAAA,MACT;AAAA,IACF;AACA,iBAAa,QAAQ,UAAU;AAAA,EACjC;AAGA,QAAM,aAAa,oBAAoB;AACvC,aAAW,YAAY,mBAAmB;AACxC,UAAM,aAAa,KAAK,YAAY,QAAQ;AAC5C,QAAI,WAAW,UAAU,GAAG;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKO,SAAS,WACd,YACA,kBACgB;AAChB,QAAM,MAAM,oBAAoB,QAAQ,IAAI;AAG5C,MAAI,YAAuC,CAAC;AAC5C,MAAI,YAAY;AAEhB,MAAI,YAAY;AACd,QAAI,CAAC,WAAW,UAAU,GAAG;AAC3B,YAAM,IAAI,MAAM,0BAA0B,UAAU,EAAE;AAAA,IACxD;AACA,UAAM,UAAU,aAAa,YAAY,OAAO;AAChD,gBA
AY,KAAK,MAAM,OAAO;AAC9B,gBAAY,QAAQ,QAAQ,UAAU,CAAC;AAAA,EACzC,OAAO;AACL,UAAM,YAAY,eAAe,GAAG;AACpC,QAAI,WAAW;AACb,YAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,kBAAY,KAAK,MAAM,OAAO;AAC9B,kBAAY,QAAQ,SAAS;AAAA,IAC/B;AAAA,EACF;AAGA,MAAI,QAAQ,IAAI,mBAAmB;AACjC,cAAU,eAAe,QAAQ,IAAI;AAAA,EACvC;AACA,MAAI,QAAQ,IAAI,kBAAkB;AAChC,cAAU,SAAS;AAAA,MACjB,MAAM,SAAS,QAAQ,IAAI,kBAAkB,EAAE;AAAA,MAC/C,MAAM,UAAU,QAAQ,QAAQ;AAAA,IAClC;AAAA,EACF;AACA,MAAI,QAAQ,IAAI,eAAe;AAC7B,cAAU,eAAe,QAAQ,IAAI;AAAA,EACvC;AAGA,QAAM,SAAS,uBAAuB,MAAM,SAAS;AAMrD,MAAI;AACJ,MAAI,kBAAkB;AAEpB,+BAA2B;AAAA,EAC7B,WAAW,OAAO,oBAAoB,OAAO,qBAAqB,OAAO,OAAO,iBAAiB,WAAW,GAAG,GAAG;AAEhH,+BAA2B,OAAO;AAAA,EACpC,OAAO;AAEL,+BAA2B,QAAQ,IAAI;AAAA,EACzC;AAGA,QAAM,aAAa,yBAAyB,wBAAwB;AAGpE,QAAM,kBAAkB,OAAO,QAAQ,yBAAyB,CAAC,GAC9D,IAAI,CAAC,QAAQ,QAAQ,WAAW,GAAG,CAAC,EACpC,OAAO,CAAC,QAAQ,WAAW,GAAG,CAAC;AAElC,QAAM,4BAA4B;AAAA,IAChC,GAAG,WAAW;AAAA,IACd,GAAG;AAAA,EACL;AAGA,MAAI;AACJ,MAAI,OAAO,gBAAgB,OAAO,iBAAiB,oBAAoB;AAErE,2BAAuB,QAAQ,WAAW,OAAO,YAAY;AAAA,EAC/D,OAAO;AAEL,UAAM,aAAa,uBAAuB;AAC1C,2BAAuB,KAAK,YAAY,gBAAgB;AAAA,EAC1D;AAGA,QAAM,wBAAqD;AAAA,IACzD,UAAU,QAAQ,IAAI,uBAAuB,OAAO,eAAe,YAAY;AAAA,IAC/E,SAAS,QAAQ,IAAI,mBAAmB,OAAO,eAAe,WAAW;AAAA,IACzE,gBACE,QAAQ,IAAI,0BACZ,OAAO,eAAe,kBACtB;AAAA,IACF,WAAW,OAAO,eAAe,aAAa;AAAA,IAC9C,SAAS,OAAO,eAAe,WAAW;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS,OAAO,eAAe,WAAW;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAIA,QAAM,qBAAqB;AAC3B,QAAM,YAAY,QAAQ,IAAI,0BAA0B,OAAO,cAAc,OAAO;AACpF,QAAM,gBAAgB,QAAQ,IAAI,wBAAwB,OAAO,cAAc,WAAW,kBAAkB;AAE5G,QAAM,uBAAmD;AAAA,IACvD,KAAK;AAAA,IACL,SAAS;AAAA,IACT,cAAc,CAAC,CAAC,aAAa,CAAC,CAAC;AAAA,EACjC;AAEA,QAAM,WAA2B;AAAA,IAC/B,GAAG;AAAA,IACH,QAAQ;AAAA,MACN,MAAM,OAAO,OAAO;AAAA,MACpB,MAAM,OAAO,OAAO,QAAQ;AAAA,MAC5B,WAAW,OAAO,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IACl
B;AAAA,IACA;AAAA,EACF;AAEA,iBAAe;AACf,SAAO;AACT;AAKO,SAAS,YAA4B;AAC1C,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AACA,SAAO;AACT;AAKO,SAAS,iBACd,UACA,eACS;AACT,QAAM,SAAS,UAAU;AAGzB,MAAI,eAAe,gBAAgB,GAAG,MAAM,QAAW;AACrD,WAAO,cAAc,cAAc,GAAG;AAAA,EACxC;AAGA,MAAI,eAAe,gBAAgB,QAAQ,MAAM,QAAW;AAC1D,WAAO,cAAc,cAAc,QAAQ;AAAA,EAC7C;AAGA,QAAM,kBAAkB,OAAO;AAC/B,MAAI,gBAAgB,QAAQ,MAAM,QAAW;AAC3C,WAAO,gBAAgB,QAAQ;AAAA,EACjC;AAGA,MAAI,aAAa,QAAQ;AACvB,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAgDA,SAAS,oBAAmC;AAC1C,QAAM,WAAW,KAAK,oBAAoB,GAAG,aAAa;AAC1D,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO;AAAA,EACT;AACA,MAAI;AACF,UAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,UAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,WAAO,KAAK,WAAW;AAAA,EACzB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,YAAYA,UAAiB,QAAuB;AAClE,QAAM,SAAS,uBAAuB;AACtC,QAAM,WAAW,KAAK,QAAQ,aAAa;AAC3C,QAAM,OAAsB;AAAA,IAC1B,SAAAA;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC;AAAA,EACF;AACA,gBAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,IAAM,CAAC;AACxE;AAqBA,eAAsB,yBACpB,WACA,MAC8C;AAC9C,QAAM,WAAW,MAAM,MAAM,GAAG,SAAS,kBAAkB;AAAA,IACzD,QAAQ;AAAA,IACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,IAC9C,MAAM,KAAK,UAAU,EAAE,MAAM,QAAQ,QAAO,oBAAI,KAAK,GAAE,YAAY,CAAC,GAAG,CAAC;AAAA,EAC1E,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,UAAM,IAAI,MAAM,MAAM,SAAS,4BAA4B,SAAS,MAAM,EAAE;AAAA,EAC9E;AAEA,QAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,cAAY,KAAK,SAAS,KAAK,MAAM;AAErC,SAAO;AACT;AAMA,eAAsB,oBAAoB,WAAoC;AAE5E,MAAI,QAAQ,IAAI,sBAAsB;AACpC,WAAO,QAAQ,IAAI;AAAA,EACrB;AAGA,QAAM,YAAY,kBAAkB;AACpC,MAAI,WAAW;AACb,WAAO;AAAA,EACT;AAGA,QAAM,EAAE,SAAAA,SAAQ,IAAI,MAAM,yBAAyB,SAAS;AAC5D,SAAOA;AACT;AA2BA,SAAS,iBAAyB;AAChC,QAAM,SAAS,uBAAuB;AACtC,SAAO,KAAK,QAAQ,aAAa;AACnC;AAKA,SAAS,oBAAmC;AAC1C,QAAM,WAAW,eAAe;AAChC,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO,CAAC;AAAA,EACV;AACA,MAAI;AACF,UAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,SAAS,kBAAkB,MAA2B;AACpD,QAAM,WAAW,eAAe;AAChC,gBAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,IAAM
,CAAC;AACxE;AAWO,SAAS,qBAA2B;AACzC,QAAM,aAAa,kBAAkB;AAErC,aAAW,CAAC,UAAU,MAAM,KAAK,OAAO,QAAQ,gBAAgB,GAAG;AACjE,QAAI,CAAC,QAAQ,IAAI,MAAM,KAAK,WAAW,QAAQ,GAAG;AAChD,cAAQ,IAAI,MAAM,IAAI,WAAW,QAAQ;AAAA,IAC3C;AAAA,EACF;AACF;AAmBO,SAAS,UAAU,UAAkB,QAAsB;AAChE,QAAM,qBAAqB,SAAS,YAAY;AAChD,QAAM,SAAS,iBAAiB,kBAAkB;AAElD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,qBAAqB,QAAQ,gBAAgB,oBAAoB,KAAK,IAAI,CAAC,EAAE;AAAA,EAC/F;AAGA,QAAM,aAAa,kBAAkB;AACrC,aAAW,kBAAkB,IAAI;AACjC,oBAAkB,UAAU;AAG5B,UAAQ,IAAI,MAAM,IAAI;AACxB;AAKO,SAAS,aAAa,UAAwB;AACnD,QAAM,qBAAqB,SAAS,YAAY;AAChD,QAAM,SAAS,iBAAiB,kBAAkB;AAElD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,qBAAqB,QAAQ,gBAAgB,oBAAoB,KAAK,IAAI,CAAC,EAAE;AAAA,EAC/F;AAGA,QAAM,aAAa,kBAAkB;AACrC,SAAO,WAAW,kBAAkB;AACpC,oBAAkB,UAAU;AAI9B;AAMO,SAAS,kBAMb;AACD,QAAM,aAAa,kBAAkB;AAErC,SAAO,oBAAoB,IAAI,CAAC,aAAa;AAC3C,UAAM,SAAS,iBAAiB,QAAQ;AACxC,UAAM,WAAW,QAAQ,IAAI,MAAM;AACnC,UAAM,cAAc,WAAW,QAAQ;AAEvC,QAAI,SAAqC;AACzC,QAAI;AAEJ,QAAI,UAAU;AAEZ,UAAI,eAAe,aAAa,aAAa;AAC3C,iBAAS;AAAA,MACX,OAAO;AACL,iBAAS;AAAA,MACX;AACA,cAAQ;AAAA,IACV,WAAW,aAAa;AACtB,eAAS;AACT,cAAQ;AAAA,IACV;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,YAAY,CAAC,CAAC;AAAA,MACd;AAAA,MACA,WAAW,QAAQ,WAAW,KAAK,IAAI;AAAA,IACzC;AAAA,EACF,CAAC;AACH;AAKA,SAAS,WAAW,KAAqB;AACvC,MAAI,IAAI,UAAU,IAAI;AACpB,WAAO,SAAS,IAAI,MAAM,EAAE;AAAA,EAC9B;AACA,SAAO,IAAI,MAAM,GAAG,CAAC,IAAI,QAAQ,IAAI,MAAM,EAAE;AAC/C;AApqBA,IAYM,mBAoHF,cA2QE,eA0GA,eAGA,kBASO;AAjgBb;AAAA;AAAA;AAGA;AAmqBA;AA1pBA,IAAM,oBAAoB;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAgHA,IAAI,eAAsC;AA2Q1C,IAAM,gBAAgB;AA0GtB,IAAM,gBAAgB;AAGtB,IAAM,mBAA2C;AAAA,MAC/C,WAAW;AAAA,MACX,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,KAAK;AAAA,MACL,cAAc;AAAA,IAChB;AAGO,IAAM,sBAAsB,OAAO,KAAK,gBAAgB;AAAA;AAAA;;;ACjgB/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAAS,YAAAC,WAAU,eAAe;AAClC,SAAS,WAAAC,UAAS,UAAU,WAAAC,UAAS,YAAAC,iBAAgB;AACrD,SAAS,cAAAC,mBAAgC;AACzC,SAAS,iBAAiB;AAwB1B,SAAS,sBAAsB,SAAmE;AAChG,QAAM,mBAAmB,QAAQ,MAAM,mCAAmC;AAE1E,MAAI,CAAC,kBAAkB;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,CAAC,
EAAE,aAAa,IAAI,IAAI;AAE9B,MAAI;AAEF,UAAM,QAAQ,YAAY,MAAM,IAAI;AACpC,UAAM,OAAgC,CAAC;AACvC,QAAI,eAAgC;AACpC,QAAI,kBAAiC;AAErC,eAAW,QAAQ,OAAO;AAExB,UAAI,mBAAmB,KAAK,KAAK,EAAE,WAAW,GAAG,GAAG;AAClD,YAAI,QAAQ,KAAK,KAAK,EAAE,MAAM,CAAC,EAAE,KAAK;AAEtC,YAAK,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,KAC3C,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAI;AAClD,kBAAQ,MAAM,MAAM,GAAG,EAAE;AAAA,QAC3B;AACA,sBAAc,KAAK,KAAK;AACxB;AAAA,MACF;AAGA,UAAI,mBAAmB,cAAc;AACnC,aAAK,eAAe,IAAI;AACxB,uBAAe;AACf,0BAAkB;AAAA,MACpB;AAEA,YAAM,aAAa,KAAK,QAAQ,GAAG;AACnC,UAAI,aAAa,GAAG;AAClB,cAAM,MAAM,KAAK,MAAM,GAAG,UAAU,EAAE,KAAK;AAC3C,YAAI,QAAQ,KAAK,MAAM,aAAa,CAAC,EAAE,KAAK;AAG5C,YAAI,UAAU,MAAM,UAAU,MAAM;AAClC,4BAAkB;AAClB,yBAAe,CAAC;AAChB;AAAA,QACF;AAGA,YAAI,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAG;AAChD,gBAAM,eAAe,MAAM,MAAM,GAAG,EAAE;AACtC,gBAAM,QAAQ,aAAa,MAAM,GAAG,EAAE,IAAI,UAAQ;AAChD,gBAAI,UAAU,KAAK,KAAK;AACxB,gBAAK,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,KAC/C,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,GAAI;AACtD,wBAAU,QAAQ,MAAM,GAAG,EAAE;AAAA,YAC/B;AACA,mBAAO;AAAA,UACT,CAAC,EAAE,OAAO,UAAQ,KAAK,SAAS,CAAC;AACjC,eAAK,GAAG,IAAI;AACZ;AAAA,QACF;AAGA,YAAK,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,KAC3C,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAI;AAClD,kBAAQ,MAAM,MAAM,GAAG,EAAE;AAAA,QAC3B;AAGA,YAAI,UAAU,QAAQ;AACpB,eAAK,GAAG,IAAI;AAAA,QACd,WAAW,UAAU,SAAS;AAC5B,eAAK,GAAG,IAAI;AAAA,QACd,OAAO;AACL,eAAK,GAAG,IAAI;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAGA,QAAI,mBAAmB,cAAc;AACnC,WAAK,eAAe,IAAI;AAAA,IAC1B;AAEA,UAAM,WAAW,oBAAoB,MAAM,IAAI;AAC/C,WAAO,EAAE,UAAU,MAAM,KAAK,KAAK,EAAE;AAAA,EACvC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,qBAAqB,UAA0B;AACtD,SAAO,SAAS,UAAUF,SAAQ,QAAQ,CAAC,EACxC,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,CAAC,MAAM,EAAE,YAAY,CAAC;AAC5C;AAiBA,eAAsB,wBACpB,WACA,UAA6B,CAAC,GACZ;AAClB,QAAM;AAAA,IACJ,WAAW;AAAA,IACX,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,EACrB,IAAI;AAEJ,MAAI,CAACE,YAAW,SAAS,GAAG;AAC1B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAkB,CAAC;AACzB,QAAM,UAAU,MAAM,QAAQ,WAAW,EAAE,eAAe,KAAK,CAAC;AAEhE,aAAW,SAAS,SAAS;AAE3B,QAAI;AACJ,QAAI;AAEJ,QAAI,MAAM,YAAY,GAAG;AAEvB,YA
AM,cAAcH,SAAQ,WAAW,MAAM,MAAM,UAAU;AAC7D,UAAIG,YAAW,WAAW,GAAG;AAC3B,mBAAW;AACX,mBAAW,MAAM;AAAA,MACnB,OAAO;AACL;AAAA,MACF;AAAA,IACF,WAAW,MAAM,KAAK,SAAS,KAAK,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACpE,iBAAWH,SAAQ,WAAW,MAAM,IAAI;AACxC,iBAAW,MAAM;AAAA,IACnB,OAAO;AACL;AAAA,IACF;AAEA,UAAM,UAAU,MAAMD,UAAS,UAAU,OAAO;AAChD,UAAM,SAAS,sBAAsB,OAAO;AAE5C,QAAI,QAAQ;AACV,YAAM,cAAc,oBAAoB,OAAO,SAAS;AACxD,YAAM,WAA0B,cAAc,WAAW;AAEzD,aAAO,KAAK;AAAA,QACV,MAAM,OAAO,SAAS;AAAA,QACtB,aAAa,OAAO,SAAS;AAAA,QAC7B;AAAA,QACA;AAAA,QACA,OAAO,OAAO,SAAS;AAAA,QACvB;AAAA,QACA;AAAA,QACA,WAAW;AAAA,MACb,CAAC;AAAA,IACH,OAAO;AAEL,YAAM,OAAO,qBAAqB,QAAQ;AAC1C,YAAM,iBAAiB,QAAQ,MAAM,MAAM,EAAE,CAAC,GAAG,MAAM,GAAG,GAAG,KAAK;AAElE,aAAO,KAAK;AAAA,QACV;AAAA,QACA,aAAa,eAAe,QAAQ,SAAS,EAAE,EAAE,KAAK;AAAA,QACtD;AAAA,QACA,aAAa;AAAA,QACb,OAAO,CAAC;AAAA,QACR,UAAU,mBAAmB,WAAW;AAAA,QACxC;AAAA,QACA,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,cAAc,aAAyC;AAC3E,QAAM,YAAqB,CAAC;AAC5B,QAAM,YAAY,oBAAI,IAAY;AAElC,aAAW,OAAO,aAAa;AAC7B,UAAM,SAAS,MAAM,wBAAwB,GAAG;AAChD,eAAW,SAAS,QAAQ;AAE1B,UAAI,CAAC,UAAU,IAAI,MAAM,KAAK,YAAY,CAAC,GAAG;AAC5C,kBAAU,IAAI,MAAM,KAAK,YAAY,CAAC;AACtC,kBAAU,KAAK,KAAK;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,4BACpB,YAC0E;AAC1E,QAAM,YAAqB,CAAC;AAC5B,QAAM,YAAY,oBAAI,IAAY;AAGlC,aAAW,EAAE,MAAM,SAAS,KAAK,WAAW,kBAAkB;AAC5D,UAAM,SAAS,MAAM,wBAAwB,MAAM;AAAA,MACjD;AAAA,MACA,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AACD,eAAW,SAAS,QAAQ;AAC1B,UAAI,CAAC,UAAU,IAAI,MAAM,KAAK,YAAY,CAAC,GAAG;AAC5C,kBAAU,IAAI,MAAM,KAAK,YAAY,CAAC;AACtC,kBAAU,KAAK,KAAK;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAGA,aAAW,EAAE,MAAM,SAAS,KAAK,WAAW,cAAc;AACxD,UAAM,SAAS,MAAM,wBAAwB,MAAM;AAAA,MACjD;AAAA,MACA,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AACD,eAAW,SAAS,QAAQ;AAC1B,UAAI,CAAC,UAAU,IAAI,MAAM,KAAK,YAAY,CAAC,GAAG;AAC5C,kBAAU,IAAI,MAAM,KAAK,YAAY,CAAC;AACtC,kBAAU,KAAK,KAAK;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAGA,QAAM,eAAe,UAAU,OAAO,OAAK,EAAE,eAAe,EAAE,aAAa,QAAQ;AACnF,QAAM,iBAAiB,UAAU,OAAO,OAAK,CAAC,EAAE,eAAe,EAAE,aAAa,QAAQ;AAGtF,QAAM,oB
AAwC,MAAM,QAAQ;AAAA,IAC1D,aAAa,IAAI,OAAO,UAAU;AAChC,YAAM,UAAU,MAAMA,UAAS,MAAM,UAAU,OAAO;AACtD,YAAM,SAAS,sBAAsB,OAAO;AAC5C,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS,SAAS,OAAO,OAAO;AAAA,MAClC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,KAAK;AAAA,EACP;AACF;AAKA,eAAsB,qBACpB,QACA,aACA,kBAC6B;AAC7B,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,gBAAgB,YAAY,IAAI,OAAK;AACzC,QAAI,EAAE,WAAW,gBAAgB,GAAG;AAClC,aAAOG,UAAS,kBAAkB,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,gBAAgB,OAAO,OAAO,WAAS;AAE3C,QAAI,MAAM,eAAe,MAAM,aAAa,UAAU;AACpD,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,GAAG;AAC5C,aAAO;AAAA,IACT;AAGA,WAAO,cAAc;AAAA,MAAK,UACxB,MAAM,MAAM,KAAK,aAAW,UAAU,MAAM,SAAS,EAAE,WAAW,KAAK,CAAC,CAAC;AAAA,IAC3E;AAAA,EACF,CAAC;AAGD,QAAM,qBAAyC,MAAM,QAAQ;AAAA,IAC3D,cAAc,IAAI,OAAO,UAAU;AACjC,YAAM,UAAU,MAAMH,UAAS,MAAM,UAAU,OAAO;AACtD,YAAM,SAAS,sBAAsB,OAAO;AAC5C,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS,SAAS,OAAO,OAAO;AAAA,QAChC,UAAU;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAsB,aAAa,cAAqD;AACtF,MAAI,CAAC,gBAAgB,CAACI,YAAW,YAAY,GAAG;AAC9C,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,MAAMJ,UAAS,cAAc,OAAO;AACpD,SAAO;AACT;AAKA,eAAsB,iBACpB,WACA,aACkC;AAClC,QAAM,YAAY,MAAM,cAAc,WAAW;AACjD,QAAM,QAAQ,UAAU;AAAA,IACtB,CAAC,MAAM,EAAE,KAAK,YAAY,MAAM,UAAU,YAAY;AAAA,EACxD;AAEA,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,MAAMA,UAAS,MAAM,UAAU,OAAO;AACtD,QAAM,SAAS,sBAAsB,OAAO;AAE5C,SAAO;AAAA,IACL,GAAG;AAAA,IACH,SAAS,SAAS,OAAO,OAAO;AAAA,EAClC;AACF;AAKO,SAAS,uBAAuB,QAAyB;AAE9D,QAAM,iBAAiB,OAAO,OAAO,OAAK,CAAC,EAAE,eAAe,EAAE,aAAa,QAAQ;AAEnF,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,CAAC,8DAA8D;AAC7E,aAAW,SAAS,gBAAgB;AAClC,UAAM,WAAW,MAAM,OAAO,SAAS,qBAAqB,MAAM,MAAM,KAAK,IAAI,CAAC,MAAM;AACxF,UAAM,KAAK,KAAK,MAAM,IAAI,KAAK,MAAM,WAAW,GAAG,QAAQ,EAAE;AAAA,EAC/D;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAKO,SAAS,yBAAyB,QAAoC;AAC3E,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,WAAqB,CAAC;AAE5B,aAAW,SAAS,QAAQ;AAC1B,aAAS,KAAK,OAAO,MAAM,IAAI;AAAA;AAAA,EAAO,MAAM,OAA
O,EAAE;AAAA,EACvD;AAEA,SAAO;AAAA;AAAA,EAA+C,SAAS,KAAK,aAAa,CAAC;AACpF;AAKO,SAAS,wBAAwB,QAAoC;AAC1E,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,WAAqB,CAAC;AAE5B,aAAW,SAAS,QAAQ;AAC1B,aAAS,KAAK,OAAO,MAAM,IAAI;AAAA;AAAA,EAAO,MAAM,OAAO,EAAE;AAAA,EACvD;AAEA,SAAO;AAAA;AAAA,EAAqE,SAAS,KAAK,aAAa,CAAC;AAC1G;AAKO,SAAS,sBAAsB,SAAgC;AACpE,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AAEA,SAAO;AAAA;AAAA,EAA0C,OAAO;AAC1D;AAxcA;AAAA;AAAA;AAIA;AAAA;AAAA;;;ACJA,IAAAK,cAAA;AAAA;AAAA;AAAA;AAAA;;;ACKA,SAAS,gBAAgB;AAMlB,SAAS,gBAAgB,kBAAyC;AACvE,MAAI;AACF,UAAM,SAAS,SAAS,6BAA6B;AAAA,MACnD,KAAK;AAAA,MACL,UAAU;AAAA,MACV,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,IAChC,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAUO,SAAS,kBAAkB,KAAmD;AAEnF,QAAM,WAAW,IAAI,QAAQ,UAAU,EAAE;AAGzC,QAAM,WAAW,SAAS,MAAM,0BAA0B;AAC1D,MAAI,UAAU;AACZ,WAAO,EAAE,KAAK,SAAS,CAAC,GAAG,MAAM,SAAS,CAAC,EAAE;AAAA,EAC/C;AAGA,QAAM,aAAa,SAAS,MAAM,kCAAkC;AACpE,MAAI,YAAY;AACd,WAAO,EAAE,KAAK,WAAW,CAAC,GAAG,MAAM,WAAW,CAAC,EAAE;AAAA,EACnD;AAGA,QAAM,gBAAgB,SAAS,MAAM,+BAA+B;AACpE,MAAI,eAAe;AACjB,WAAO,EAAE,KAAK,cAAc,CAAC,GAAG,MAAM,cAAc,CAAC,EAAE;AAAA,EACzD;AAEA,SAAO;AACT;AASA,SAAS,qBAAqB,KAAqB;AACjD,SAAO,IACJ,YAAY,EACZ,QAAQ,cAAc,GAAG,EACzB,QAAQ,YAAY,EAAE,EACtB,QAAQ,OAAO,GAAG;AACvB;AAUA,eAAsB,iBACpB,kBACA,qBACwB;AAExB,MAAI,qBAAqB;AACvB,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,gBAAgB,gBAAgB;AAClD,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,kBAAkB,SAAS;AAC1C,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAGA,QAAM,MAAM,qBAAqB,OAAO,GAAG;AAC3C,QAAM,OAAO,qBAAqB,OAAO,IAAI;AAC7C,SAAO,eAAe,GAAG,IAAI,IAAI;AACnC;AAzGA;AAAA;AAAA;AAAA;AAAA;;;ACKA,SAAS,cAAAC,mBAAkB;AAL3B;AAAA;AAAA;AAAA;AAAA;;;ACMA,SAAS,WAAAC,UAAS,YAAAC,iBAAgB;AANlC;AAAA;AAAA;AAQA;AAAA;AAAA;;;ACyCO,SAAS,iBAAiB,WAAmB,KAAa;AAC/D,EAAAC,mBAAkB,UAAU,QAAQ,OAAO,EAAE;AAC7C,EAAAC,WAAU;AACZ;AAKO,SAAS,2BAAoC;AAClD,SAAO,CAAC,CAACD,oBAAmB,CAAC,CAACC;AAChC;AAKA,eAAe,UACb,MACA,UAA+C,CAAC,GACpC;AACZ,MAAI,CAACD,oBAAmB,CAACC,UAAS;AAChC,UAAM,IAAI,MAAM,8DAA8D;AAAA,EAChF;AAEA,QAAM,MAAM,GAAGD,gBAAe,WAAW,IAAI;AAC7C,QAAM,
OAAoB;AAAA,IACxB,QAAQ,QAAQ,UAAU;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB,UAAUC,QAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,SAAK,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,EACzC;AAEA,QAAM,WAAW,MAAM,MAAM,KAAK,IAAI;AAEtC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,UAAM,IAAI,MAAM,MAAM,SAAS,QAAQ,SAAS,MAAM,EAAE;AAAA,EAC1D;AAEA,SAAO,SAAS,KAAK;AACvB;AA8EO,SAAS,kBAAuC;AACrD,MAAI,CAAC,yBAAyB,GAAG;AAE/B,QAAI;AACF,YAAM,SAAS,UAAU;AACzB,UAAI,OAAO,qBAAqB,OAAO,OAAO,qBAAqB,SAAS;AAC1E,yBAAiB,OAAO,qBAAqB,KAAK,OAAO,qBAAqB,OAAO;AAAA,MACvF,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,oBAAmC;AAEzD;AAKO,SAAS,4BAAqC;AACnD,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,WAAO,CAAC,EAAE,OAAO,qBAAqB,OAAO,OAAO,qBAAqB;AAAA,EAC3E,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,oBAA4B;AAC1C,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,WAAO,OAAO,sBAAsB;AAAA,EACtC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AA1NA,IA2CID,kBACAC,UAuDS;AAnGb;AAAA;AAAA;AAKA;AAsCA,IAAID,mBAAiC;AACrC,IAAIC,WAAyB;AAuDtB,IAAM,qBAAqB;AAAA,MAChC,YAAY;AAAA;AAAA;AAAA;AAAA,QAIV,MAAM,cACJ,OACA,SAI0B;AAC1B,iBAAO,UAA2B,UAAU;AAAA,YAC1C,QAAQ;AAAA,YACR,MAAM;AAAA,cACJ;AAAA,cACA,WAAW,QAAQ;AAAA,cACnB,gBAAgB,QAAQ;AAAA,YAC1B;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MAEA,QAAQ;AAAA;AAAA;AAAA;AAAA,QAIN,MAAM,aACJ,OACA,SAMuB;AACvB,iBAAO,UAAwB,WAAW;AAAA,YACxC,QAAQ;AAAA,YACR,MAAM;AAAA,cACJ;AAAA,cACA,WAAW,QAAQ;AAAA,cACnB,MAAM,QAAQ,QAAQ;AAAA,cACtB,gBAAgB,QAAQ;AAAA,YAC1B;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,gBAAgB,WAAkC;AACtD,cAAM,UAAU,cAAc,mBAAmB,SAAS,CAAC,IAAI;AAAA,UAC7D,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,QAAuB;AAAA,MAE7B;AAAA,IACF;AAAA;AAAA;;;AC7JA,SAAS,gBAAAC,eAAc,gBAAgB;AACvC,SAAe,YAAAC,iBAAgB;AAC/B,SAAS,aAAAC,kBAAiB;AAic1B,eAAsB,eAAe,kBAAgD;AACnF,QAAM,SAAS,UAAU;AACzB,QAAM,YAAY,MAAM;AAAA,IACtB;AAAA,IACA,OAAO,sBAAsB;AAAA,EAC/B;AAEA,QAAM,eAAe,OAAO,qBAAqB;AAEjD,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,WAAW;AAAA,MACX,aAAa;AAAA,MACb,eAAe;AAAA,MACf,sBAAs
B;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,UAAM,KAAK,MAAM;AACjB,UAAM,SAAS,MAAM,mBAAmB,IAAI,IAAI,SAAS;AAEzD,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,QACL;AAAA,QACA,aAAa;AAAA,QACb,eAAe;AAAA,QACf,sBAAsB;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,aAAa,OAAO,eAAe;AAAA,MACnC,eAAe,OAAO,iBAAiB;AAAA,MACvC,sBAAsB,OAAO,wBAAwB;AAAA,MACrD;AAAA,IACF;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,MACL;AAAA,MACA,aAAa;AAAA,MACb,eAAe;AAAA,MACf,sBAAsB;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AACF;AAKA,eAAsB,iBAAiB,kBAA4C;AACjF,QAAM,SAAS,MAAM,eAAe,gBAAgB;AACpD,SAAO,OAAO,cAAc;AAC9B;AAjgBA,IAgBMC;AAhBN;AAAA;AAAA;AAQA;AACA;AAEA;AACA;AACA;AAGA,IAAMA,iBAAgB,OAAO;AAAA;AAAA;;;AChB7B;AAAA;AAAA;AAMA,IAAAC;AAGA;AAUA;AASA;AAOA;AAQA;AAAA;AAAA;;;AC3CA;AAAA;AAAA;AAAA;AAKA,SAAS,QAAAC,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,cAAAC,cAAY,gBAAAC,qBAAoB;AACzC,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AA6CnB,SAAS,yBAAyB,SAAoC;AAC3E,SAAOL,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAYb,aAAa;AAAA,IAEb,SAAS,OAAO;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,MAAgF;AAC9E,YAAM,YAAY,KAAK,IAAI;AAE3B,UAAI;AACF,cAAM,SAAS,UAAU;AAEzB,cAAM,YAAY,MAAM;AAAA,UACtB,QAAQ;AAAA,UACR,OAAO,sBAAsB;AAAA,QAC/B;AAEA,YAAI,CAAC,WAAW;AACd,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,cAAM,SAAS,gBAAgB;AAC/B,YAAI,CAAC,QAAQ;AACX,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,cAAc,KAAK,IAAI,KAAK,IAAI,GAAG,IAAI,GAAG,EAAE;AAElD,gBAAM,iBAAiB,kBAAkB;AACzC,gBAAM,SAAS,MAAM,OAAO,OAAO,aAAa,OAAO;AAAA,YACrD;AAAA,YACA,MAAM,cAAc;AAAA,YACpB,iBAAiB;AAAA,YACjB;AAAA,UACF,CAAC;AAED,gBAAM,UAA2B,CAAC;AAElC,qBAAW,SAAS,OAAO,SAAS;AAClC,kBAAM,WAAW,MAAM;AACvB,gBAAI,CAAC,SAAU;AAEf,kBAAM,WAAW,SAAS;AAC1B,kBAAM,YAAY,SAAS;AAC3B,kBAAM,UAAU,SAAS;AACzB,kBAAM,gBAAgB,SAAS;AAC/B,kBAAM,aAAa,SAAS;AAE5B,gBAAI,aAAa;AACf,oBAAM,iBAAiBK,WAAU,UAAU,aAAa,EAAE,KAAK,KAAK,CAAC;AACrE,kBAAI,CAAC,eAAgB;AAAA,YACvB;AAEA,gBAAI,YAAY,kBAAkB,SAAS,YAAY,GAAG;AACxD;AAAA,YACF;AAEA,kBAAM,WAAWD,MAAK,QAAQ,kBAAkB,QAAQ;AACxD,gBAAI,CAACF,aAAW,QAA
Q,GAAG;AACzB;AAAA,YACF;AAEA,gBAAI,UAAU;AACd,gBAAI;AACF,oBAAM,UAAUC,cAAa,UAAU,OAAO;AAC9C,oBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,oBAAM,eAAe,MAAM;AAAA,gBACzB,KAAK,IAAI,GAAG,YAAY,CAAC;AAAA,gBACzB,KAAK,IAAI,MAAM,QAAQ,OAAO;AAAA,cAChC;AACA,wBAAU,aAAa,KAAK,IAAI;AAEhC,kBAAI,QAAQ,SAAS,KAAK;AACxB,0BAAU,QAAQ,MAAM,GAAG,GAAG,IAAI;AAAA,cACpC;AAAA,YACF,QAAQ;AAAA,YAER;AAEA,oBAAQ,KAAK;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,cACA,OAAO,MAAM;AAAA,cACb;AAAA,cACA;AAAA,cACA,UAAU;AAAA,YACZ,CAAC;AAED,gBAAI,QAAQ,UAAU,aAAa;AACjC;AAAA,YACF;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,SAAS;AAAA,YACT;AAAA,YACA;AAAA,YACA,cAAc,QAAQ;AAAA,YACtB,UAAU,KAAK,IAAI,IAAI;AAAA,UACzB;AAAA,QACF,UAAE;AACA,gBAAM,kBAAkB;AAAA,QAC1B;AAAA,MACF,SAAS,OAAO;AACd,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,2BAA2B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC1F;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AA5LA,IAgCM;AAhCN;AAAA;AAAA;AAUA;AAMA;AAgBA,IAAM,4BAA4BF,GAAE,OAAO;AAAA,MACzC,OAAOA,GACJ,OAAO,EACP,SAAS,gEAAgE;AAAA,MAC5E,MAAMA,GACH,OAAO,EACP,SAAS,EACT,QAAQ,EAAE,EACV,SAAS,oDAAoD;AAAA,MAChE,aAAaA,GACV,OAAO,EACP,SAAS,EACT,SAAS,mEAAmE;AAAA,MAC/E,UAAUA,GACP,OAAO,EACP,SAAS,EACT,SAAS,+DAA+D;AAAA,IAC7E,CAAC;AAAA;AAAA;;;ACjDD;AAAA;AAAA;AAAA;AAqBA,eAAsB,YAAY,KAAa,OAAoC;AACjF,MAAI;AACF,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,UAAU,WAAW,MAAM,WAAW,MAAM,GAAG,GAAI;AAEzD,UAAM,MAAM,KAAK;AAAA,MACf,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,uBAAuB,MAAM;AAAA,MAC/B;AAAA,MACA,MAAM,KAAK,UAAU,KAAK;AAAA,MAC1B,QAAQ,WAAW;AAAA,IACrB,CAAC;AAED,iBAAa,OAAO;AAAA,EACtB,QAAQ;AAAA,EAER;AACF;AAxCA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAAO,eAAe;AACtB,SAAS,oBAAoB;AAwPtB,SAAS,iBAAiB,WAAmB,MAAkC;AACpF,QAAM,WAAW,cAAc,IAAI,SAAS;AAC5C,MAAI,UAAU;AACZ,YAAQ,IAAI,mDAAmD,SAAS,eAAe,SAAS,SAAS,GAAG;AAC5G,WAAO;AAAA,EACT;AAEA,UAAQ,IAAI,+CAA+C,SAAS,YAAY,IAAI,qBAAqB,cAAc,IAAI,GAAG;AAC9H,QAAM,QAAQ,IAAI,mBAAmB,IAAI;AACzC,gBAAc,IAAI,WAAW,KAAK;AAClC,QAAM,GAAG,SAAS,MAAM;AACtB,YAAQ,IAAI,yCAAyC,SAAS,0BAA0B;AACxF,kBAAc,OAAO,SAAS;AAAA,EAChC,CAAC;AACD,QAAM,QAAQ;AACd,SAAO;AACT;AAEO,SAAS,SAAS,WAAmD;AA
C1E,SAAO,cAAc,IAAI,SAAS;AACpC;AAEO,SAAS,aAAa,WAAyB;AACpD,QAAM,QAAQ,cAAc,IAAI,SAAS;AACzC,MAAI,OAAO;AACT,YAAQ,IAAI,kDAAkD,SAAS,EAAE;AACzE,UAAM,QAAQ;AACd,kBAAc,OAAO,SAAS;AAAA,EAChC,OAAO;AACL,YAAQ,IAAI,sEAAsE,SAAS,EAAE;AAAA,EAC/F;AACF;AAxRA,IA4DM,oBACA,wBACA,mBAMO,oBAmLP;AAvPN;AAAA;AAAA;AA4DA,IAAM,qBAAqB;AAC3B,IAAM,yBAAyB;AAC/B,IAAM,oBAAoB;AAMnB,IAAM,qBAAN,cAAiC,aAAa;AAAA,MAC3C,KAAuB;AAAA,MACvB;AAAA,MACA,oBAAoB;AAAA,MACpB,iBAAuD;AAAA,MACvD,YAAY;AAAA,MACZ,gBAAgB;AAAA,MAChB,eAAoC;AAAA,MACpC,aAAa;AAAA,MAErB,YAAY,MAAc;AACxB,cAAM;AACN,aAAK,OAAO;AAAA,MACd;AAAA,MAEA,IAAI,YAAqB;AACvB,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,IAAI,cAAmC;AACrC,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,UAAgB;AACd,YAAI,KAAK,UAAW;AACpB,gBAAQ,IAAI,0CAA0C,KAAK,IAAI,EAAE;AACjE,aAAK,UAAU;AAAA,MACjB;AAAA,MAEQ,YAAkB;AACxB,YAAI,KAAK,UAAW;AAEpB,cAAM,MAAM,kBAAkB,KAAK,IAAI;AACvC,gBAAQ,IAAI,mDAAmD,GAAG,aAAa,KAAK,oBAAoB,CAAC,IAAI,sBAAsB,GAAG;AACtI,YAAI;AACF,eAAK,KAAK,IAAI,UAAU,GAAG;AAAA,QAC7B,SAAS,KAAK;AACZ,kBAAQ,KAAK,gDAAgD,GAAG,KAAK,GAAG;AACxE,eAAK,kBAAkB;AACvB;AAAA,QACF;AAEA,aAAK,GAAG,GAAG,QAAQ,MAAM;AACvB,kBAAQ,IAAI,6BAA6B,GAAG,WAAW,KAAK,iBAAiB,WAAW;AACxF,eAAK,oBAAoB;AACzB,eAAK,aAAa;AAAA,QAIpB,CAAC;AAED,aAAK,GAAG,GAAG,WAAW,CAAC,QAAyB;AAC9C,cAAI;AACF,kBAAM,MAAM,KAAK,MAAM,OAAO,QAAQ,WAAW,MAAM,IAAI,SAAS,MAAM,CAAC;AAC3E,iBAAK,cAAc,GAAG;AAAA,UACxB,SAAS,KAAK;AACZ,oBAAQ,KAAK,uCAAuC,GAAG,KAAK,GAAG;AAAA,UACjE;AAAA,QACF,CAAC;AAED,aAAK,GAAG,GAAG,SAAS,CAAC,MAAM,WAAW;AACpC,gBAAM,eAAe,KAAK;AAC1B,eAAK,aAAa;AAClB,kBAAQ,IAAI,wCAAwC,IAAI,YAAY,QAAQ,SAAS,KAAK,EAAE,kBAAkB,YAAY,cAAc,KAAK,SAAS,EAAE;AACxJ,cAAI,cAAc;AAChB,iBAAK,KAAK,UAAU,EAAE,WAAW,OAAO,eAAe,MAAM,CAAyB;AAAA,UACxF;AACA,cAAI,CAAC,KAAK,WAAW;AACnB,iBAAK,kBAAkB;AAAA,UACzB;AAAA,QACF,CAAC;AAED,aAAK,GAAG,GAAG,SAAS,CAAC,QAAQ;AAC3B,kBAAQ,KAAK,wCAAwC,KAAK,IAAI,KAAK,IAAI,OAAO;AAAA,QAChF,CAAC;AAAA,MACH;AAAA,MAEQ,aAAa;AAAA,MACb,iBAAiB;AAAA,MACjB,mBAAmB;AAAA,MAEnB,cAAc,KAAgB;AACpC,YAAI,IAAI,SAAS,SAAS;AACxB,gBAAM,MAAM,KAAK,IAAI;AACrB,cAAI,MAAM,KAAK,gBAAgB,mBAAmB;AAChD,iBAAK;AACL;AAAA,UACF;AACA,eAAK,gBAAgB;A
ACrB,eAAK;AAGL,cAAI,MAAM,KAAK,mBAAmB,KAAM;AACtC,oBAAQ,IAAI,qCAAqC,KAAK,UAAU,cAAc,KAAK,cAAc,cAAc,KAAK,cAAc,OAAO,CAAC,aAAa,IAAI,MAAM,UAAU,CAAC,EAAE;AAC9K,iBAAK,mBAAmB;AAAA,UAC1B;AAEA,gBAAM,QAAsB;AAAA,YAC1B,MAAM,IAAI;AAAA,YACV,UAAU,IAAI,YAAY;AAAA,cACxB,aAAa;AAAA,cACb,cAAc;AAAA,cACd,iBAAiB;AAAA,cACjB,WAAW;AAAA,cACX,eAAe;AAAA,cACf,eAAe;AAAA,YACjB;AAAA,YACA,WAAW;AAAA,UACb;AACA,eAAK,eAAe;AACpB,eAAK,KAAK,SAAS,KAAK;AAAA,QAC1B,WAAW,IAAI,SAAS,UAAU;AAChC,kBAAQ,IAAI,yCAAyC,KAAK,UAAU,GAAG,CAAC;AACxE,eAAK,KAAK,UAAU;AAAA,YAClB,WAAW,IAAI,aAAa;AAAA,YAC5B,eAAe,IAAI,iBAAiB;AAAA,YACpC,eAAe,IAAI;AAAA,YACnB,gBAAgB,IAAI;AAAA,UACtB,CAAyB;AAAA,QAC3B,OAAO;AACL,kBAAQ,IAAI,sCAAsC,IAAI,IAAI,EAAE;AAAA,QAC9D;AAAA,MACF;AAAA,MAEQ,oBAA0B;AAChC,YAAI,KAAK,aAAa,KAAK,qBAAqB,wBAAwB;AACtE,kBAAQ,IAAI,kDAAkD,KAAK,SAAS,aAAa,KAAK,iBAAiB,IAAI,sBAAsB,EAAE;AAC3I,eAAK,KAAK,OAAO;AACjB;AAAA,QACF;AACA,aAAK;AAGL,cAAM,QAAQ,KAAK,qBAAqB,IACpC,qBACA,sBAAsB,KAAK,oBAAoB;AACnD,gBAAQ,IAAI,wCAAwC,KAAK,eAAe,KAAK,iBAAiB,IAAI,sBAAsB,GAAG;AAC3H,aAAK,iBAAiB,WAAW,MAAM,KAAK,UAAU,GAAG,KAAK;AAAA,MAChE;AAAA;AAAA;AAAA;AAAA,MAKA,YAAY,OAAgC;AAC1C,YAAI,KAAK,IAAI,eAAe,UAAU,MAAM;AAC1C,eAAK,GAAG,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,QACpC;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,gBAAsB;AACpB,YAAI,KAAK,IAAI,eAAe,UAAU,MAAM;AAC1C,kBAAQ,IAAI,wDAAwD;AACpE,eAAK,GAAG,KAAK,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,QACjD;AAAA,MACF;AAAA,MAEA,UAAgB;AACd,gBAAQ,IAAI,0CAA0C,KAAK,IAAI,aAAa,KAAK,UAAU,sBAAsB,KAAK,cAAc,GAAG;AACvI,aAAK,YAAY;AACjB,YAAI,KAAK,gBAAgB;AACvB,uBAAa,KAAK,cAAc;AAChC,eAAK,iBAAiB;AAAA,QACxB;AACA,YAAI,KAAK,IAAI;AACX,eAAK,GAAG,mBAAmB;AAC3B,eAAK,GAAG,MAAM;AACd,eAAK,KAAK;AAAA,QACZ;AACA,aAAK,aAAa;AAClB,aAAK,mBAAmB;AAAA,MAC1B;AAAA,IACF;AAGA,IAAM,gBAAgB,oBAAI,IAAgC;AAAA;AAAA;;;ACvP1D;AAAA;AAAA;AAAA;AAAA,SAAS,QAAAK,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,aAAAC,YAAW,SAAAC,QAAO,YAAAC,YAAU,UAAAC,SAAQ,WAAAC,UAAS,UAAU;AAChE,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAc;AACvB,SAAS,UAAAC,eAAc;AAiHvB,eAAe,cAAgC;AAC7C,MAAI;AACF,UAAMC,WAAU,mBAAmB,EAAE,SAAS,IAAK,CAAC;AACpD,WAAO
;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAe,QAAQ,KAA4B;AACjD,MAAI;AACF,UAAM,GAAG,KAAK,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAChD,QAAQ;AAAA,EAER;AACF;AArIA,IAQMA,YAOO;AAfb;AAAA;AAAA;AAQA,IAAMA,aAAYR,WAAUD,KAAI;AAOzB,IAAM,gBAAN,MAAoB;AAAA,MACjB,SAA0B,CAAC;AAAA,MAC3B,YAA2B;AAAA,MAC3B,YAAY;AAAA,MACZ;AAAA,MAER,YAAY,WAAmB;AAC7B,aAAK,YAAY;AAAA,MACnB;AAAA,MAEA,IAAI,cAAuB;AACzB,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,IAAI,aAAqB;AACvB,eAAO,KAAK,OAAO;AAAA,MACrB;AAAA,MAEA,QAAc;AACZ,aAAK,SAAS,CAAC;AACf,aAAK,YAAY,KAAK,IAAI;AAC1B,aAAK,YAAY;AAAA,MACnB;AAAA,MAEA,SAAS,OAA2B;AAClC,YAAI,CAAC,KAAK,UAAW;AACrB,aAAK,OAAO,KAAK;AAAA,UACf,MAAM,OAAO,KAAK,MAAM,MAAM,QAAQ;AAAA,UACtC,WAAW,MAAM;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,MAEA,OAAa;AACX,aAAK,YAAY;AAAA,MACnB;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAM,SAA8D;AAClE,YAAI,KAAK,OAAO,WAAW,EAAG,QAAO;AAErC,cAAM,UAAUO,MAAK,OAAO,GAAG,yBAAyBC,QAAO,CAAC,CAAC,EAAE;AACnE,cAAML,OAAM,SAAS,EAAE,WAAW,KAAK,CAAC;AAExC,YAAI;AAEF,mBAAS,IAAI,GAAG,IAAI,KAAK,OAAO,QAAQ,KAAK;AAC3C,kBAAM,YAAYI,MAAK,SAAS,SAAS,OAAO,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,MAAM;AACzE,kBAAML,WAAU,WAAW,KAAK,OAAO,CAAC,EAAE,IAAI;AAAA,UAChD;AAGA,gBAAM,YAAY,KAAK,OAAO,KAAK,OAAO,SAAS,CAAC,EAAE,YAAY,KAAK,OAAO,CAAC,EAAE,aAAa;AAC9F,gBAAM,MAAM,WAAW,IAAI,KAAK,MAAM,KAAK,OAAO,SAAS,QAAQ,IAAI;AACvE,gBAAM,aAAa,KAAK,IAAI,GAAG,KAAK,IAAI,KAAK,EAAE,CAAC;AAEhD,gBAAM,aAAaK,MAAK,SAAS,aAAa,KAAK,SAAS,MAAM;AAGlE,gBAAM,YAAY,MAAM,YAAY;AACpC,cAAI,WAAW;AACb,kBAAME;AAAA,cACJ,wBAAwB,UAAU,QAAQF,MAAK,SAAS,gBAAgB,CAAC,yDAErE,UAAU;AAAA,cACd,EAAE,SAAS,KAAQ;AAAA,YACrB;AAAA,UACF,OAAO;AAGL,oBAAQ,KAAK,0DAA0D;AACvE,kBAAM,QAAQ,OAAO;AACrB,mBAAO;AAAA,UACT;AAEA,gBAAM,YAAY,MAAMH,WAAS,UAAU;AAG3C,gBAAM,QAAQ,MAAME,SAAQ,OAAO;AACnC,qBAAW,KAAK,OAAO;AACrB,gBAAI,EAAE,WAAW,QAAQ,GAAG;AAC1B,oBAAMD,QAAOE,MAAK,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,cAAC,CAAC;AAAA,YAC/C;AAAA,UACF;AAEA,iBAAO,EAAE,MAAM,YAAY,WAAW,UAAU,OAAO;AAAA,QACzD,SAAS,OAAO;AACd,kBAAQ,MAAM,0CAA0C,KAAK;AAC7D,gBAAM,QAAQ,OAAO;AACrB,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA,MAGA,QAAc;AACZ,aAAK,SAAS,CAAC;AACf,aAAK,YAAY;AACjB,aA
AK,YAAY;AAAA,MACnB;AAAA,IACF;AAAA;AAAA;;;ACpHA,OAAO;AACP,SAAS,QAAAG,aAAY;AACrB,SAAS,aAA8B;AACvC,SAAS,YAAY;AACrB,SAAS,cAAc;AACvB,SAAS,cAAAC,cAAY,aAAAC,YAAW,iBAAAC,sBAAqB;AACrD,SAAS,WAAAC,WAAS,WAAAC,UAAS,QAAAC,cAAY;AACvC,SAAS,SAAAC,cAAgC;AACzC,SAAS,gBAAgB,uBAAuB;AAChD,SAAS,iBAAAC,sBAAqB;;;ACF9B;AAPA,SAAS,YAAY;AACrB,SAAS,kBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,cAAAC,cAAY,aAAAC,YAAW,iBAAAC,gBAAe,aAAa,YAAAC,WAAU,kBAAkB;AACxF,SAAS,WAAAC,gBAAe;AACxB,SAAS,QAAAC,OAAM,YAAAC,WAAU,WAAAC,UAAS,YAAAC,iBAAgB;AAClD,SAAS,UAAAC,eAAc;;;ACNvB;AAAA,EACE,cAAAC;AAAA,EACA,gBAAAC;AAAA,EACA,QAAAC;AAAA,EACA,eAAAC;AAAA,OAGK;;;ACPP,SAAS,eAAe;;;ACuBxB,SAAS,gBAAgB,QAAsB;AAC7C,SAAO,OAAO,IAAI,CAAC,QAAa;AAC9B,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG,QAAO;AACxC,WAAO;AAAA,MACL,GAAG;AAAA,MACH,SAAS,IAAI,QAAQ,IAAI,CAAC,SAAc;AACtC,YAAI,KAAK,SAAS,UAAU,KAAK,gBAAgB,YAAY;AAC3D,iBAAO;AAAA,YACL,GAAG;AAAA,YACH,MAAM,OAAO,KAAK,KAAK,IAAI,EAAE,SAAS,QAAQ;AAAA,YAC9C,SAAS;AAAA,UACX;AAAA,QACF;AACA,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAMA,SAAS,iBAAiB,OAAiB;AACzC,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,QAAI,MAAM,gBAAgB,OAAO,MAAM,SAAS,UAAU;AACxD,aAAO,OAAO,KAAK,MAAM,MAAM,QAAQ;AAAA,IACzC;AACA,QAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,aAAO,MAAM,IAAI,gBAAgB;AAAA,IACnC;AACA,UAAM,SAAc,CAAC;AACrB,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1C,aAAO,CAAC,IAAI,iBAAiB,CAAC;AAAA,IAChC;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAKA,SAAS,eAAe,SAA+C;AACrE,QAAM,EAAE,aAAa,GAAG,KAAK,IAAI;AACjC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,EACxC;AACF;AAOO,SAAS,kBACd,SACA,QACA;AACA,QAAM,UAAU,OAAO,IAAI,QAAQ,OAAO,EAAE;AAC5C,QAAM,UAAU;AAAA,IACd,gBAAgB;AAAA,IAChB,iBAAiB,UAAU,OAAO,OAAO;AAAA,EAC3C;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,UAAU;AAAA,IACV;AAAA,IACA,eAAe,CAAC;AAAA,IAEhB,MAAM,WAAW,SAAsB;AACrC,YAAM,MAAM,MAAM,MAAM,GAAG,OAAO,uBAAuB;AAAA,QACvD,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB;AAAA,UACA,SAAS,eAAe,OAAO;AAAA,QACjC,CAAC;AAAA,QACD,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAED,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;
AAC7C,cAAM,SAAS,kBAAkB,IAAI,QAAQ,SAAS,GAAG;AACzD,cAAM,IAAI,MAAM,MAAM;AAAA,MACxB;AAEA,YAAM,SAAS,MAAM,IAAI,KAAK;AAC9B,aAAO,iBAAiB,MAAM;AAAA,IAChC;AAAA,IAEA,MAAM,SAAS,SAAsB;AACnC,YAAM,MAAM,MAAM,MAAM,GAAG,OAAO,qBAAqB;AAAA,QACrD,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB;AAAA,UACA,SAAS,eAAe,OAAO;AAAA,QACjC,CAAC;AAAA,QACD,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAED,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AAC7C,cAAM,SAAS,kBAAkB,IAAI,QAAQ,SAAS,GAAG;AACzD,cAAM,IAAI,MAAM,MAAM;AAAA,MACxB;AAEA,YAAM,SAAS,IAAI,KAAM,UAAU;AACnC,YAAM,UAAU,IAAI,YAAY;AAChC,UAAI,SAAS;AAEb,YAAM,SAAS,IAAI,eAAe;AAAA,QAChC,MAAM,KAAK,YAAY;AACrB,iBAAO,MAAM;AACX,kBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,gBAAI,MAAM;AACR,kBAAI,OAAO,KAAK,GAAG;AACjB,oBAAI;AACF,wBAAM,SAAS,iBAAiB,KAAK,MAAM,OAAO,KAAK,CAAC,CAAC;AACzD,sBAAI,OAAO,SAAS,SAAS;AAC3B,+BAAW,MAAM,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,kBAC1C,OAAO;AACL,+BAAW,QAAQ,MAAM;AAAA,kBAC3B;AAAA,gBACF,QAAQ;AAAA,gBAA4B;AAAA,cACtC;AACA,yBAAW,MAAM;AACjB;AAAA,YACF;AAEA,sBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,kBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,qBAAS,MAAM,IAAI,KAAK;AAExB,uBAAW,QAAQ,OAAO;AACxB,kBAAI,CAAC,KAAK,KAAK,EAAG;AAClB,kBAAI;AACF,sBAAM,SAAS,iBAAiB,KAAK,MAAM,IAAI,CAAC;AAChD,oBAAI,OAAO,SAAS,SAAS;AAC3B,6BAAW,MAAM,IAAI,MAAM,OAAO,KAAK,CAAC;AACxC;AAAA,gBACF;AACA,2BAAW,QAAQ,MAAM;AAAA,cAC3B,QAAQ;AAAA,cAER;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA,SAAS;AACP,iBAAO,OAAO;AAAA,QAChB;AAAA,MACF,CAAC;AAED,YAAM,kBAA0C,CAAC;AACjD,UAAI,QAAQ,QAAQ,CAAC,GAAG,MAAM;AAC5B,YAAI,EAAE,WAAW,aAAa,GAAG;AAC/B,0BAAgB,EAAE,QAAQ,eAAe,EAAE,CAAC,IAAI;AAAA,QAClD;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL;AAAA,QACA,UAAU,OAAO,KAAK,eAAe,EAAE,SAAS,IAC5C,EAAE,SAAS,gBAAgB,IAC3B;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,kBACP,QACA,SACA,MACQ;AACR,QAAM,QAAQ,CAAC,4BAA4B,MAAM,SAAS,OAAO,EAAE;AAEnE,MAAI,KAAK,MAAO,OAAM,KAAK,KAAK,KAAK;AAErC,MAAI,KAAK,SAAS;AAChB,UAAM,IAAI,KAAK;AACf,QAAI,EAAE,KAAM,OAAM,KAAK,QAAQ,EAAE,IAAI,EAAE;AACvC,QAAI,EAAE,cAAc,EAAE,eAAe,OAAQ,OAAM,KAAK,YAAY,EAAE,UAAU,EAAE;AAClF,QAAI,
EAAE,MAAO,OAAM,KAAK,UAAU,EAAE,KAAK,EAAE;AAC3C,QAAI,EAAE,eAAe,OAAQ,OAAM,KAAK,kBAAkB,EAAE,cAAc,KAAK,IAAI,CAAC,EAAE;AAAA,EACxF;AAEA,SAAO,MAAM,KAAK,UAAK;AACzB;;;ADrNA;AAEA,IAAM,mBAAmB;AAMlB,SAAS,iBAAiB,SAA0B;AACzD,QAAM,aAAa,QAAQ,KAAK,EAAE,YAAY;AAC9C,SAAO,WAAW,WAAW,gBAAgB,KAAK,WAAW,WAAW,SAAS;AACnF;AAiBO,SAAS,aAAa,SAAgC;AAC3D,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,QAAI,OAAO,qBAAqB,cAAc;AAC5C,aAAO,kBAAkB,QAAQ,KAAK,GAAG;AAAA,QACvC,KAAK,OAAO,qBAAqB;AAAA,QACjC,SAAS,OAAO,qBAAqB;AAAA,MACvC,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AACA,SAAO,QAAQ,QAAQ,KAAK,CAAC;AAC/B;AAGO,IAAM,kBAAkB;AAAA,EAC7B,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,SAAS;AACX;;;ADxCA;AAMA;AARA,SAAS,KAAAC,WAAS;AAClB,SAAS,UAAAC,eAAc;;;AGVvB,SAAS,YAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;;;ACE1B,IAAM,kBAAkB;AACxB,IAAM,0BAA0B;AAEzB,SAAS,eAAe,MAAsB;AACnD,SAAO,KAAK,KAAK,KAAK,SAAS,eAAe;AAChD;AAEO,SAAS,sBAAsB,UAA6D;AACjG,SAAO,SAAS,OAAO,CAAC,OAAO,QAAQ;AACrC,UAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,WAAO,QAAQ,eAAe,OAAO,IAAI;AAAA,EAC3C,GAAG,CAAC;AACN;;;ACjBA,IAAM,mBAAmB;AAKlB,SAAS,eACd,QACA,WAAmB,kBACX;AACR,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,KAAK,MAAM,WAAW,CAAC;AACvC,QAAM,iBAAiB,OAAO,SAAS;AAEvC,SACE,OAAO,MAAM,GAAG,OAAO,IACvB;AAAA;AAAA,kBAAuB,eAAe,eAAe,CAAC;AAAA;AAAA,IACtD,OAAO,MAAM,CAAC,OAAO;AAEzB;AAKO,SAAS,qBAAqB,UAA+C;AAClF,SAAO,SAAS,OAAO,CAAC,OAAO,QAAQ;AACrC,UAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,WAAO,QAAQ,QAAQ;AAAA,EACzB,GAAG,CAAC;AACN;;;ACpBA;AANA,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAC1B,SAAS,OAAO,WAAW,gBAAgB;AAC3C,SAAS,cAAAC,aAAY,aAAAC,kBAAiB;AACtC,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAc;AAGvB,IAAM,YAAY,UAAU,IAAI;AAGhC,IAAM,iBAAiB;AAGvB,IAAM,eAAe;AAoBrB,IAAI,qBAAqC;AAKzC,eAAsB,kBAAoC;AACxD,MAAI,uBAAuB,MAAM;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,SAAS;AAC5C,yBAAqB;AAErB,WAAO;AAAA,EACT,SAAS,OAAO;AACd,yBAAqB;AACrB,YAAQ,IAAI,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe,EAAE;AAC/F,WAAO;AAAA,EACT;AACF;AAMO,SAAS,qBAA6B;AAE3C,SAAO,MAAM,OAAO
,CAAC;AACvB;AAKO,SAAS,eAAe,YAA4B;AACzD,SAAO,GAAG,cAAc,GAAG,UAAU;AACvC;AAMA,SAAS,qBAA6B;AACpC,QAAM,aAAa,oBAAoB;AAEvC,MAAI,CAACF,YAAW,UAAU,GAAG;AAC3B,IAAAC,WAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC3C;AACA,SAAO;AACT;AAKO,SAAS,UAAU,YAAoB,mBAA2B,WAA4B;AACnG,QAAM,UAAU,mBAAmB;AACnC,MAAI,WAAW;AAEb,WAAOC,MAAK,SAAS,cAAc,WAAW,aAAa,UAAU;AAAA,EACvE;AAEA,SAAOA,MAAK,SAAS,aAAa,UAAU;AAC9C;AAKA,SAAS,YAAY,KAAqB;AAExC,SAAO,IAAI,IAAI,QAAQ,MAAM,OAAO,CAAC;AACvC;AAKA,eAAe,WAAW,YAAoB,MAAoB,kBAA2C;AAC3G,QAAM,SAAS,UAAU,YAAY,kBAAkB,KAAK,SAAS;AACrE,QAAM,MAAM,QAAQ,EAAE,WAAW,KAAK,CAAC;AACvC,QAAM,UAAUA,MAAK,QAAQ,WAAW,GAAG,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAExE,QAAM,UAAUA,MAAK,QAAQ,YAAY,GAAG,EAAE;AAC9C,SAAO;AACT;AAKA,eAAe,UACb,WACA,SACkB;AAClB,QAAM,EAAE,SAAS,WAAW,IAAI,IAAI;AACpC,QAAM,YAAY,KAAK,IAAI;AAE3B,SAAO,KAAK,IAAI,IAAI,YAAY,SAAS;AACvC,QAAI,MAAM,UAAU,GAAG;AACrB,aAAO;AAAA,IACT;AACA,UAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,QAAQ,CAAC;AAAA,EAChD;AAEA,SAAO;AACT;AAKA,eAAsB,QACpB,SACA,kBACA,SACyB;AACzB,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,iEAAiE;AAAA,EACnF;AACA,QAAM,KAAK,QAAQ,cAAc,mBAAmB;AACpD,QAAM,UAAU,eAAe,EAAE;AACjC,QAAM,SAAS,MAAM,WAAW,IAAI;AAAA,IAClC;AAAA,IACA;AAAA,IACA,KAAK;AAAA,IACL,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,WAAW,QAAQ;AAAA,IACnB,YAAY;AAAA,EACd,GAAG,gBAAgB;AAEnB,QAAM,UAAUA,MAAK,QAAQ,YAAY;AACzC,QAAM,eAAeA,MAAK,QAAQ,WAAW;AAC7C,QAAM,UAAU,QAAQ,WAAW;AAEnC,MAAI;AAGF,UAAM,iBAAiB,IAAI,OAAO,mBAAmB,YAAY,OAAO,CAAC,eAAe,YAAY,YAAY,CAAC;AAGjH,UAAM;AAAA,MACJ,0BAA0B,OAAO,OAAO,YAAY,gBAAgB,CAAC,IAAI,YAAY,cAAc,CAAC;AAAA,MACpG,EAAE,SAAS,IAAK;AAAA,IAClB;AAGA,QAAI;AACF,YAAM;AAAA,QACJ,qBAAqB,OAAO,eAAe,YAAY,OAAO,CAAC;AAAA,QAC/D,EAAE,SAAS,IAAK;AAAA,MAClB;AAAA,IACF,QAAQ;AAAA,IAER;AAGA,UAAM,YAAY,MAAM;AAAA,MACtB,YAAY;AACV,YAAI;AACF,gBAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MACA,EAAE,SAAS,UAAU,IAAI;AAAA,IAC3B;AAEA,QAAI,CAAC,WAAW;AAEd,UAAI;AACF,cAAM,UAAU,wBAAwB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AAAA,MACtE,QAAQ;AAAA,MAER;AAGA,UAAIC,UAAS;AACb,UAAI;AACF,QAAAA,UAAS,MAAM,SAA
S,SAAS,OAAO;AAAA,MAC1C,QAAQ;AAAA,MAER;AAEA,aAAO;AAAA,QACL;AAAA,QACA,QAAQA,QAAO,KAAK;AAAA,QACpB,UAAU;AAAA;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AAIA,UAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,EAAE,CAAC;AAExC,QAAI,SAAS;AACb,QAAI;AACF,eAAS,MAAM,SAAS,SAAS,OAAO;AAAA,IAC1C,QAAQ;AAAA,IAER;AAGA,QAAI,WAAW;AACf,QAAI;AACF,UAAIH,YAAW,YAAY,GAAG;AAC5B,cAAM,cAAc,MAAM,SAAS,cAAc,OAAO;AACxD,mBAAW,SAAS,YAAY,KAAK,GAAG,EAAE,KAAK;AAAA,MACjD;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,OAAO,KAAK;AAAA,MACpB;AAAA,MACA,QAAQ;AAAA,IACV;AAAA,EACF,SAAS,OAAY;AAEnB,QAAI;AACF,YAAM,UAAU,wBAAwB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AAAA,IACtE,QAAQ;AAAA,IAER;AAEA,UAAM;AAAA,EACR;AACF;AAKA,eAAsB,cACpB,SACA,kBACA,SACyB;AACzB,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,uEAAuE;AAAA,EACzF;AACA,QAAM,KAAK,QAAQ,cAAc,mBAAmB;AACpD,QAAM,UAAU,eAAe,EAAE;AACjC,QAAM,SAAS,MAAM,WAAW,IAAI;AAAA,IAClC;AAAA,IACA;AAAA,IACA,KAAK;AAAA,IACL,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,WAAW,QAAQ;AAAA,IACnB,YAAY;AAAA,IACZ,MAAM,QAAQ;AAAA,EAChB,GAAG,gBAAgB;AAEnB,QAAM,UAAUE,MAAK,QAAQ,YAAY;AAGzC,QAAM,iBAAiB,IAAI,OAAO,mBAAmB,YAAY,OAAO,CAAC;AAGzE,QAAM;AAAA,IACJ,0BAA0B,OAAO,OAAO,YAAY,gBAAgB,CAAC,IAAI,YAAY,cAAc,CAAC;AAAA,IACpG,EAAE,SAAS,IAAK;AAAA,EAClB;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,EACV;AACF;AAKA,eAAsB,QACpB,YACA,kBACA,UAAiD,CAAC,GACsB;AACxE,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,SAAS,UAAU,YAAY,kBAAkB,QAAQ,SAAS;AACxE,QAAM,UAAUA,MAAK,QAAQ,YAAY;AAGzC,MAAIE,aAAY;AAChB,MAAI;AACF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,IAAAA,aAAY;AAAA,EACd,QAAQ;AAAA,EAER;AAGA,MAAIA,YAAW;AACb,QAAI;AACF,YAAM,QAAQ,QAAQ,QAAQ;AAC9B,YAAM,EAAE,OAAO,IAAI,MAAM;AAAA,QACvB,wBAAwB,OAAO,WAAW,KAAK;AAAA,QAC/C,EAAE,SAAS,KAAM,WAAW,KAAK,OAAO,KAAK;AAAA,MAC/C;AACA,aAAO,EAAE,QAAQ,OAAO,KAAK,GAAG,QAAQ,UAAU;AAAA,IACpD,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI;AACF,QAAI,SAAS,MAAM,SAAS,SAAS,OAAO;AAE5C,QAAI,QAAQ,MAAM;AAChB,YAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,eAAS,MAAM,MAAM,CAAC,QAAQ,IAAI,EAAE,KAAK,IAAI;AAAA,IAC/C;AAEA,WAAO,EAAE,QAAQ,OAAO,KAAK,GAAG,QAAQA,aAAY,YAAY,UAAU;AAAA,EAC5
E,QAAQ;AACN,WAAO,EAAE,QAAQ,IAAI,QAAQ,UAAU;AAAA,EACzC;AACF;AAKA,eAAsB,UAAU,YAAsC;AACpE,QAAM,UAAU,eAAe,UAAU;AACzC,MAAI;AACF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,aAAa,YAAsC;AACvE,QAAM,UAAU,eAAe,UAAU;AACzC,MAAI;AACF,UAAM,UAAU,wBAAwB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACpE,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,eAAkC;AACtD,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,IAAK;AAAA,IAClB;AAEA,WAAO,OACJ,KAAK,EACL,MAAM,IAAI,EACV,OAAO,UAAQ,KAAK,WAAW,cAAc,CAAC,EAC9C,IAAI,UAAQ,KAAK,MAAM,eAAe,MAAM,CAAC;AAAA,EAClD,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,eAAsB,QAAQ,YAAoB,kBAA0B,WAAkD;AAC5H,QAAM,SAAS,UAAU,YAAY,kBAAkB,SAAS;AAChE,QAAM,WAAWF,MAAK,QAAQ,WAAW;AAEzC,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,qBACpB,WACA,kBACyB;AACzB,QAAM,eAAeA,MAAK,kBAAkB,cAAc,WAAW,WAAW;AAChF,QAAMG,aAA4B,CAAC;AAEnC,MAAI;AACF,UAAM,EAAE,SAAAC,SAAQ,IAAI,MAAM,OAAO,aAAkB;AACnD,UAAM,UAAU,MAAMA,SAAQ,cAAc,EAAE,eAAe,KAAK,CAAC;AAEnE,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,YAAY,GAAG;AACvB,cAAM,OAAO,MAAM,QAAQ,MAAM,MAAM,kBAAkB,SAAS;AAClE,YAAI,MAAM;AACR,UAAAD,WAAU,KAAK,IAAI;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAOA;AACT;AAMA,eAAsB,UAAU,YAAoB,OAAe,UAAoC,CAAC,GAAqB;AAC3H,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,EAAE,aAAa,KAAK,IAAI;AAE9B,MAAI;AAEF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AAGnE,UAAM;AAAA,MACJ,qBAAqB,OAAO,OAAO,YAAY,KAAK,CAAC;AAAA,MACrD,EAAE,SAAS,IAAK;AAAA,IAClB;AAGA,QAAI,YAAY;AACd,YAAM;AAAA,QACJ,qBAAqB,OAAO;AAAA,QAC5B,EAAE,SAAS,IAAK;AAAA,MAClB;AAAA,IACF;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,QAAQ,YAAoB,KAAkH;AAClK,QAAM,UAAU,eAAe,UAAU;AAEzC,MAAI;AACF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,UAAM,UAAU,qBAAqB,OAAO,IAAI,GAAG,IAAI,EAAE,SAAS,IAAK,CAAC;AACxE,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AH3eA,IAAME,aAAYC,WAAUC,KAAI;AAEhC,IAAM,kBAAkB;AACxB,IAAMC,oBAAmB;AAGzB,IAAM,mBAAmB;AAAA,EA
CvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAKA,SAAS,iBAAiB,SAA0B;AAClD,QAAM,oBAAoB,QAAQ,YAAY,EAAE,KAAK;AACrD,SAAO,iBAAiB;AAAA,IAAK,CAAC,YAC5B,kBAAkB,SAAS,QAAQ,YAAY,CAAC;AAAA,EAClD;AACF;AAiBA,IAAM,2BAA2B;AACjC,IAAM,sBAAsB,oBAAI,IAAoB;AACpD,IAAI,iBAAiB;AAErB,SAAS,qBAAqB,WAA2B;AACvD,MAAI,OAAO,oBAAoB,IAAI,SAAS;AAC5C,MAAI,CAAC,MAAM;AACT,WAAO,2BAA2B;AAClC,wBAAoB,IAAI,WAAW,IAAI;AAAA,EACzC;AACA,SAAO;AACT;AAEA,SAAS,uBAAuB,SAA0B;AACxD,SAAO,oBAAoB,KAAK,OAAO;AACzC;AAMA,SAAS,2BAA2B,SAA0B;AAC5D,SAAO,4CAA4C,KAAK,OAAO;AACjE;AAMA,SAAS,wBAAwB,SAAiB,MAAsB;AACtE,SAAO,QAAQ;AAAA,IACb;AAAA,IACA,6BAA6B,IAAI;AAAA,EACnC;AACF;AAGA,IAAM,kBAAkBC,GAAE,OAAO;AAAA,EAC/B,SAASA,GACN,OAAO,EACP,SAAS,EACT,SAAS,4DAA4D;AAAA,EACxE,YAAYA,GACT,QAAQ,EACR,QAAQ,KAAK,EACb,SAAS,uGAAuG;AAAA,EACnH,IAAIA,GACD,OAAO,EACP,SAAS,EACT,SAAS,iFAAiF;AAAA,EAC7F,MAAMA,GACH,QAAQ,EACR,SAAS,EACT,SAAS,sCAAsC;AAAA,EAClD,MAAMA,GACH,OAAO,EACP,SAAS,EACT,SAAS,8DAA8D;AAAA,EAC1E,OAAOA,GACJ,OAAO,EACP,SAAS,EACT,SAAS,2FAA2F;AAAA,EACvG,KAAKA,GACF,KAAK,CAAC,SAAS,UAAU,MAAM,QAAQ,QAAQ,SAAS,OAAO,OAAO,OAAO,KAAK,GAAG,CAAC,EACtF,SAAS,EACT,SAAS,iGAAiG;AAC/G,CAAC;AAKD,IAAI,UAA0B;AAE9B,eAAe,gBAAkC;AAC/C,MAAI,YAAY,MAAM;AACpB,cAAU,MAAW,gBAAgB;AACrC,QAAI,CAAC,SAAS;AACZ,cAAQ,KAAK,qDAAqD;AAAA,IACpE;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAe,aACb,SACA,kBACA,UACiF;AACjF,MAAI;AACF,UAAM,EAAE,QAAQ,OAAO,IAAI,MAAMC,WAAU,SAAS;AAAA,MAClD,KAAK;AAAA,MACL,SAAS;AAAA,MACT,WAAW,KAAK,OAAO;AAAA,IACzB,CAAC;AAED,UAAM,SAAS,eAAe,UAAU,SAAS;AAAA,EAAK,MAAM,KAAK,KAAKC,iBAAgB;AACtF,eAAW,MAAM;AAEjB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA,UAAU;AAAA,IACZ;AAAA,EACF,SAAS,OAAY;AACnB,UAAM,SAAS;AAAA,OACZ,MAAM,UAAU,OAAO,MAAM,SAAS;AAAA,EAAK,MAAM,MAAM,KAAK;AAAA,MAC7DA;AAAA,IACF;AACA,eAAW,UAAU,MAAM,OAAO;AAElC,QAAI,MAAM,QAAQ;AAChB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO,2BAA2B,kBAAkB,GAAI;AAAA,QACxD;AAAA,QACA,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,MAAM;AAAA,MACb;AAAA,MACA,UAAU,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF;AACF;AAEO,SAAS,eAAe,SAA0B;AACvD,SAAO,KAAK;AAAA,IACV,aAAa;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IA+Bb,aAAa;AAAA,IAEb,SAAS,OAAO,cAAyB;AACvC,YAAM,EAAE,SAAS,YAAY,IAAI,MAAM,MAAM,OAAO,WAAW,IAAI,IAAI;AAGvE,UAAI,IAAI;AAEN,YAAI,MAAM;AACR,gBAAM,UAAU,MAAW,aAAa,EAAE;AAC1C,iBAAO;AAAA,YACL;AAAA,YACA;AAAA,YACA,QAAQ,UAAU,YAAY;AAAA,YAC9B,SAAS,UAAU,YAAY,EAAE,aAAa,YAAY,EAAE;AAAA,UAC9D;AAAA,QACF;AAGA,YAAI,cAAc,QAAW;AAC3B,gBAAM,UAAU,MAAW,UAAU,IAAI,WAAW,EAAE,YAAY,KAAK,CAAC;AACxE,cAAI,CAAC,SAAS;AACZ,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,YAAY,EAAE;AAAA,YACvB;AAAA,UACF;AAGA,gBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AACzC,gBAAM,EAAE,QAAAC,SAAQ,QAAAC,QAAO,IAAI,MAAW,QAAQ,IAAI,QAAQ,kBAAkB,EAAE,MAAM,QAAQ,IAAI,WAAW,QAAQ,UAAU,CAAC;AAC9H,gBAAMC,mBAAkB,eAAeF,SAAQD,iBAAgB;AAE/D,iBAAO;AAAA,YACL,SAAS;AAAA,YACT;AAAA,YACA,QAAQG;AAAA,YACR,QAAAD;AAAA,YACA,SAAS,eAAe,SAAS;AAAA,UACnC;AAAA,QACF;AAGA,YAAI,KAAK;AACP,gBAAM,UAAU,MAAW,QAAQ,IAAI,GAAG;AAC1C,cAAI,CAAC,SAAS;AACZ,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,YAAY,EAAE;AAAA,YACvB;AAAA,UACF;AAGA,gBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AACzC,gBAAM,EAAE,QAAAD,SAAQ,QAAAC,QAAO,IAAI,MAAW,QAAQ,IAAI,QAAQ,kBAAkB,EAAE,MAAM,QAAQ,IAAI,WAAW,QAAQ,UAAU,CAAC;AAC9H,gBAAMC,mBAAkB,eAAeF,SAAQD,iBAAgB;AAE/D,iBAAO;AAAA,YACL,SAAS;AAAA,YACT;AAAA,YACA,QAAQG;AAAA,YACR,QAAAD;AAAA,YACA,SAAS,aAAa,GAAG;AAAA,UAC3B;AAAA,QACF;AAGA,cAAM,EAAE,QAAQ,OAAO,IAAI,MAAW,QAAQ,IAAI,QAAQ,kBAAkB,EAAE,MAAM,WAAW,QAAQ,UAAU,CAAC;AAClH,cAAM,kBAAkB,eAAe,QAAQF,iBAAgB;AAE/D,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAGA,UAAI,CAAC,SAAS;AACZ,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO;AAAA,QACT;AAAA,MACF;AAGA,UAAI,iBAAiB,OAAO,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,MACF;AAKA,UAAI,gBAAgB;AACpB,YAAM,kBAAkB,uBAAuB,OAAO;AACtD,YAAM,eAAe,2BAA2B,OAAO;AACvD,UAAI;AAEJ,UAAI,iBAAiB;AACnB,sBAAc,qBAAqB,QAAQ,SAAS;AACpD,YAAI,CAAC,cAAc;AACjB,0BAAgB,wBAAwB,SAAS,WAAW;AAAA,QAC9D;AAAA,MACF;AAGA,Y
AAM,aAAa,MAAM,cAAc;AAEvC,UAAI,YAAY;AAEd,YAAI,CAAC,YAAY;AACf,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAGA,cAAM,aAAkB,mBAAmB;AAC3C,gBAAQ,aAAa,EAAE,YAAY,QAAQ,WAAW,SAAS,mBAAmB,YAAY,CAAC;AAE/F,cAAM,SAAS,MAAW,cAAc,eAAe,QAAQ,kBAAkB;AAAA,UAC/E,WAAW,QAAQ;AAAA,UACnB;AAAA,QACF,CAAC;AAED,eAAO;AAAA,UACL,SAAS;AAAA,UACT,IAAI,OAAO;AAAA,UACX,QAAQ;AAAA,UACR,SAAS,+CAA+C,OAAO,EAAE;AAAA,QACnE;AAAA,MACF;AAGA,UAAI,YAAY;AACd,cAAM,aAAkB,mBAAmB;AAC3C,gBAAQ,aAAa,EAAE,YAAY,QAAQ,WAAW,SAAS,mBAAmB,YAAY,CAAC;AAE/F,YAAI;AACF,gBAAM,SAAS,MAAW,QAAQ,eAAe,QAAQ,kBAAkB;AAAA,YACzE,WAAW,QAAQ;AAAA,YACnB,SAAS;AAAA,YACT;AAAA,UACF,CAAC;AAED,gBAAM,kBAAkB,eAAe,OAAO,QAAQA,iBAAgB;AACtE,kBAAQ,WAAW,eAAe;AAElC,kBAAQ,aAAa;AAAA,YACnB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA,mBAAmB;AAAA,YACnB,eAAe,gBAAgB;AAAA,UACjC,CAAC;AAED,iBAAO;AAAA,YACL,SAAS,OAAO,aAAa;AAAA,YAC7B,IAAI,OAAO;AAAA,YACX,QAAQ;AAAA,YACR,UAAU,OAAO;AAAA,YACjB,QAAQ,OAAO;AAAA,UACjB;AAAA,QACF,SAAS,OAAY;AACnB,kBAAQ,aAAa,EAAE,YAAY,QAAQ,aAAa,QAAQ,CAAC;AACjE,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,MAAM;AAAA,YACb,QAAQ;AAAA,YACR,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF,OAAO;AAEL,cAAM,SAAS,MAAM,aAAa,eAAe,QAAQ,kBAAkB,QAAQ,QAAQ;AAC3F,eAAO;AAAA,UACL,SAAS,OAAO;AAAA,UAChB,QAAQ,OAAO;AAAA,UACf,UAAU,OAAO;AAAA,UACjB,OAAO,OAAO;AAAA,QAChB;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AIpZA,SAAS,QAAAI,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,YAAAC,WAAU,YAAY;AAC/B,SAAS,WAAAC,UAAS,UAAU,YAAY,eAAe;AACvD,SAAS,cAAAC,mBAAkB;;;ACA3B;AAJA,OAAO,WAAW;AAClB,SAAS,kBAAkB;AAC3B,SAAS,cAAAC,aAAY,aAAAC,YAAW,gBAAAC,eAAc,iBAAAC,sBAAqB;AACnE,SAAS,QAAAC,aAAY;AAcrB,IAAM,gBAAgB;AACtB,IAAM,iBAAiB,IAAI,OAAO;AAClC,IAAM,iBAAiB;AAEvB,SAAS,cAAsB;AAC7B,QAAM,MAAMA,MAAK,oBAAoB,GAAG,cAAc;AACtD,MAAI,CAACJ,YAAW,GAAG,EAAG,CAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AACxD,SAAO;AACT;AAEA,SAAS,SAAS,QAAwB;AACxC,SAAO,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,OAAO,KAAK;AACzD;AAQA,eAAsB,oBAAoB,QAAgB,WAAqC;AAC7F,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,MAAM,EAAE,SAAS;AAAA,EAC1C,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,QAAM,EAAE,OAAO,OAAO,IAAI;AAC1B,MAAI,CAAC,SAAS,CAAC
,OAAQ,QAAO;AAE9B,QAAM,WAAW,KAAK,IAAI,OAAO,MAAM;AACvC,QAAM,cAAc,WAAW;AAC/B,QAAM,cAAc,OAAO,SAAS;AAEpC,MAAI,CAAC,eAAe,CAAC,YAAa,QAAO;AAEzC,QAAM,MAAM,SAAS,MAAM;AAC3B,QAAM,WAAW,YAAY;AAC7B,QAAM,QAAQ,WAAW,SAAS,KAAK;AACvC,QAAM,MAAM,QAAQ,SAAS;AAC7B,QAAM,YAAYG,MAAK,UAAU,MAAM,GAAG;AAE1C,MAAIJ,YAAW,SAAS,GAAG;AACzB,YAAQ,IAAI,gCAAgC,KAAK,IAAI,MAAM,QAAQ;AACnE,WAAOE,cAAa,SAAS;AAAA,EAC/B;AAEA,MAAI,WAAW,MAAM,MAAM;AAE3B,MAAI,aAAa;AACf,eAAW,SAAS,OAAO,eAAe,eAAe;AAAA,MACvD,KAAK;AAAA,MACL,oBAAoB;AAAA,IACtB,CAAC;AAAA,EACH;AAIA,MAAI;AACJ,MAAI,UAAU,eAAe,OAAO,SAAS,IAAI,OAAO,OAAO;AAC7D,aAAS,MAAM,SAAS,KAAK,EAAE,SAAS,GAAG,CAAC,EAAE,SAAS;AAAA,EACzD,WAAW,OAAO;AAChB,aAAS,MAAM,SAAS,IAAI,EAAE,SAAS;AAAA,EACzC,OAAO;AACL,aAAS,MAAM,SAAS,KAAK,EAAE,SAAS,GAAG,CAAC,EAAE,SAAS;AAAA,EACzD;AAGA,MAAI,OAAO,SAAS,gBAAgB;AAClC,eAAW,WAAW,CAAC,IAAI,IAAI,EAAE,GAAG;AAClC,eAAS,MAAM,MAAM,MAAM,EACxB,OAAO,eAAe,eAAe,EAAE,KAAK,UAAU,oBAAoB,KAAK,CAAC,EAChF,KAAK,EAAE,QAAQ,CAAC,EAChB,SAAS;AACZ,UAAI,OAAO,UAAU,eAAgB;AAAA,IACvC;AAAA,EACF;AAEA,EAAAC,eAAc,WAAW,MAAM;AAE/B,QAAM,aAAa,MAAM,MAAM,MAAM,EAAE,SAAS;AAChD,UAAQ;AAAA,IACN,kBAAkB,KAAK,IAAI,MAAM,OAAO,WAAW,KAAK,IAAI,WAAW,MAAM,MACxE,OAAO,SAAS,MAAM,QAAQ,CAAC,CAAC,UAAU,OAAO,SAAS,MAAM,QAAQ,CAAC,CAAC;AAAA,EACjF;AAEA,SAAO;AACT;;;ADjGA,IAAM,gBAAgB,IAAI,OAAO;AACjC,IAAM,iBAAiB,KAAK,OAAO;AACnC,IAAME,oBAAmB;AAEzB,IAAM,mBAA2C;AAAA,EAC/C,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AACX;AAEA,SAAS,YAAY,UAA2B;AAC9C,SAAO,QAAQ,QAAQ,EAAE,YAAY,KAAK;AAC5C;AAEA,SAAS,kBAAkB,UAA0B;AACnD,SAAO,iBAAiB,QAAQ,QAAQ,EAAE,YAAY,CAAC,KAAK;AAC9D;AAMA,IAAM,sBAAsBC,GAAE,OAAO;AAAA,EACnC,MAAMA,GACH,OAAO,EACP,SAAS,6IAA6I;AAAA,EACzJ,WAAWA,GACR,OAAO,EACP,SAAS,EACT,SAAS,iFAAiF;AAAA,EAC7F,SAASA,GACN,OAAO,EACP,SAAS,EACT,SAAS,yFAAyF;AACvG,CAAC;AAEM,SAAS,mBAAmB,SAA8B;AAC/D,SAAOC,MAAK;AAAA,IACV,aAAa,kFAAkF,QAAQ,gBAAgB;AAAA;AAAA;AAAA;AAAA,IAKvH,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,QAAQ,MAA2C;AAC9F,UAAI;AACF,cAAM,eAAe,WAAW,QAAQ,IACpC,WACAC,SAAQ,QAAQ,kBAAkB,QAAQ;AAE9C,cAAM,eAAe,SAAS,QAAQ,kBAAkB,YAAY;AACpE,Y
AAI,aAAa,WAAW,IAAI,KAAK,CAAC,WAAW,QAAQ,GAAG;AAC1D,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,UACX;AAAA,QACF;AAEA,YAAI,CAACC,YAAW,YAAY,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,mBAAmB,QAAQ;AAAA,YAClC,SAAS;AAAA,UACX;AAAA,QACF;AAEA,cAAM,QAAQ,MAAM,KAAK,YAAY;AAErC,YAAI,MAAM,YAAY,GAAG;AACvB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,UACX;AAAA,QACF;AAGA,YAAI,YAAY,YAAY,GAAG;AAC7B,cAAI,MAAM,OAAO,gBAAgB;AAC/B,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,wBAAwB,MAAM,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC,wBAAwB,iBAAiB,OAAO,IAAI;AAAA,cACvH,SAAS;AAAA,YACX;AAAA,UACF;AAEA,gBAAM,YAAY,MAAMC,UAAS,YAAY;AAC7C,gBAAM,YAAY,kBAAkB,YAAY;AAChD,gBAAM,SAAS,MAAM,oBAAoB,WAAW,SAAS;AAC7D,gBAAM,SAAS,OAAO,SAAS,QAAQ;AAEvC,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,MAAM;AAAA,YACN,cAAc,SAAS,QAAQ,kBAAkB,YAAY;AAAA,YAC7D,SAAS,WAAW,YAAY,KAAK,SAAS,MAAM,MAAM,OAAO,MAAM,QAAQ,CAAC,CAAC;AAAA,YACjF;AAAA,YACA,WAAW;AAAA,YACX,WAAW,MAAM;AAAA,UACnB;AAAA,QACF;AAGA,YAAI,MAAM,OAAO,eAAe;AAC9B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,uBAAuB,MAAM,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC,wBAAwB,gBAAgB,OAAO,IAAI;AAAA,YACrH,SAAS;AAAA,UACX;AAAA,QACF;AAEA,YAAI,UAAU,MAAMA,UAAS,cAAc,OAAO;AAElD,YAAI,cAAc,UAAa,YAAY,QAAW;AACpD,gBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,gBAAM,SAAS,aAAa,KAAK;AACjC,gBAAM,MAAM,WAAW,MAAM;AAE7B,cAAI,QAAQ,KAAK,SAAS,MAAM,QAAQ;AACtC,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,cAAc,SAAS,8BAA8B,MAAM,MAAM;AAAA,cACxE,SAAS;AAAA,YACX;AAAA,UACF;AAEA,oBAAU,MACP,MAAM,OAAO,GAAG,EAChB,IAAI,CAAC,MAAM,QAAQ,IAAI,QAAQ,MAAM,GAAG,SAAS,EAAE,SAAS,CAAC,CAAC,KAAK,IAAI,EAAE,EACzE,KAAK,IAAI;AAAA,QACd;AAEA,cAAM,mBAAmB,eAAe,SAASL,iBAAgB;AACjE,cAAM,eAAe,iBAAiB,SAAS,QAAQ;AAEvD,eAAO;AAAA,UACL,SAAS;AAAA,UACT,MAAM;AAAA,UACN,cAAc,SAAS,QAAQ,kBAAkB,YAAY;AAAA,UAC7D,SAAS;AAAA,UACT,WAAW,QAAQ,MAAM,IAAI,EAAE;AAAA,UAC/B;AAAA,UACA,WAAW,MAAM;AAAA,QACnB;AAAA,MACF,SAAS,OAAY;AACnB,YAAI,MAAM,SAAS,2BAA2B,MAAM,QAAQ,SAAS,UAAU,GAAG;AAChF,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,UACX;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,UACb,SAAS;AAAA,QACX;AAAA,MACF;AAAA
,IACF;AAAA,IAEA,eAAe,CAAC,EAAE,OAAO,MAAM;AAC7B,UAAI,UAAU,OAAO,WAAW,YAAY,eAAe,UAAU,OAAO,WAAW;AACrF,cAAM,SAAS;AACf,eAAO;AAAA,UACL,MAAM;AAAA,UACN,OAAO;AAAA,YACL,EAAE,MAAM,QAAiB,MAAM,OAAO,QAAQ;AAAA,YAC9C,EAAE,MAAM,cAAuB,MAAM,OAAO,WAAW,WAAW,OAAO,UAAU;AAAA,UACrF;AAAA,QACF;AAAA,MACF;AACA,aAAO,OAAO,WAAW,WACrB,EAAE,MAAM,QAAiB,OAAO,OAAO,IACvC,EAAE,MAAM,QAAiB,OAAO,OAAc;AAAA,IACpD;AAAA,EACF,CAAC;AACH;;;AE7LA,SAAS,QAAAM,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,YAAAC,WAAU,aAAAC,YAAW,SAAAC,cAAa;AAC3C,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,WAAAC,gBAAe;AACvD,SAAS,cAAAC,mBAAkB;;;ACQ3B;AALA,SAAS,YAAAC,WAAU,aAAAC,YAAW,QAAQ,SAAAC,cAAa;AACnD,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,WAAAC,UAAS,YAAAC,WAAU,WAAAC,gBAAe;AAC3C,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAW1B,IAAMC,aAAYD,WAAUD,KAAI;AAKhC,eAAe,WAAW,kBAAuD;AAC/E,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAME,WAAU,sBAAsB;AAAA,MACvD,KAAK;AAAA,MACL,SAAS;AAAA,IACX,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAwBA,IAAM,iBAAiB,oBAAI,IAA+B;AAKnD,SAAS,qBAAqB,WAAmB,kBAA6C;AACnG,MAAI,UAAU,eAAe,IAAI,SAAS;AAC1C,MAAI,CAAC,SAAS;AACZ,cAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA,qBAAqB;AAAA,IACvB;AACA,mBAAe,IAAI,WAAW,OAAO;AAAA,EACvC;AACA,SAAO;AACT;AAMA,eAAsB,iBACpB,WACA,kBACA,iBACqB;AAErB,QAAM,UAAU,MAAM,WAAW,gBAAgB;AAGjD,QAAM,aAAa,MAAM,kBAAkB,OAAO;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAGD,QAAM,UAAU,qBAAqB,WAAW,gBAAgB;AAChE,UAAQ,sBAAsB,WAAW;AAEzC,SAAO;AACT;AAMA,eAAsB,WACpB,WACA,kBACA,UAC4B;AAC5B,QAAM,UAAU,qBAAqB,WAAW,gBAAgB;AAEhE,MAAI,CAAC,QAAQ,qBAAqB;AAChC,YAAQ,KAAK,yDAAyD;AACtE,WAAO;AAAA,EACT;AAGA,QAAM,eAAeC,SAAQ,kBAAkB,QAAQ;AACvD,QAAM,eAAeC,UAAS,kBAAkB,YAAY;AAG5D,MAAI,MAAM,kBAAkB,UAAU,QAAQ,qBAAqB,YAAY,GAAG;AAEhF,WAAO;AAAA,EACT;AAGA,MAAI,kBAAiC;AACrC,MAAI,UAAU;AAEd,MAAIC,YAAW,YAAY,GAAG;AAC5B,QAAI;AACF,wBAAkB,MAAMC,UAAS,cAAc,OAAO;AACtD,gBAAU;AAAA,IACZ,SAAS,OAAY;AACnB,cAAQ,KAAK,gDAAgD,MAAM,OAAO,EAAE;AAAA,IAC9E;AAAA,EACF;AAGA,QAAM,SAAS,MAAM,kBAAkB,OAAO;AAAA,IAC5C,cAAc,QAAQ;AAAA,IACtB;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAUA,eAA
sB,mBACpB,WACA,cAQC;AAED,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,MACL,SAAS;AAAA,MACT,eAAe;AAAA,MACf,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,oBAAoB;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,aAAa,MAAM,kBAAkB,QAAQ,YAAY;AAC/D,MAAI,CAAC,cAAc,WAAW,cAAc,WAAW;AACrD,WAAO;AAAA,MACL,SAAS;AAAA,MACT,eAAe;AAAA,MACf,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,oBAAoB;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,mBAAmB,QAAQ;AAKjC,QAAM,kBAAkB,MAAM,kBAAkB,gBAAgB,WAAW,WAAW,eAAe;AAIrG,QAAM,uBAAuB,oBAAI,IAAwB;AACzD,aAAW,UAAU,iBAAiB;AACpC,QAAI,CAAC,qBAAqB,IAAI,OAAO,QAAQ,GAAG;AAC9C,2BAAqB,IAAI,OAAO,UAAU,MAAM;AAAA,IAClD;AAAA,EACF;AAEA,MAAI,gBAAgB;AACpB,MAAI,eAAe;AAGnB,aAAW,CAAC,UAAU,MAAM,KAAK,sBAAsB;AACrD,UAAM,eAAeH,SAAQ,kBAAkB,QAAQ;AAEvD,QAAI;AACF,UAAI,OAAO,WAAW,OAAO,oBAAoB,MAAM;AAErD,cAAM,MAAMI,SAAQ,YAAY;AAChC,YAAI,CAACF,YAAW,GAAG,GAAG;AACpB,gBAAMG,OAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,QACtC;AACA,cAAMC,WAAU,cAAc,OAAO,iBAAiB,OAAO;AAC7D;AAAA,MACF,WAAW,CAAC,OAAO,SAAS;AAE1B,YAAIJ,YAAW,YAAY,GAAG;AAC5B,gBAAM,OAAO,YAAY;AACzB;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAY;AACnB,cAAQ,MAAM,qBAAqB,QAAQ,KAAK,MAAM,OAAO,EAAE;AAAA,IACjE;AAAA,EACF;AAGA,QAAM,kBAAkB,MAAM,eAAe,mBAAmB,WAAW,WAAW,eAAe;AAGrG,QAAM,qBAAqB,gBAAgB,WAAW,WAAW,SAAS;AAG1E,QAAM,qBAAqB,MAAM,kBAAkB,oBAAoB,WAAW,WAAW,eAAe;AAG5G,QAAM,UAAU,qBAAqB,WAAW,gBAAgB;AAChE,UAAQ,sBAAsB,WAAW;AAEzC,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAsB,eAAe,WAA0C;AAC7E,SAAO,kBAAkB,aAAa,SAAS;AACjD;AAKA,eAAsB,eACpB,WAQC;AACD,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,OAAO,CAAC,EAAE;AAAA,EACrB;AAEA,QAAM,mBAAmB,QAAQ;AAGjC,QAAM,aAAa,MAAM,kBAAkB,aAAa,SAAS;AAGjE,QAAM,uBAAuB,oBAAI,IAAwB;AACzD,aAAW,UAAU,YAAY;AAC/B,QAAI,CAAC,qBAAqB,IAAI,OAAO,QAAQ,GAAG;AAC9C,2BAAqB,IAAI,OAAO,UAAU,MAAM;AAAA,IAClD;AAAA,EACF;AAEA,QAAM,QAKD,CAAC;AAEN,aAAW,CAAC,UAAU,cAAc,KAAK,sBAAsB;AAC7D,UAAM,eAAeF,SAAQ,kBAAkB,QAAQ;AAGvD,QAAI,iBAAgC;AACpC,QAAI,gBAAgB;AAEpB,QAAIE,YAAW,YAAY,GAAG;AAC5B,UAAI;AACF,yBAAiB,MAAMC,UAAS,cAAc,OAAO;AACrD,wBA
AgB;AAAA,MAClB,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,QAAI;AACJ,QAAI,CAAC,eAAe,WAAW,eAAe;AAC5C,eAAS;AAAA,IACX,WAAW,eAAe,WAAW,CAAC,eAAe;AACnD,eAAS;AAAA,IACX,OAAO;AACL,eAAS;AAAA,IACX;AAEA,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN;AAAA,MACA,iBAAiB,eAAe;AAAA,MAChC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,MAAM;AACjB;AAKO,SAAS,uBAAuB,WAAyB;AAC9D,iBAAe,OAAO,SAAS;AACjC;;;ACnVA,SAAS,WAAAI,UAAS,WAAAC,gBAAe;;;ACRjC,SAAS,aAAa;AACtB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,WAAAC,UAAS,WAAAC,gBAAe;AAMjC,SAAS,gBAAgB,UAAkB,SAAkC;AAC3E,MAAI,MAAM;AACV,QAAM,OAAO;AAEb,SAAO,QAAQ,MAAM;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAIF,YAAWC,SAAQ,KAAK,MAAM,CAAC,GAAG;AACpC,eAAO;AAAA,MACT;AAAA,IACF;AACA,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK;AACpB,UAAM;AAAA,EACR;AAEA,SAAO;AACT;AAKA,eAAe,cAAc,KAA+B;AAC1D,MAAI;AACF,UAAM,EAAE,MAAAC,MAAK,IAAI,MAAM,OAAO,eAAoB;AAClD,UAAM,EAAE,WAAAC,WAAU,IAAI,MAAM,OAAO,MAAW;AAC9C,UAAMC,aAAYD,WAAUD,KAAI;AAEhC,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,WAAW,YAAY,SAAS,GAAG,KAAK,SAAS,GAAG;AAE1D,UAAME,WAAU,QAAQ;AACxB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAQO,IAAM,mBAAwC;AAAA,EACnD,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,YAAY,CAAC,OAAO,QAAQ,OAAO,QAAQ,QAAQ,QAAQ,QAAQ,MAAM;AAAA,EAEzE,MAAM,MAAM,MAA+C;AAEzD,UAAM,cAAc,gBAAgB,MAAM;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,KAAK;AAGN,UAAM,SAAS,MAAM,cAAc,KAAK;AACxC,UAAM,UAAU,MAAM,cAAc,MAAM;AAC1C,UAAM,UAAU,MAAM,cAAc,MAAM;AAE1C,QAAI;AAEJ,QAAI,SAAS;AACX,YAAM,CAAC,QAAQ,8BAA8B,SAAS;AAAA,IACxD,WAAW,SAAS;AAClB,YAAM,CAAC,QAAQ,8BAA8B,SAAS;AAAA,IACxD,WAAW,QAAQ;AACjB,YAAM,CAAC,OAAO,8BAA8B,SAAS;AAAA,IACvD,OAAO;AACL,cAAQ,KAAK,8EAA8E;AAC3F,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,OAAO,MAAM,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC,GAAG;AAAA,QACvC,KAAK;AAAA,QACL,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,QAC9B,KAAK;AAAA,UACH,GAAG,QAAQ;AAAA;AAAA,UAEX,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAGD,WAAK,QAAQ,GAAG,QAAQ,CAAC,SAAS;AAChC,cAAM,MAAM,KAAK,SAAS,EAAE,KAAK;AACjC,YAAI,OAAO,CAAC,IAAI,SAAS,YAAY,GAAG;AAEtC,kBAAQ,MAAM,2BAA2B,GAAG;AAAA,QAC9C;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT,gBAAgB;AAAA;AAAA,UAEd,aAAa;AAAA,
YACX,gCAAgC;AAAA,YAChC,0CAA0C;AAAA,YAC1C,yCAAyC;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,qDAAqD,KAAK;AACxE,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,IAAM,UAAiC;AAAA,EAC5C;AACF;AAKO,SAAS,sBAAsB,KAAyC;AAC7E,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,WAAW,SAAS,GAAG,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,yBAAmC;AACjD,QAAM,aAAa,oBAAI,IAAY;AACnC,aAAW,UAAU,SAAS;AAC5B,eAAW,OAAO,OAAO,YAAY;AACnC,iBAAW,IAAI,GAAG;AAAA,IACpB;AAAA,EACF;AACA,SAAO,MAAM,KAAK,UAAU;AAC9B;;;ACxJA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,eAAe,qBAAqB;AAC7C,SAAS,YAAAC,iBAAgB;AACzB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,WAAAC,UAAS,iBAAiB;AAMnC,SAAS,cAAc,UAA0B;AAC/C,QAAM,MAAMA,SAAQ,QAAQ,EAAE,YAAY;AAC1C,QAAM,MAA8B;AAAA,IAClC,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,EACZ;AACA,SAAO,IAAI,GAAG,KAAK;AACrB;AAKO,SAAS,cAAc,UAA0B;AACtD,SAAO,UAAU,QAAQ;AAC3B;AAKA,eAAsB,aACpB,UACA,QACA,MACoB;AACpB,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,MAAI,CAAC,KAAK,UAAU,CAAC,KAAK,OAAO;AAC/B,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAGA,QAAM,aAAgC;AAAA,IACpC,IAAI,oBAAoB,KAAK,MAAM;AAAA,IACnC,IAAI,oBAAoB,KAAK,KAAK;AAAA,EACpC;AAGA,QAAM,cAAc,oBAAI,IAA0B;AAGlD,QAAM,eAAe,oBAAI,IAAoB;AAG7C,QAAM,sBAAsB,oBAAI,IAA+B;AAG/D,aAAW,eAAe,mCAAmC,CAAC,WAAgB;AAC5E,UAAM,WAAW,cAAc,cAAc,OAAO,GAAG,CAAC;AACxD,gBAAY,IAAI,UAAU,OAAO,eAAe,CAAC,CAAC;AAGlD,UAAM,YAAY,oBAAoB,IAAI,QAAQ;AAClD,QAAI,WAAW;AACb,iBAAW,YAAY,WAAW;AAChC,iBAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AAGD,aAAW,UAAU,2BAA2B,OAAO,WAAgB;AAErE,WAAO,OAAO,MAAM,IAAI,MAAM,OAAO,kBAAkB,CAAC,CAAC;AAAA,EAC3D,CAAC;AAED,aAAW,UAAU,6BAA6B,YAAY;AAE5D,WAAO;AAAA,EACT,CAAC;AAED,aAAW,UAAU,kCAAkC,YAAY;AAEjE,WAAO;AAAA,EACT,CAAC;AAED,aAAW,eAAe,qBAAqB,CAAC,WAAgB;AAE9D,QAAI,OAAO,QAAQ,GAAG;AACpB,cAAQ,MAAM,QAAQ,QAAQ,KAAK,OAAO,OAAO;AAAA,IACnD;AAAA,EACF,CAAC;AAGD,aAAW,OAAO;AAGlB,QAAM,aAAa,MAAM,WAAW,YAAY,cAAc;AAAA,IAC5D,WAAW,QAAQ;AAAA,IACnB,SAAS,cAAc,IAAI,EAAE;AAAA,IAC7B,UAAU;AAAA,IACV,kBAAkB;AAAA,MAChB;AAAA,QACE,MAAM;
AAAA,QACN,KAAK,cAAc,IAAI,EAAE;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,cAAc;AAAA,MACZ,cAAc;AAAA,QACZ,iBAAiB;AAAA,UACf,qBAAqB;AAAA,UACrB,UAAU;AAAA,UACV,mBAAmB;AAAA,UACnB,SAAS;AAAA,QACX;AAAA,QACA,oBAAoB;AAAA,UAClB,oBAAoB;AAAA,UACpB,gBAAgB;AAAA,UAChB,wBAAwB;AAAA,QAC1B;AAAA,QACA,YAAY;AAAA,UACV,qBAAqB;AAAA,UACrB,gBAAgB;AAAA,YACd,gBAAgB;AAAA,YAChB,qBAAqB,CAAC,YAAY,WAAW;AAAA,UAC/C;AAAA,QACF;AAAA,QACA,OAAO;AAAA,UACL,qBAAqB;AAAA,UACrB,eAAe,CAAC,YAAY,WAAW;AAAA,QACzC;AAAA,QACA,YAAY;AAAA,UACV,qBAAqB;AAAA,QACvB;AAAA,QACA,YAAY;AAAA,UACV,qBAAqB;AAAA,QACvB;AAAA,QACA,gBAAgB;AAAA,UACd,qBAAqB;AAAA,UACrB,mCAAmC;AAAA,UACnC,YAAY;AAAA,YACV,UAAU,CAAC,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,EAAE;AAAA,UACjF;AAAA,QACF;AAAA,MACF;AAAA,MACA,WAAW;AAAA,QACT,eAAe;AAAA,QACf,wBAAwB;AAAA,UACtB,qBAAqB;AAAA,QACvB;AAAA,QACA,uBAAuB;AAAA,UACrB,qBAAqB;AAAA,QACvB;AAAA,QACA,kBAAkB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,uBAAuB,OAAO;AAAA,EAChC,CAAC;AAGD,QAAM,WAAW,iBAAiB,eAAe,CAAC,CAAC;AAGnD,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IAEA,MAAM,WAAW,UAAiC;AAChD,YAAM,aAAa,cAAc,QAAQ;AAEzC,UAAI,CAACD,YAAW,UAAU,GAAG;AAC3B;AAAA,MACF;AAEA,UAAI;AACF,cAAM,UAAU,MAAMD,UAAS,YAAY,OAAO;AAClD,cAAM,WAAW,aAAa,IAAI,UAAU,KAAK,MAAM;AACvD,qBAAa,IAAI,YAAY,OAAO;AAEpC,YAAI,YAAY,GAAG;AAEjB,gBAAM,WAAW,iBAAiB,wBAAwB;AAAA,YACxD,cAAc;AAAA,cACZ,KAAK,cAAc,UAAU,EAAE;AAAA,cAC/B,YAAY,cAAc,UAAU;AAAA,cACpC;AAAA,cACA,MAAM;AAAA,YACR;AAAA,UACF,CAAC;AAAA,QACH,OAAO;AAEL,gBAAM,WAAW,iBAAiB,0BAA0B;AAAA,YAC1D,cAAc;AAAA,cACZ,KAAK,cAAc,UAAU,EAAE;AAAA,cAC/B;AAAA,YACF;AAAA,YACA,gBAAgB,CAAC,EAAE,MAAM,QAAQ,CAAC;AAAA,UACpC,CAAC;AAAA,QACH;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,MAAM,+BAA+B,KAAK;AAAA,MACpD;AAAA,IACF;AAAA,IAEA,MAAM,aAAa,UAAiC;AAClD,YAAM,aAAa,cAAc,QAAQ;AAEzC,UAAI,CAACC,YAAW,UAAU,GAAG;AAC3B;AAAA,MACF;AAEA,UAAI;AACF,cAAM,UAAU,MAAMD,UAAS,YAAY,OAAO;AAClD,cAAM,WAAW,aAAa,IAAI,UAAU,KAAK,KAAK;AACtD,qBAAa,IAAI,YAAY,OAAO;AAEpC,cAAM,WAAW,iBAAiB,0BAA0B;AAAA,UAC1D,cAAc;AAAA,YACZ,KAAK,cAAc,UAAU,EAAE;AAA
A,YAC/B;AAAA,UACF;AAAA,UACA,gBAAgB,CAAC,EAAE,MAAM,QAAQ,CAAC;AAAA,QACpC,CAAC;AAAA,MACH,SAAS,OAAO;AACd,gBAAQ,MAAM,iCAAiC,KAAK;AAAA,MACtD;AAAA,IACF;AAAA,IAEA,MAAM,YAAY,UAAiC;AACjD,YAAM,aAAa,cAAc,QAAQ;AACzC,mBAAa,OAAO,UAAU;AAC9B,kBAAY,OAAO,UAAU;AAE7B,UAAI;AACF,cAAM,WAAW,iBAAiB,yBAAyB;AAAA,UACzD,cAAc;AAAA,YACZ,KAAK,cAAc,UAAU,EAAE;AAAA,UACjC;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAO;AACd,gBAAQ,MAAM,gCAAgC,KAAK;AAAA,MACrD;AAAA,IACF;AAAA,IAEA,MAAM,0BAA0B,SAA8D;AAC5F,UAAI;AACF,cAAM,WAAW,iBAAiB,mCAAmC;AAAA,UACnE;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAO;AACd,gBAAQ,MAAM,wCAAwC,KAAK;AAAA,MAC7D;AAAA,IACF;AAAA,IAEA,MAAM,mBAAmB,UAAkB,YAAY,KAA6B;AAClF,YAAM,aAAa,cAAc,QAAQ;AAEzC,aAAO,IAAI,QAAsB,CAACG,cAAY;AAC5C,cAAM,YAAY,KAAK,IAAI;AAC3B,YAAI;AACJ,YAAI,WAAW;AAEf,cAAMC,WAAU,MAAM;AACpB,cAAI,cAAe,cAAa,aAAa;AAC7C,gBAAM,YAAY,oBAAoB,IAAI,UAAU;AACpD,cAAI,WAAW;AACb,kBAAM,MAAM,UAAU,QAAQ,YAAY;AAC1C,gBAAI,OAAO,EAAG,WAAU,OAAO,KAAK,CAAC;AACrC,gBAAI,UAAU,WAAW,GAAG;AAC1B,kCAAoB,OAAO,UAAU;AAAA,YACvC;AAAA,UACF;AAAA,QACF;AAEA,cAAM,SAAS,MAAM;AACnB,cAAI,SAAU;AACd,qBAAW;AACX,UAAAA,SAAQ;AACR,UAAAD,UAAQ,YAAY,IAAI,UAAU,KAAK,CAAC,CAAC;AAAA,QAC3C;AAEA,cAAM,eAAe,MAAM;AAEzB,cAAI,cAAe,cAAa,aAAa;AAC7C,0BAAgB,WAAW,QAAQ,GAAG;AAAA,QACxC;AAGA,YAAI,CAAC,oBAAoB,IAAI,UAAU,GAAG;AACxC,8BAAoB,IAAI,YAAY,CAAC,CAAC;AAAA,QACxC;AACA,4BAAoB,IAAI,UAAU,EAAG,KAAK,YAAY;AAGtD,mBAAW,MAAM;AACf,cAAI,CAAC,UAAU;AACb,mBAAO;AAAA,UACT;AAAA,QACF,GAAG,SAAS;AAGZ,YAAI,YAAY,IAAI,UAAU,GAAG;AAC/B,uBAAa;AAAA,QACf;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,eAAe,UAAgC;AAC7C,aAAO,YAAY,IAAI,cAAc,QAAQ,CAAC,KAAK,CAAC;AAAA,IACtD;AAAA,IAEA,oBAA+C;AAC7C,aAAO,IAAI,IAAI,WAAW;AAAA,IAC5B;AAAA,IAEA,MAAM,cAAc,UAAkB,MAAc,WAAwC;AAC1F,YAAM,aAAa,cAAc,QAAQ;AAEzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAc,MAAM,WAAW,YAAY,2BAA2B;AAAA,UAC1E,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,UACpD,UAAU,EAAE,MAAM,UAAU;AAAA,QAC9B,CAAC;AACD,YAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,cAAM,QAAQ,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AACtD,eAAO,MAAM,IAAI,CAAC,OAAY;AAAA,UAC5B,KAAK,
EAAE,aAAa,EAAE;AAAA,UACtB,OAAO,EAAE,eAAe,EAAE;AAAA,QAC5B,EAAE;AAAA,MACJ,SAAS,OAAO;AACd,gBAAQ,MAAM,mCAAmC,KAAK;AACtD,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,cAAc,UAAkB,MAAc,WAAmB,qBAAqB,OAA4B;AACtH,YAAM,aAAa,cAAc,QAAQ;AACzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAS,MAAM,WAAW,YAAY,2BAA2B;AAAA,UACrE,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,UACpD,UAAU,EAAE,MAAM,UAAU;AAAA,UAC5B,SAAS,EAAE,mBAAmB;AAAA,QAChC,CAAC;AACD,eAAQ,UAAyB,CAAC;AAAA,MACpC,SAAS,OAAO;AACd,gBAAQ,MAAM,mCAAmC,KAAK;AACtD,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,SAAS,UAAkB,MAAc,WAA2C;AACxF,YAAM,aAAa,cAAc,QAAQ;AACzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAc,MAAM,WAAW,YAAY,sBAAsB;AAAA,UACrE,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,UACpD,UAAU,EAAE,MAAM,UAAU;AAAA,QAC9B,CAAC;AACD,YAAI,CAAC,UAAU,CAAC,OAAO,SAAU,QAAO;AACxC,YAAI,OAAO,OAAO,aAAa,SAAU,QAAO,OAAO;AACvD,YAAI,OAAO,SAAS,MAAO,QAAO,OAAO,SAAS;AAClD,YAAI,MAAM,QAAQ,OAAO,QAAQ,GAAG;AAClC,iBAAO,OAAO,SAAS,IAAI,CAAC,MAAW,OAAO,MAAM,WAAW,IAAI,EAAE,KAAK,EAAE,KAAK,IAAI;AAAA,QACvF;AACA,eAAO;AAAA,MACT,SAAS,OAAO;AACd,gBAAQ,MAAM,8BAA8B,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,MAAM,mBAAmB,UAA6C;AACpE,YAAM,aAAa,cAAc,QAAQ;AACzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAgB,MAAM,WAAW,YAAY,+BAA+B;AAAA,UAChF,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,QACtD,CAAC;AACD,YAAI,CAAC,UAAU,OAAO,WAAW,EAAG,QAAO,CAAC;AAI5C,YAAI,OAAO,CAAC,EAAE,OAAO;AACnB,iBAAO;AAAA,QACT;AAEA,eAAO,OAAO,IAAI,CAAC,QAAa;AAAA,UAC9B,MAAM,GAAG;AAAA,UACT,MAAM,GAAG;AAAA,UACT,OAAO,GAAG,UAAU,SAAS,EAAE,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,GAAG,KAAK,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE;AAAA,UAChG,gBAAgB,GAAG,UAAU,SAAS,EAAE,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,GAAG,KAAK,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE;AAAA,UACzG,QAAQ,GAAG;AAAA,QACb,EAAE;AAAA,MACJ,SAAS,OAAO;AACd,gBAAQ,MAAM,yCAAyC,KAAK;AAC5D,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,qBAAqB,OAA6C;AACtE,UAAI;AACF,cAAM,SAAS,MAAM,WAAW,YAAY,oBAAoB,
EAAE,MAAM,CAAC;AACzE,eAAQ,UAAkC,CAAC;AAAA,MAC7C,SAAS,OAAO;AACd,gBAAQ,MAAM,0CAA0C,KAAK;AAC7D,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,WAA0B;AAC9B,UAAI;AACF,cAAM,WAAW,YAAY,UAAU;AACvC,cAAM,WAAW,iBAAiB,MAAM;AACxC,mBAAW,IAAI;AACf,mBAAW,QAAQ;AACnB,aAAK,KAAK;AAAA,MACZ,SAAS,OAAO;AAEd,aAAK,KAAK,SAAS;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACnRO,SAAS,iBAAiB,YAAgC;AAC/D,QAAM,WAAW;AAAA,IACf,CAAC,aAAwB,GAAG;AAAA,IAC5B,CAAC,eAA0B,GAAG;AAAA,IAC9B,CAAC,mBAA8B,GAAG;AAAA,IAClC,CAAC,YAAuB,GAAG;AAAA,EAC7B,EAAE,WAAW,YAAY,aAAwB;AAEjD,QAAM,OAAO,WAAW,MAAM,MAAM,OAAO;AAC3C,QAAM,MAAM,WAAW,MAAM,MAAM,YAAY;AAC/C,QAAM,SAAS,WAAW,SAAS,KAAK,WAAW,MAAM,MAAM;AAE/D,SAAO,GAAG,QAAQ,KAAK,IAAI,IAAI,GAAG,IAAI,MAAM,IAAI,WAAW,OAAO;AACpE;AAKO,SAAS,0BACd,UACA,aACA,UAA6D,CAAC,GACtD;AACR,QAAM,EAAE,iBAAiB,IAAI,aAAa,KAAK,IAAI;AAGnD,QAAM,WAAW,aACb,YAAY,OAAO,OAAK,EAAE,aAAa,aAAwB,IAC/D;AAEJ,MAAI,SAAS,WAAW,EAAG,QAAO;AAElC,QAAM,UAAU,SAAS,MAAM,GAAG,cAAc;AAChD,QAAM,SAAS,SAAS,SAAS,iBAC7B;AAAA,UAAa,SAAS,SAAS,cAAc,UAC7C;AAEJ,QAAM,YAAY,QAAQ,IAAI,gBAAgB,EAAE,KAAK,IAAI;AAEzD,SAAO;AAAA;AAAA;AAAA,qBAAyE,QAAQ;AAAA,EAAO,SAAS,GAAG,MAAM;AAAA;AACnH;;;AHnLA,IAAI,QAAkB;AAAA,EACpB,SAAS,oBAAI,IAAI;AAAA,EACjB,QAAQ,oBAAI,IAAI;AAAA,EAChB,aAAa;AACf;AAaA,eAAe,iBAAiB,UAA6C;AAC3E,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,MAAME,SAAQ,UAAU;AAG9B,QAAM,YAAY,sBAAsB,GAAG;AAC3C,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAGA,QAAM,OAAOC,SAAQ,UAAU;AAC/B,QAAM,MAAM,GAAG,UAAU,EAAE,IAAI,IAAI;AAGnC,QAAM,WAAW,MAAM,QAAQ,IAAI,GAAG;AACtC,MAAI,UAAU;AACZ,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,OAAO,IAAI,GAAG,GAAG;AACzB,WAAO;AAAA,EACT;AAGA,MAAI;AACF,UAAM,SAAS,MAAM,UAAU,MAAM,IAAI;AACzC,QAAI,CAAC,QAAQ;AACX,YAAM,OAAO,IAAI,GAAG;AACpB,aAAO;AAAA,IACT;AAEA,YAAQ,IAAI,iBAAiB,UAAU,IAAI,QAAQ,IAAI,EAAE;AAEzD,UAAM,SAAS,MAAM,aAAa,UAAU,IAAI,QAAQ,IAAI;AAC5D,UAAM,QAAQ,IAAI,KAAK,MAAM;AAG7B,WAAO,QAAQ,GAAG,QAAQ,CAAC,SAAS;AAClC,cAAQ,IAAI,SAAS,UAAU,IAAI,qBAAqB,IAAI,EAAE;AAC9D,YAAM,QAAQ,OAAO,GAAG;AAAA,IAC1B,CAAC;AAED,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,yBAAyB,UAAU,IAAI,KAAK,KAAK;AAC/D,UAAM,OAAO,IAAI,GAA
G;AACpB,WAAO;AAAA,EACT;AACF;AAKA,eAAe,kBAAkB,UAAwC;AACvE,QAAM,SAAS,MAAM,iBAAiB,QAAQ;AAC9C,SAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAC9B;AAWA,eAAsB,UAAU,UAAkB,qBAAqB,OAAsB;AAC3F,QAAM,UAAU,MAAM,kBAAkB,QAAQ;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB;AAAA,EACF;AAGA,QAAM,QAAQ,IAAI,QAAQ,IAAI,YAAU,OAAO,WAAW,QAAQ,CAAC,CAAC;AAGpE,MAAI,oBAAoB;AACtB,UAAM,QAAQ,IAAI,QAAQ,IAAI,YAAU,OAAO,mBAAmB,QAAQ,CAAC,CAAC;AAAA,EAC9E;AACF;AAKA,eAAsB,eAAe,UAAyC;AAC5E,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,UAAU,MAAM,kBAAkB,UAAU;AAElD,QAAM,iBAA+B,CAAC;AAEtC,aAAW,UAAU,SAAS;AAC5B,UAAM,QAAQ,OAAO,eAAe,UAAU;AAC9C,mBAAe,KAAK,GAAG,KAAK;AAAA,EAC9B;AAEA,SAAO;AACT;AAKA,eAAsB,oBAA2D;AAC/E,QAAM,UAAwC,CAAC;AAE/C,aAAW,UAAU,MAAM,QAAQ,OAAO,GAAG;AAC3C,UAAM,cAAc,OAAO,kBAAkB;AAC7C,eAAW,CAAC,MAAM,WAAW,KAAK,YAAY,QAAQ,GAAG;AACvD,YAAM,WAAW,QAAQ,IAAI,KAAK,CAAC;AACnC,eAAS,KAAK,GAAG,WAAW;AAC5B,cAAQ,IAAI,IAAI;AAAA,IAClB;AAAA,EACF;AAEA,SAAO;AACT;AAsCA,eAAsB,cACpB,UACA,MACA,WACA,qBAAqB,OACA;AACrB,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,MAAM,iBAAiB,UAAU;AAChD,MAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,SAAO,OAAO,cAAc,YAAY,MAAM,WAAW,kBAAkB;AAC7E;AAKA,eAAsB,SAAS,UAAkB,MAAc,WAA2C;AACxG,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,MAAM,iBAAiB,UAAU;AAChD,MAAI,CAAC,OAAQ,QAAO;AACpB,SAAO,OAAO,SAAS,YAAY,MAAM,SAAS;AACpD;AAKA,eAAsB,mBAAmB,UAA6C;AACpF,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,MAAM,iBAAiB,UAAU;AAChD,MAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,SAAO,OAAO,mBAAmB,UAAU;AAC7C;AAyBA,eAAsB,wBACpB,UACA,UAA6D,CAAC,GAC7C;AACjB,QAAM,cAAc,MAAM,eAAe,QAAQ;AACjD,SAAO,0BAA0B,UAAU,aAAa,OAAO;AACjE;AAYO,SAAS,YAAY,UAA2B;AACrD,QAAM,MAAMC,SAAQ,QAAQ;AAC5B,SAAO,sBAAsB,GAAG,MAAM;AACxC;;;AFnPA,IAAM,0BAA0B,KAAK;AAErC,IAAM,uBAAuBC,GAAE,OAAO;AAAA,EACpC,MAAMA,GACH,OAAO,EACP,SAAS,yEAAyE;AAAA,EACrF,MAAMA,GACH,KAAK,CAAC,QAAQ,aAAa,CAAC,EAC5B,SAAS,2FAA2F;AAAA,EACvG,SAASA,GACN,OAAO,EACP,SAAS,EACT,SAAS,4DAA4D;AAAA,EACxE,YAAYA,GACT,OAAO,EACP,SAAS,EACT,SAAS,8DAA8D;AAAA,EAC1E,YAAYA,GACT,OAAO,EACP,SAAS,EACT,SAAS,+DAA+D;AAC7E,CAAC;AAEM,SAAS,oBAAoB,SAA+B;AACjE,SAAOC,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA,qBAeI,QAAQ,gBAAgB;AAAA,IAEzC,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,MAAM,MAAM,SAAS,YAAY,WAAW,MAA4C;AACxG,UAAI;AAEF,cAAM,eAAeC,YAAW,IAAI,IAChC,OACAC,SAAQ,QAAQ,kBAAkB,IAAI;AAG1C,cAAM,eAAeC,UAAS,QAAQ,kBAAkB,YAAY;AACpE,YAAI,aAAa,WAAW,IAAI,KAAK,CAACF,YAAW,IAAI,GAAG;AACtD,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,YAAI,SAAS,QAAQ;AAEnB,cAAI,YAAY,QAAW;AACzB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO;AAAA,YACT;AAAA,UACF;AAEA,gBAAM,UAAUG,YAAW,YAAY;AACvC,gBAAM,SAAS,UAAU,aAAa;AAGtC,kBAAQ,IAAI,4CAA4C,CAAC,CAAC,QAAQ,UAAU;AAC5E,kBAAQ,IAAI,4CAA4C,YAAY;AACpE,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR;AAAA,YACA,aAAa,QAAQ;AAAA,UACvB,CAAC;AAGD,cAAI,QAAQ,UAAU,yBAAyB;AAC7C,oBAAQ,aAAa;AAAA,cACnB,MAAM;AAAA,cACN;AAAA,cACA,MAAM;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA;AAAA,cACA,aAAa,QAAQ;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM,aAAa,KAAK,KAAK,QAAQ,SAAS,uBAAuB;AACrE,qBAAS,IAAI,GAAG,IAAI,YAAY,KAAK,GAAG;AACtC,oBAAM,aAAa,IAAI;AACvB,oBAAM,QAAQ,QAAQ,MAAM,YAAY,aAAa,uBAAuB;AAC5E,sBAAQ,aAAa;AAAA,gBACnB,MAAM;AAAA,gBACN;AAAA,gBACA,MAAM;AAAA,gBACN,QAAQ;AAAA,gBACR,SAAS;AAAA,gBACT;AAAA,gBACA,aAAa,QAAQ;AAAA,gBACrB,YAAY;AAAA,gBACZ;AAAA,gBACA;AAAA,gBACA,WAAW;AAAA,cACb,CAAC;AAED,kBAAI,aAAa,GAAG;AAClB,sBAAM,IAAI,QAAQ,CAACF,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,cACvD;AAAA,YACF;AAAA,UACF;AAGA,gBAAM,WAAW,QAAQ,WAAW,QAAQ,kBAAkB,YAAY;AAG1E,gBAAM,MAAMG,SAAQ,YAAY;AAChC,cAAI,CAACD,YAAW,GAAG,GAAG;AACpB,kBAAME,OAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,UACtC;AAEA,gBAAMC,WAAU,cAAc,SAAS,OAAO;AAG9C,cAAI,oBAAoB;AACxB,cAAI,QAAQ,cAAc,SAAa,YAAY,YAAY,GAAG;AAChE,kBAAU,UAAU,cAAc,IAAI;AACtC,gCAAoB,MAAU,wBAAwB,YAAY;AAAA,UACpE;AAGA,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR;AAAA,YACA,aAAa,QAAQ;AAAA,UACvB,CAAC;AAED,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN;AAAA,YACA,cAAc,OAAO,WAAW,SAAS,OAAO;AAAA,YAChD,WAAW,QAAQ,MAAM,IAAI,EAAE;AAAA,YAC/B,GAAI,qBAAqB,EAAE,aAAa,kBAAkB;AAAA,UAC5D;AAAA,QACF,WAAW,SAAS,eAAe;AAEjC,cAAI,eAAe,UAAa,eAAe,QAAW;AACx
D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO;AAAA,YACT;AAAA,UACF;AAEA,cAAI,CAACH,YAAW,YAAY,GAAG;AAC7B,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,mBAAmB,IAAI;AAAA,YAChC;AAAA,UACF;AAGA,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,QAAQ;AAAA,UACV,CAAC;AAGD,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,WAAW;AAAA,YACX,WAAW;AAAA,YACX,QAAQ;AAAA,UACV,CAAC;AAGD,gBAAM,WAAW,QAAQ,WAAW,QAAQ,kBAAkB,YAAY;AAG1E,gBAAM,iBAAiB,MAAMI,UAAS,cAAc,OAAO;AAG3D,cAAI,CAAC,eAAe,SAAS,UAAU,GAAG;AAExC,kBAAM,QAAQ,eAAe,MAAM,IAAI;AACvC,kBAAM,UAAU,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,IAAI;AAE5C,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO;AAAA,cACP,MAAM;AAAA,cACN,aAAa,MAAM,SAAS,KACxB,GAAG,OAAO;AAAA,OAAU,MAAM,SAAS,EAAE,iBACrC;AAAA,YACN;AAAA,UACF;AAGA,gBAAM,cAAc,eAAe,MAAM,UAAU,EAAE,SAAS;AAC9D,cAAI,cAAc,GAAG;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,SAAS,WAAW;AAAA,cAC3B,MAAM;AAAA,YACR;AAAA,UACF;AAGA,gBAAM,aAAa,eAAe,QAAQ,YAAY,UAAU;AAChE,gBAAMD,WAAU,cAAc,YAAY,OAAO;AAGjD,gBAAM,WAAW,WAAW,MAAM,IAAI,EAAE;AACxC,gBAAM,WAAW,WAAW,MAAM,IAAI,EAAE;AAGxC,cAAI,oBAAoB;AACxB,cAAI,QAAQ,cAAc,SAAa,YAAY,YAAY,GAAG;AAChE,kBAAU,UAAU,cAAc,IAAI;AACtC,gCAAoB,MAAU,wBAAwB,YAAY;AAAA,UACpE;AAGA,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,QAAQ;AAAA,UACV,CAAC;AAED,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,WAAW,WAAW;AAAA,YACtB,GAAI,qBAAqB,EAAE,aAAa,kBAAkB;AAAA,UAC5D;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,iBAAiB,IAAI;AAAA,QAC9B;AAAA,MACF,SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AM9TA;AAFA,SAAS,QAAAE,aAAY;AACrB,SAAS,KAAAC,UAAS;AAOlB,IAAM,kBAAkBA,GAAE,OAAO;AAAA,EAC/B,QAAQA,GACL,KAAK,CAAC,OAAO,QAAQ,QAAQ,OAAO,CAAC,EACrC,SAAS,wCAAwC;AAAA,EACpD,OAAOA,GACJ;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO,EAAE,SAAS,yBAAyB;AAAA,MACtD,OAAOA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS,mDAAmD;AAAA,IAC3F,CAAC;AAAA,EACH,EACC,SAAS,EACT,SAAS,8CAA8C;AAAA,EAC1D,QAAQA,GACL,O
AAO,EACP,SAAS,EACT,SAAS,sDAAsD;AAAA,EAClE,QAAQA,GACL,KAAK,CAAC,WAAW,eAAe,aAAa,WAAW,CAAC,EACzD,SAAS,EACT,SAAS,qDAAqD;AACnE,CAAC;AAEM,SAAS,eAAe,SAA0B;AACvD,SAAOD,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAgBb,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,QAAQ,OAAO,QAAQ,OAAO,MAAuC;AACrF,UAAI;AACF,gBAAQ,QAAQ;AAAA,UACd,KAAK,OAAO;AACV,gBAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAEA,kBAAM,UAAU,MAAM,YAAY,WAAW,QAAQ,WAAW,KAAK;AAErE,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,YAAY,QAAQ;AAAA,cACpB,OAAO,QAAQ,IAAI,cAAc;AAAA,YACnC;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,kBAAM,QAAQ,MAAM,YAAY,aAAa,QAAQ,SAAS;AAE9D,kBAAM,QAAQ;AAAA,cACZ,OAAO,MAAM;AAAA,cACb,SAAS,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,SAAS,EAAE;AAAA,cAC/D,YAAY,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,aAAa,EAAE;AAAA,cACtE,WAAW,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,WAAW,EAAE;AAAA,cACnE,WAAW,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,WAAW,EAAE;AAAA,YACrE;AAEA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR;AAAA,cACA,OAAO,MAAM,IAAI,cAAc;AAAA,YACjC;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,gBAAI,CAAC,QAAQ;AACX,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAEA,gBAAI,CAAC,QAAQ;AACX,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAEA,kBAAM,UAAU,MAAM,YAAY,aAAa,QAAQ,MAAM;AAE7D,gBAAI,CAAC,SAAS;AACZ,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,wBAAwB,MAAM;AAAA,cACvC;AAAA,YACF;AAEA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,MAAM,eAAe,OAAO;AAAA,YAC9B;AAAA,UACF;AAAA,UAEA,KAAK,SAAS;AACZ,kBAAM,QAAQ,MAAM,YAAY,aAAa,QAAQ,SAAS;AAE9D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,cAAc;AAAA,YAChB;AAAA,UACF;AAAA,UAEA;AACE,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,mBAAmB,MAAM;AAAA,YAClC;AAAA,QACJ;AAAA,MACF,SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,SAAS,eAAe,MAAgB;AACtC,SAAO;AAAA,IACL,IAAI,KAAK;AAAA,IACT,SAAS,KAAK;AAAA,IACd,QAAQ,KAAK;AAAA,IACb,OAAO,KAAK;AAAA,IACZ,WAAW,KAAK,UAAU,YAAY;AAAA,EACxC;AACF;;;AC1JA;AACA;AAHA,SAAS,QAAA
E,aAAY;AACrB,SAAS,KAAAC,UAAS;AASlB,IAAM,uBAAuBA,GAAE,OAAO;AAAA,EACpC,QAAQA,GACL,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,SAAS,2EAA2E;AAAA,EACvF,WAAWA,GACR,OAAO,EACP,SAAS,EACT,SAAS,kDAAkD;AAChE,CAAC;AAEM,SAAS,oBAAoB,SAA+B;AACjE,SAAOD,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASb,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,QAAQ,UAAU,MAA4C;AAC9E,UAAI;AACF,gBAAQ,QAAQ;AAAA,UACd,KAAK,QAAQ;AACX,kBAAM,SAAS,MAAM,cAAc,QAAQ,iBAAiB;AAE5D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,YAAY,OAAO;AAAA,cACnB,QAAQ,OAAO,IAAI,CAAC,OAAO;AAAA,gBACzB,MAAM,EAAE;AAAA,gBACR,aAAa,EAAE;AAAA,cACjB,EAAE;AAAA,cACF,WAAW,uBAAuB,MAAM;AAAA,YAC1C;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,gBAAI,CAAC,WAAW;AACd,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAGA,gBAAI,MAAM,aAAa,SAAS,QAAQ,WAAW,SAAS,GAAG;AAC7D,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,UAAU,SAAS;AAAA,cAC5B;AAAA,YACF;AAGA,kBAAM,QAAQ,MAAM,iBAAiB,WAAW,QAAQ,iBAAiB;AAEzE,gBAAI,CAAC,OAAO;AACV,oBAAM,YAAY,MAAM,cAAc,QAAQ,iBAAiB;AAC/D,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,UAAU,SAAS;AAAA,gBAC1B,iBAAiB,UAAU,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,cAC9C;AAAA,YACF;AAGA,kBAAM,aAAa,KAAK,QAAQ,WAAW,SAAS;AAEpD,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,WAAW,MAAM;AAAA,cACjB,aAAa,MAAM;AAAA,cACnB,SAAS,MAAM;AAAA,cACf,eAAe,MAAM,QAAQ;AAAA,YAC/B;AAAA,UACF;AAAA,UAEA;AACE,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,mBAAmB,MAAM;AAAA,YAClC;AAAA,QACJ;AAAA,MACF,SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ACnGA,SAAS,QAAAE,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,WAAAC,gBAAe;AACvD,SAAS,cAAAC,oBAAkB;AAC3B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAQ9B,IAAM,oBAAoBC,GAAE,OAAO;AAAA,EACjC,OAAOA,GACJ,MAAMA,GAAE,OAAO,CAAC,EAChB,SAAS,EACT,SAAS,wHAAwH;AAAA,EACpI,KAAKA,GACF,QAAQ,EACR,SAAS,EACT,QAAQ,KAAK,EACb,SAAS,qEAAqE;AACnF,CAAC;AAKD,eAAe,mBACb,KACA,kBACA,WAAW,IACQ;AACnB,QAAM,QAAkB,CAAC;AACzB,QAAM,sBAA0B,uBAAuB;AAEvD,iBAAe,KAAK,YAAoB;AACtC,QAAI,MAAM,UAAU,SAAU;AAE9B,QAAI;AACF,YAAM,UAAU,MAAMC,SAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AA
EjE,iBAAW,SAAS,SAAS;AAC3B,YAAI,MAAM,UAAU,SAAU;AAE9B,cAAM,WAAWC,SAAQ,YAAY,MAAM,IAAI;AAG/C,YAAI,MAAM,YAAY,GAAG;AACvB,cAAI,CAAC,gBAAgB,QAAQ,QAAQ,SAAS,SAAS,UAAU,EAAE,SAAS,MAAM,IAAI,GAAG;AACvF;AAAA,UACF;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAMC,SAAQ,MAAM,IAAI;AAC9B,cAAI,oBAAoB,SAAS,GAAG,GAAG;AACrC,kBAAM,KAAK,QAAQ;AAAA,UACrB;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,KAAK,GAAG;AACd,SAAO;AACT;AAEO,SAAS,iBAAiB,SAA4B;AAC3D,SAAOC,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAYI,QAAQ,gBAAgB;AAAA,IAEzC,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,MAAM,MAAyC;AAC/D,UAAI;AAEF,YAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,gBAAM,iBAAiB,MAAU,kBAAkB;AAEnD,cAAI,OAAO,KAAK,cAAc,EAAE,WAAW,GAAG;AAC5C,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,SAAS;AAAA,cACT,OAAO,CAAC;AAAA,cACR,aAAa;AAAA,cACb,eAAe;AAAA,YACjB;AAAA,UACF;AAEA,iBAAO,wBAAwB,gBAAgB,QAAQ,gBAAgB;AAAA,QACzE;AAGA,cAAM,eAAyB,CAAC;AAEhC,mBAAW,QAAQ,OAAO;AACxB,gBAAM,eAAeC,YAAW,IAAI,IAChC,OACAH,SAAQ,QAAQ,kBAAkB,IAAI;AAE1C,cAAI,CAACI,aAAW,YAAY,GAAG;AAC7B;AAAA,UACF;AAEA,gBAAM,QAAQ,MAAMC,MAAK,YAAY;AAErC,cAAI,MAAM,YAAY,GAAG;AACvB,kBAAM,WAAW,MAAM,mBAAmB,cAAc,QAAQ,gBAAgB;AAChF,yBAAa,KAAK,GAAG,QAAQ;AAAA,UAC/B,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAQ,YAAY,YAAY,GAAG;AACjC,2BAAa,KAAK,YAAY;AAAA,YAChC;AAAA,UACF;AAAA,QACF;AAEA,YAAI,aAAa,WAAW,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,SAAS,8DAAkE,uBAAuB,EAAE,KAAK,IAAI;AAAA,YAC7G,OAAO,CAAC;AAAA,YACR,aAAa;AAAA,YACb,eAAe;AAAA,UACjB;AAAA,QACF;AAGA,cAAM,QAAQ;AAAA,UACZ,aAAa,IAAI,UAAY,UAAU,MAAM,IAAI,CAAC;AAAA,QACpD;AAGA,cAAM,iBAA+C,CAAC;AAEtD,mBAAW,QAAQ,cAAc;AAC/B,gBAAM,cAAc,MAAU,eAAe,IAAI;AACjD,cAAI,YAAY,SAAS,GAAG;AAC1B,2BAAe,IAAI,IAAI;AAAA,UACzB;AAAA,QACF;AAEA,eAAO,wBAAwB,gBAAgB,QAAQ,gBAAgB;AAAA,MACzE,SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAKA,SAAS,wBACP,gBACA,kBACA;AACA,MAAI,cAAc;AAClB,MAAI,gBAAgB;AACpB,MAAI,YAAY;AAEhB,QAAM,QAaD,CAAC;AAEN,aAAW,CAAC,UAAU,WAAW,KAAK,OAAO,QAAQ,cAAc,GAAG;AACpE,UAAM,eAAeC,
UAAS,kBAAkB,QAAQ;AACxD,QAAI,aAAa;AACjB,QAAI,eAAe;AAEnB,UAAM,uBAAuB,YAAY,IAAI,OAAK;AAChD,YAAM,WAAW,kBAAkB,EAAE,QAAQ;AAE7C,UAAI,EAAE,4BAA2C;AAC/C;AACA;AAAA,MACF,WAAW,EAAE,8BAA6C;AACxD;AACA;AAAA,MACF,OAAO;AACL;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA,MAAM,EAAE,MAAM,MAAM,OAAO;AAAA,QAC3B,QAAQ,EAAE,MAAM,MAAM,YAAY;AAAA,QAClC,SAAS,EAAE;AAAA,QACX,QAAQ,EAAE;AAAA,QACV,MAAM,EAAE;AAAA,MACV;AAAA,IACF,CAAC;AAED,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN;AAAA,MACA,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAGA,QAAM,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM;AAExC,QAAM,YAAY,cAAc,KAAK,gBAAgB;AAErD,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS,YACL,SAAS,WAAW,iBAAiB,aAAa,kBAAkB,MAAM,MAAM,cAChF,2BAA2B,OAAO,KAAK,cAAc,EAAE,UAAU,KAAK;AAAA,IAC1E;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS,YACL,cAAc,KAAK,IACnB;AAAA,EACN;AACF;AAKA,SAAS,kBAAkB,UAA2B;AACpD,UAAQ,UAAU;AAAA,IAChB;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKA,SAAS,cACP,OASQ;AACR,QAAM,QAAkB,CAAC;AAEzB,aAAW,QAAQ,OAAO;AACxB,UAAM,KAAK;AAAA,EAAK,KAAK,YAAY,GAAG;AACpC,eAAW,KAAK,KAAK,YAAY,MAAM,GAAG,EAAE,GAAG;AAC7C,YAAM,SAAS,EAAE,aAAa,UAAU,WAAM,EAAE,aAAa,YAAY,iBAAO;AAChF,YAAM,KAAK,KAAK,MAAM,KAAK,EAAE,IAAI,IAAI,EAAE,MAAM,KAAK,EAAE,OAAO,EAAE;AAAA,IAC/D;AACA,QAAI,KAAK,YAAY,SAAS,IAAI;AAChC,YAAM,KAAK,aAAa,KAAK,YAAY,SAAS,EAAE,OAAO;AAAA,IAC7D;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;;;AC7SA,SAAS,QAAAC,cAAY;AACrB,SAAS,KAAAC,WAAS;;;ACDlB;AAAA,EAEE;AAAA,EACA;AAAA,OAEK;AACP,SAAS,UAAAC,eAAc;AAEvB;AA0DO,IAAe,WAAf,MAA2C;AAAA;AAAA,EAQtC;AAAA;AAAA,EAGA,WAAmB;AAAA,EAE7B,YAAY,OAAgB;AAC1B,SAAK,QAAQ,SAAS,gBAAgB;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBU,YAAY,MAAc,OAAgC;AAClE,WAAO,EAAE,MAAM,MAAM;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,SAA+D;AACvE,UAAM,EAAE,MAAM,WAAW,YAAY,YAAY,YAAY,IAAI;AACjE,UAAM,QAAwB,CAAC;AAG/B,UAAM,YAAY,MAAM,gBAAgB,OAAO;AAAA,MAC7C;AAAA,MACA;AAAA,MACA,cAAc,KAAK;AAAA,MACnB;AAAA,MACA,OAAO,KAAK;AAAA,IACd,CAAC;AAED,UAAM,UAAU,OAAO,SAAiD;AACtE,YAAM,WAAyB;AAAA,QAC7B,IAAIC,QAAO,CAAC;AAA
A,QACZ,WAAW,KAAK,IAAI;AAAA,QACpB,GAAG;AAAA,MACL;AACA,YAAM,KAAK,QAAQ;AAGnB,YAAM,gBAAgB,QAAQ,UAAU,IAAI,QAAQ;AAGpD,YAAM,aAAa;AAAA,QACjB,MAAM;AAAA,QACN,YAAY,UAAU;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAEA,QAAI;AACF,YAAM,QAAQ,KAAK,SAAS,OAAO;AACnC,YAAM,eAAe,KAAK,gBAAgB,OAAO;AAGjD,YAAM,SAAS,MAAM,aAAa;AAAA,QAChC,OAAO,aAAa,KAAK,KAAK;AAAA,QAC9B,QAAQ;AAAA,QACR,UAAU;AAAA,UACR,EAAE,MAAM,QAAQ,SAAS,KAAK;AAAA,QAChC;AAAA,QACA;AAAA,QACA,UAAU,YAAY,KAAK,QAAQ;AAAA,QACnC;AAAA,QACA,cAAc,OAAO,SAAS;AAE5B,cAAI,KAAK,MAAM;AACb,kBAAM,QAAQ;AAAA,cACZ,MAAM;AAAA,cACN,SAAS,KAAK;AAAA,YAChB,CAAC;AACD,kBAAM,aAAa;AAAA,cACjB,MAAM;AAAA,cACN,YAAY,UAAU;AAAA,cACtB,cAAc,KAAK;AAAA,cACnB,MAAM,KAAK;AAAA,YACb,CAAC;AAAA,UACH;AAGA,cAAI,KAAK,WAAW;AAClB,uBAAW,YAAY,KAAK,WAAW;AACrC,oBAAM,QAAQ;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,WAAW,SAAS,QAAQ;AAAA,gBACrC,UAAU,SAAS;AAAA,gBACnB,WAAW,SAAS;AAAA,cACtB,CAAC;AACD,oBAAM,aAAa;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY,UAAU;AAAA,gBACtB,cAAc,KAAK;AAAA,gBACnB,UAAU,SAAS;AAAA,gBACnB,WAAW,SAAS;AAAA,cACtB,CAAC;AAAA,YACH;AAAA,UACF;AAGA,cAAI,KAAK,aAAa;AACpB,uBAAW,cAAc,KAAK,aAAa;AACzC,oBAAM,QAAQ;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,eAAe,WAAW,QAAQ;AAAA,gBAC3C,UAAU,WAAW;AAAA,gBACrB,YAAY,WAAW;AAAA,cACzB,CAAC;AACD,oBAAM,aAAa;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY,UAAU;AAAA,gBACtB,cAAc,KAAK;AAAA,gBACnB,UAAU,WAAW;AAAA,gBACrB,YAAY,WAAW;AAAA,cACzB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAGD,YAAM,eAAe,KAAK,YAAY,OAAO,MAAM,KAAK;AAGxD,YAAM,gBAAgB,SAAS,UAAU,IAAI,YAAY;AAEzD,YAAM,aAAa;AAAA,QACjB,MAAM;AAAA,QACN,YAAY,UAAU;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,QAAQ;AAAA,MACV,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT,QAAQ;AAAA,QACR;AAAA,QACA,aAAa,UAAU;AAAA,MACzB;AAAA,IACF,SAAS,OAAY;AACnB,YAAM,eAAe,MAAM,WAAW;AAGtC,YAAM,gBAAgB,UAAU,UAAU,IAAI,YAAY;AAE1D,YAAM,aAAa;AAAA,QACjB,MAAM;AAAA,QACN,YAAY,UAAU;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,OAAO;AAAA,MACT,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,QACP;AAAA,QACA,aAAa,UAAU;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAO,SAAoE;AAChF,UAAM,SAAkC,C
AAC;AACzC,QAAI,cAAsE;AAC1E,QAAI,OAAO;AAGX,UAAM,aAAsC,CAAC;AAG7C,UAAM,aAAa,KAAK,IAAI;AAAA,MAC1B,GAAG;AAAA,MACH,YAAY,OAAO,UAAU;AAC3B,mBAAW,KAAK,KAAK;AACrB,YAAI,aAAa;AACf,sBAAY,WAAW,MAAM,CAAE;AAC/B,wBAAc;AAAA,QAChB;AAAA,MACF;AAAA,IACF,CAAC,EAAE,KAAK,CAAC,WAAW;AAClB,aAAO;AACP,UAAI,aAAa;AACf,oBAAY,IAAI;AAAA,MAClB;AACA,aAAO;AAAA,IACT,CAAC;AAGD,WAAO,CAAC,QAAQ,WAAW,SAAS,GAAG;AACrC,UAAI,WAAW,SAAS,GAAG;AACzB,cAAM,WAAW,MAAM;AAAA,MACzB,WAAW,CAAC,MAAM;AAEhB,cAAM,QAAQ,MAAM,IAAI,QAAsC,CAACC,cAAY;AACzE,wBAAcA;AAAA,QAChB,CAAC;AACD,YAAI,OAAO;AACT,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,UAAM;AAAA,EACR;AACF;;;ACtSA,SAAS,QAAAC,aAA0B;AACnC,SAAS,KAAAC,WAAS;AAClB,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,YAAAC,WAAU,QAAAC,OAAM,WAAAC,gBAAe;AACxC,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,mBAAkB;AAC9C,SAAS,cAAAC,oBAAkB;AAI3B;;;ACCA,SAAS,QAAAC,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,YAAAC,iBAAgB;AACxD,SAAS,YAAAC,WAAU,WAAAC,gBAAe;AAClC,SAAS,cAAAC,oBAAkB;AAC3B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,oBAAoB;AAS7B,IAAM,uBAAuBC,GAAE,OAAO;AAAA,EACpC,QAAQA,GAAE,OAAO,EAAE;AAAA,IACjB;AAAA,EACF;AAAA,EACA,UAAUA,GAAE,OAAO,EAAE,SAAS,EAAE;AAAA,IAC9B;AAAA,EACF;AAAA,EACA,OAAOA,GAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;AAAA,IACtC;AAAA,EACF;AACF,CAAC;AAOD,SAAS,WAAW,UAA2B;AAC7C,QAAM,aAAa,SAAS,QAAQ,OAAO,GAAG;AAG9C,MAAI,oEAAoE,KAAK,UAAU,EAAG,QAAO;AAEjG,MAAI,uCAAuC,KAAK,UAAU,EAAG,QAAO;AACpE,SAAO;AACT;AAGA,SAAS,iBAAiB,UAAkB,kBAA8C;AACxF,QAAM,MAAMC,UAAS,kBAAkB,QAAQ,EAAE,QAAQ,OAAO,GAAG;AAGnE,QAAM,WAAW,IAAI,MAAM,6EAA6E;AACxG,MAAI,SAAU,QAAO,SAAS,CAAC,KAAK;AAEpC,QAAM,aAAa,IAAI,MAAM,0CAA0C;AACvE,MAAI,WAAY,QAAO,WAAW,CAAC,KAAK;AACxC,SAAO;AACT;AAGA,SAAS,eAAe,MAA0B;AAChD,QAAM,QAAgC;AAAA,IACpC,cAAiB,GAAG;AAAA,IACpB,kBAAoB,GAAG;AAAA,IACvB,eAAkB,GAAG;AAAA,IACrB,iBAAoB,GAAG;AAAA,IACvB,kBAAoB,GAAG;AAAA,IACvB,mBAAqB,GAAG;AAAA,IACxB,cAAgB,GAAG;AAAA,IACnB,kBAAoB,GAAG;AAAA,IACvB,oBAAuB,GAAG;AAAA,IAC1B,eAAkB,GAAG;AAAA,IACrB,kBAAqB,GAAG;AAAA,IACxB,uBAAyB,GAAG;AAAA,IAC5B,cAAiB,GAAG;AAAA,IACpB,oBAAsB,GAAG;AAAA,IACzB,gBA
AkB,GAAG;AAAA,EACvB;AACA,SAAO,MAAM,IAAI,KAAK;AACxB;AAIA,SAAS,qBACP,SACA,MACA,WACuB;AACvB,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,IAAI,MAAO;AAChB,UAAM,EAAE,OAAO,IAAI,IAAI,IAAI;AAC3B,UAAM,aAAa,OAAO,MAAM,QAAS,SAAS,MAAM,QAAQ,aAAa,MAAM;AAEnF,UAAM,YAAY,OAAO,IAAI,QAAS,SAAS,IAAI,QAAQ,YAAY,IAAI;AAC3E,QAAI,cAAc,WAAW;AAC3B,UAAI,IAAI,UAAU,QAAQ;AACxB,cAAM,QAAQ,qBAAqB,IAAI,UAAU,MAAM,SAAS;AAChE,YAAI,MAAO,QAAO;AAAA,MACpB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAGA,SAAS,iBAAiB,SAA2B,MAAqC;AACxF,aAAW,OAAO,SAAS;AACzB,QAAI,IAAI,SAAS,QAAQ,IAAI,eAAgB,QAAO;AACpD,QAAI,IAAI,UAAU;AAChB,YAAM,QAAQ,iBAAiB,IAAI,UAAU,IAAI;AACjD,UAAI,MAAO,QAAO;AAAA,IACpB;AAAA,EACF;AACA,SAAO;AACT;AAGA,SAAS,eAAe,MAAsB;AAC5C,SAAO,KAAK,QAAQ,cAAc,EAAE,EAAE,QAAQ,UAAU,EAAE,EAAE,KAAK;AACnE;AAGA,eAAe,cAAc,QAAgB,kBAA4F;AAEvI,QAAM,UAAU,OAAO,QAAQ,uBAAuB,MAAM;AAG5D,QAAM,aAAa;AAAA,IACjB,oFAAoF,OAAO;AAAA,IAC3F,iCAAiC,OAAO;AAAA,EAC1C;AAEA,aAAW,WAAW,YAAY;AAChC,QAAI;AACF,YAAM,SAAS,aAAa,MAAM;AAAA,QAChC;AAAA,QAAM;AAAA,QACN;AAAA,QAAM;AAAA,QACN;AAAA,QAAU;AAAA,QACV;AAAA,QAAM;AAAA,MACR,GAAG;AAAA,QACD,KAAK;AAAA,QACL,UAAU;AAAA,QACV,SAAS;AAAA,QACT,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,MAChC,CAAC,EAAE,KAAK;AAER,UAAI,QAAQ;AACV,cAAM,YAAY,OAAO,MAAM,IAAI,EAAE,CAAC;AACtC,cAAM,QAAQ,UAAU,MAAM,mBAAmB;AACjD,YAAI,OAAO;AACT,gBAAM,MAAM,MAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,iBAAO;AAAA,YACL,UAAUC,SAAQ,kBAAkB,MAAM,CAAC,CAAC;AAAA,YAC5C,MAAM,SAAS,MAAM,CAAC,CAAC,IAAI;AAAA,YAC3B,MAAM,OAAO,IAAI,MAAM;AAAA,UACzB;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,QAAM,aAAa,IAAI;AAAA,IACrB,qEAAqE,OAAO;AAAA,EAC9E;AACA,QAAM,iBAAiB,oBAAI,IAAI,CAAC,OAAO,QAAQ,OAAO,MAAM,CAAC;AAC7D,QAAM,eAAe,oBAAI,IAAI,CAAC,gBAAgB,QAAQ,QAAQ,SAAS,SAAS,UAAU,CAAC;AAE3F,iBAAe,OAAO,KAAa,UAAoF;AACrH,QAAI,YAAY,EAAG,QAAO;AAC1B,QAAI,YAAY;AAEhB,QAAI;AACF,YAAM,UAAU,MAAMC,SAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,iBAAW,SAAS,SAAS;AAC3B,YAAI,aAAa,EAAG,QAAO;AAC3B,cAAM,WAAWD,SAAQ,KAAK,MAAM,IAAI;AAExC,YAAI,MAAM,YAAY,GAAG;AACvB,cAAI,aAAa,IAAI,MAAM,IAAI,KAAK,MAAM,KAAK,WAAW,GAAG,EAAG;AAChE,gBAAM,QAAQ,MAAM,OAAO,UAAU,SAAS;AA
C9C,cAAI,MAAO,QAAO;AAClB,uBAAa;AAAA,QACf,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,MAAM,KAAK,UAAU,MAAM,KAAK,YAAY,GAAG,CAAC;AAC5D,cAAI,CAAC,eAAe,IAAI,GAAG,EAAG;AAC9B;AAEA,gBAAM,UAAU,MAAME,UAAS,UAAU,OAAO;AAChD,gBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,mBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAI,WAAW,KAAK,MAAM,CAAC,CAAC,GAAG;AAC7B,oBAAM,MAAM,MAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,kBAAI,OAAO,GAAG;AACZ,uBAAO,EAAE,UAAU,UAAU,MAAM,GAAG,MAAM,IAAI;AAAA,cAClD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAA8B;AACtC,WAAO;AAAA,EACT;AAEA,SAAO,OAAO,kBAAkB,GAAG;AACrC;AAMA,IAAM,gBAAgB;AACtB,IAAM,qBAAqB;AAC3B,IAAM,gCAAgC;AAE/B,SAAS,oBAAoB,SAA+B;AACjE,SAAOC,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAeI,QAAQ,gBAAgB;AAAA,IAEzC,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,QAAQ,UAAU,MAAM,MAA4C;AACpF,YAAM,WAAW,KAAK,IAAI,SAAS,GAAG,CAAC;AAEvC,UAAI;AAIF,YAAI;AACJ,YAAI,UAAU;AACd,YAAI,UAAU;AACd,YAAI,YAAmC;AAEvC,YAAI,UAAU;AACZ,gBAAM,UAAUC,YAAW,QAAQ,IAC/B,WACAJ,SAAQ,QAAQ,kBAAkB,QAAQ;AAE9C,cAAI,CAACK,aAAW,OAAO,GAAG;AACxB,mBAAO,EAAE,SAAS,OAAO,OAAO,mBAAmB,QAAQ,GAAG;AAAA,UAChE;AAEA,cAAI,CAAK,YAAY,OAAO,GAAG;AAC7B,mBAAO,EAAE,SAAS,OAAO,OAAO,sCAA0C,uBAAuB,EAAE,KAAK,IAAI,CAAC,GAAG;AAAA,UAClH;AAGA,gBAAU,UAAU,SAAS,IAAI;AACjC,gBAAM,UAAU,MAAU,mBAAmB,OAAO;AACpD,sBAAY,iBAAiB,SAAS,MAAM;AAE5C,cAAI,WAAW;AACb,0BAAc;AACd,sBAAU,UAAU,eAAe,MAAM;AACzC,sBAAU,UAAU,eAAe,MAAM;AAAA,UAC3C,OAAO;AAEL,kBAAM,UAAU,MAAMH,UAAS,SAAS,OAAO;AAC/C,kBAAMI,SAAQ,QAAQ,MAAM,IAAI;AAEhC,kBAAM,aAAa,IAAI;AAAA,cACrB,qEAAqE,OAAO,QAAQ,uBAAuB,MAAM,CAAC;AAAA,YACpH;AACA,qBAAS,IAAI,GAAG,IAAIA,OAAM,QAAQ,KAAK;AACrC,kBAAI,WAAW,KAAKA,OAAM,CAAC,CAAC,GAAG;AAC7B,sBAAM,MAAMA,OAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,oBAAI,QAAQ,IAAI;AACd,gCAAc;AACd,4BAAU;AACV,4BAAU;AACV;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,CAAC,aAAa;AAChB,uBAAS,IAAI,GAAG,IAAIA,OAAM,QAAQ,KAAK;AACrC,sBAAM,MAAMA,OAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,oBAAI,QAAQ,IAAI;AACd,gCAAc;AACd,4BAAU;AACV,4BAAU;AACV;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,OAAO;AAEL,gBAAM,QAAQ,MA
AM,cAAc,QAAQ,QAAQ,gBAAgB;AAClE,cAAI,OAAO;AACT,0BAAc,MAAM;AACpB,sBAAU,MAAM;AAChB,sBAAU,MAAM;AAAA,UAClB;AAAA,QACF;AAEA,YAAI,CAAC,aAAa;AAChB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,0BAA0B,MAAM;AAAA,UACzC;AAAA,QACF;AAGA,cAAU,UAAU,aAAa,IAAI;AAKrC,cAAM,WAAW,MAAU,SAAS,aAAa,SAAS,OAAO;AACjE,cAAM,WAAW,WAAW,eAAe,QAAQ,IAAI;AAKvD,cAAM,cAAc,MAAU,mBAAmB,WAAW;AAC5D,YAAI,CAAC,aAAa,YAAY,SAAS,GAAG;AACxC,sBAAY,iBAAiB,aAAa,MAAM;AAAA,QAClD;AAKA,cAAM,aAAa,MAAU,cAAc,aAAa,SAAS,SAAS,KAAK;AAG/E,cAAM,aAAa,oBAAI,IAAwB;AAC/C,mBAAW,OAAO,YAAY;AAC5B,gBAAM,UAAUC,eAAc,IAAI,GAAG;AACrC,cAAI,CAAC,WAAW,IAAI,OAAO,GAAG;AAC5B,uBAAW,IAAI,SAAS,CAAC,CAAC;AAAA,UAC5B;AACA,qBAAW,IAAI,OAAO,EAAG,KAAK,GAAG;AAAA,QACnC;AAmBA,cAAM,eAA8B,CAAC;AACrC,YAAI,YAAY;AAEhB,mBAAW,CAAC,SAAS,IAAI,KAAK,YAAY;AACxC,cAAI,aAAa,cAAe;AAEhC,cAAI,YAAY,YAAa;AAC7B;AAEA,gBAAM,UAAUR,UAAS,QAAQ,kBAAkB,OAAO;AAC1D,gBAAM,WAAW,WAAW,OAAO;AACnC,gBAAM,YAAY,WAAW,iBAAiB,SAAS,QAAQ,gBAAgB,IAAI;AAGnF,gBAAU,UAAU,SAAS,KAAK;AAClC,gBAAM,iBAAiB,MAAU,mBAAmB,OAAO;AAE3D,gBAAM,OAAO,oBAAI,IAA2F;AAE5G,qBAAW,OAAO,MAAM;AACtB,kBAAM,YAAY;AAAA,cAChB;AAAA,cACA,IAAI,MAAM,MAAM;AAAA,cAChB,IAAI,MAAM,MAAM;AAAA,YAClB;AACA,gBAAI,aAAa,CAAC,KAAK,IAAI,UAAU,IAAI,GAAG;AAE1C,kBAAI,iBAAgC;AACpC,kBAAI;AACF,sBAAM,MAAM,MAAU;AAAA,kBACpB;AAAA,kBACA,UAAU,eAAe,MAAM;AAAA,kBAC/B,UAAU,eAAe,MAAM;AAAA,gBACjC;AACA,oBAAI,IAAK,kBAAiB,eAAe,GAAG,EAAE,MAAM,IAAI,EAAE,CAAC;AAAA,cAC7D,QAAQ;AAAA,cAAa;AAErB,mBAAK,IAAI,UAAU,MAAM;AAAA,gBACvB,MAAM,UAAU;AAAA,gBAChB,MAAM,eAAe,UAAU,IAAI;AAAA,gBACnC,MAAM,UAAU,eAAe,MAAM,OAAO;AAAA,gBAC5C,MAAM,UAAU,eAAe,MAAM;AAAA,gBACrC,UAAU,kBAAkB;AAAA,cAC9B,CAAC;AAAA,YACH;AAAA,UACF;AAEA,uBAAa,KAAK;AAAA,YAChB,UAAU;AAAA,YACV,cAAc;AAAA,YACd,QAAQ;AAAA,YACR;AAAA,YACA,mBAAmB,MAAM,KAAK,KAAK,OAAO,CAAC;AAAA,UAC7C,CAAC;AAAA,QACH;AAgBA,cAAM,aAA0B,CAAC;AAEjC,YAAI,YAAY,GAAG;AACjB,qBAAW,WAAW,aAAa,MAAM,GAAG,kBAAkB,GAAG;AAC/D,uBAAW,OAAO,QAAQ,kBAAkB,MAAM,GAAG,6BAA6B,GAAG;AACnF,kBAAI;AAEF,sBAAM,aAAa,IAAI,OAAO;AAC9B,sBAAM,UAAU,IAAI;AAEpB,sBAAM,cAAc,MAAU;AAAA,kBAC5B,QAAQ;AAAA,kBAAU;AAAA,kBAAY;AAAA,kBAAS;AAAA,
gBACzC;AAEA,sBAAM,UAA6B,CAAC;AACpC,sBAAM,YAAY,oBAAI,IAAY;AAElC,2BAAW,OAAO,YAAY,MAAM,GAAG,EAAE,GAAG;AAC1C,wBAAM,SAASQ,eAAc,IAAI,GAAG;AACpC,sBAAI,WAAW,QAAQ,YAAY,WAAW,YAAa;AAC3D,sBAAI,UAAU,IAAI,MAAM,EAAG;AAC3B,4BAAU,IAAI,MAAM;AAEpB,wBAAM,QAAQR,UAAS,QAAQ,kBAAkB,MAAM;AACvD,wBAAM,SAAS,WAAW,MAAM;AAChC,wBAAM,UAAU,SAAS,iBAAiB,QAAQ,QAAQ,gBAAgB,IAAI;AAG9E,sBAAI;AACJ,sBAAI;AACF,0BAAU,UAAU,QAAQ,KAAK;AACjC,0BAAM,YAAY,MAAU,mBAAmB,MAAM;AACrD,0BAAM,YAAY,qBAAqB,WAAW,IAAI,MAAM,MAAM,MAAM,IAAI,MAAM,MAAM,SAAS;AACjG,wBAAI,UAAW,iBAAgB,UAAU;AAAA,kBAC3C,QAAQ;AAAA,kBAAa;AAErB,0BAAQ,KAAK;AAAA,oBACX,cAAc;AAAA,oBACd,QAAQ;AAAA,oBACR,WAAW;AAAA,oBACX,kBAAkB;AAAA,kBACpB,CAAC;AAAA,gBACH;AAEA,oBAAI,QAAQ,SAAS,GAAG;AACtB,6BAAW,KAAK;AAAA,oBACd,cAAc,IAAI;AAAA,oBAClB,YAAY,QAAQ;AAAA,oBACpB,MAAM;AAAA,kBACR,CAAC;AAAA,gBACH;AAAA,cACF,QAAQ;AAAA,cAA4B;AAAA,YACtC;AAAA,UACF;AAAA,QACF;AAKA,cAAM,aAAaA,UAAS,QAAQ,kBAAkB,WAAW;AACjE,cAAM,QAAkB,CAAC;AAGzB,cAAM,KAAK,OAAO,MAAM,MAAM;AAC9B,cAAM,KAAK,SAAS,UAAU,IAAI,UAAU,CAAC,EAAE;AAC/C,YAAI,UAAW,OAAM,KAAK,SAAS,eAAe,UAAU,IAAI,CAAC,EAAE;AACnE,YAAI,SAAU,OAAM,KAAK,SAAS,QAAQ,EAAE;AAG5C,cAAM,mBAAmB,WAAW,OAAO,OAAKQ,eAAc,EAAE,GAAG,MAAM,WAAW,EAAE;AACtF,cAAM,oBAAoB,WAAW,QAAQ,WAAW,IAAI,WAAY,IAAI,IAAI;AAEhF,YAAI,aAAa,SAAS,GAAG;AAC3B,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,sBAAsB,gBAAgB,kBAAkB,iBAAiB,aAAa;AAEjG,gBAAM,QAAQ,aAAa,OAAO,OAAK,EAAE,MAAM;AAC/C,gBAAM,WAAW,aAAa,OAAO,OAAK,CAAC,EAAE,MAAM;AAEnD,cAAI,MAAM,SAAS,GAAG;AACpB,kBAAM,KAAK,EAAE;AACb,kBAAM,KAAK,eAAe;AAC1B,uBAAW,QAAQ,OAAO;AACxB,oBAAM,KAAK,KAAK,KAAK,YAAY,GAAG,KAAK,YAAY,WAAM,KAAK,SAAS,KAAK,EAAE,EAAE;AAClF,yBAAW,KAAK,KAAK,mBAAmB;AACtC,sBAAM,KAAK,0BAAW,EAAE,IAAI,KAAK,EAAE,IAAI,IAAI,EAAE,IAAI,IAAI,EAAE,WAAW,WAAM,EAAE,QAAQ,KAAK,EAAE,EAAE;AAAA,cAC7F;AAAA,YACF;AAAA,UACF;AAEA,cAAI,SAAS,SAAS,GAAG;AACvB,kBAAM,KAAK,EAAE;AACb,kBAAM,KAAK,uBAAuB;AAClC,uBAAW,OAAO,UAAU;AAC1B,oBAAM,KAAK,KAAK,IAAI,YAAY,EAAE;AAClC,yBAAW,KAAK,IAAI,mBAAmB;AACrC,sBAAM,WAAW,EAAE,YAAY,EAAE,SAAS,SAAS,MAAM,WAAM,EAAE,QAAQ,KAAK;AAC9E,sBAAM,KAAK,0BAAW,EAAE,IAAI,KAAK,EAAE,IAAI,IAAI,E
AAE,IAAI,IAAI,QAAQ,EAAE;AAAA,cACjE;AAAA,YACF;AAAA,UACF;AAAA,QACF,OAAO;AACL,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,wFAAwF;AAAA,QACrG;AAGA,YAAI,WAAW,SAAS,GAAG;AACzB,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,iCAAiC;AAC5C,qBAAW,MAAM,YAAY;AAC3B,kBAAM,KAAK,EAAE;AACb,kBAAM,KAAK,GAAG,GAAG,YAAY,KAAK,GAAG,UAAU,eAAe;AAC9D,uBAAW,OAAO,GAAG,MAAM;AACzB,oBAAM,MAAM,IAAI,SAAS,YAAY;AACrC,oBAAM,QAAQ,IAAI,YAAY,WAAM,IAAI,SAAS,KAAK;AACtD,oBAAM,YAAY,IAAI,mBAAmB,OAAO,IAAI,gBAAgB,KAAK;AACzE,oBAAM,KAAK,wBAAS,IAAI,YAAY,GAAG,GAAG,GAAG,KAAK,GAAG,SAAS,EAAE;AAAA,YAClE;AAAA,UACF;AAAA,QACF;AAGA,YAAI,YAAY,SAAS,GAAG;AAC1B,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,uBAAuBC,UAAS,WAAW,CAAC,OAAO;AAC9D,qBAAW,OAAO,aAAa;AAC7B,kBAAM,SAAS,IAAI,SAAS,SAAS,oBAAe;AACpD,kBAAM,KAAK,KAAK,IAAI,IAAI,KAAK,eAAe,IAAI,IAAI,CAAC,IAAI,IAAI,eAAe,MAAM,OAAO,CAAC,IAAI,MAAM,EAAE;AACtG,gBAAI,IAAI,UAAU;AAChB,yBAAW,SAAS,IAAI,SAAS,MAAM,GAAG,EAAE,GAAG;AAC7C,sBAAM,KAAK,0BAAW,MAAM,IAAI,KAAK,eAAe,MAAM,IAAI,CAAC,IAAI,MAAM,eAAe,MAAM,OAAO,CAAC,GAAG;AAAA,cAC3G;AACA,kBAAI,IAAI,SAAS,SAAS,IAAI;AAC5B,sBAAM,KAAK,eAAe,IAAI,SAAS,SAAS,EAAE,OAAO;AAAA,cAC3D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,kBAAkB,MAAM,KAAK,IAAI;AAEvC,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,UAAU;AAAA,UACV,MAAM,UAAU;AAAA,UAChB,MAAM,YAAY,eAAe,UAAU,IAAI,IAAI;AAAA,UACnD,UAAU,YAAY;AAAA,UACtB,gBAAgB;AAAA,UAChB,gBAAgB;AAAA,UAChB,OAAO,aACJ,OAAO,OAAK,EAAE,MAAM,EACpB,IAAI,QAAM,EAAE,MAAM,EAAE,cAAc,OAAO,EAAE,UAAU,EAAE;AAAA,UAC1D;AAAA,QACF;AAAA,MACF,SAAS,OAAgB;AACvB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ADvlBA,IAAMC,aAAYC,WAAUC,KAAI;AAEhC,IAAMC,oBAAmB;AACzB,IAAMC,iBAAgB,IAAI,OAAO;AACjC,IAAM,oBAAoB;AAC1B,IAAM,eAAe;AAuCd,IAAM,iBAAN,cAA6B,SAAuB;AAAA,EAChD,OAAO;AAAA,EACP,OAAO;AAAA,EAEhB,YAAY,OAAgB;AAC1B,UAAM,SAAS,gBAAgB,MAAM;AACrC,SAAK,WAAW;AAAA,EAClB;AAAA,EAEU,gBAAgB,SAAqC;AAC7D,UAAM,eAAe,QAAQ,oBACzB;AAAA;AAAA,EAAwC,QAAQ,iBAAiB;AAAA;AAAA;AAAA,IACjE;AAEJ,WAAO;AAAA;AAAA,qBAEU,QAAQ,gBAAgB;AAAA,EAC3C,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqDZ;AAAA,EAEA,MAAgB,cAAc,SAA+C;AAC3E,UAAM,mBAAmB,QAAQ;AAEjC,UAAM,QAAiB;AAAA,MACrB,MAAMC,MAAK;AAAA,QACT,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,SAASA,IAAE,OAAO,EAAE,SAAS,iCAAiC;AAAA,UAC9D,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,mEAAmE;AAAA,UACxG,UAAUA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,8CAA8C;AAAA,UACvF,YAAYA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,EAAE,SAAS,qCAAqC;AAAA,QAC9F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,SAAS,MAAM,UAAU,WAAW,MAAM;AAC1D,cAAI;AACF,kBAAM,aAAa,OACfC,SAAQ,kBAAkB,IAAI,IAC9B;AAEJ,gBAAI,OAAO,CAAC,MAAM,iBAAiB,cAAc;AAEjD,gBAAI,UAAU;AACZ,mBAAK,KAAK,UAAU,QAAQ;AAAA,YAC9B;AAEA,iBAAK,KAAK,eAAe,OAAO,cAAc,EAAE,CAAC;AACjD,iBAAK,KAAK,MAAM,SAAS,UAAU;AAEnC,kBAAM,EAAE,QAAQ,OAAO,IAAI,MAAMP,WAAU,KAAK,KAAK,GAAG,GAAG;AAAA,cACzD,KAAK;AAAA,cACL,WAAW,IAAI,OAAO;AAAA,cACtB,SAAS;AAAA,YACX,CAAC;AAED,kBAAM,SAAS,eAAe,UAAU,oBAAoBG,iBAAgB;AAC5E,kBAAM,cAAc,UAAU,IAAI,MAAM,IAAI,EAAE,OAAO,OAAO,EAAE;AAE9D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF,SAAS,OAAY;AAEnB,gBAAI,MAAM,SAAS,KAAK,CAAC,MAAM,QAAQ;AACrC,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,QAAQ;AAAA,gBACR,YAAY;AAAA,gBACZ;AAAA,cACF;AAAA,YACF;AACA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,cACb;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MAED,MAAME,MAAK;AAAA,QACT,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,SAASA,IAAE,OAAO,EAAE,SAAS,0DAA0D;AAAA,UACvF,YAAYA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAG,EAAE,SAAS,mCAAmC;AAAA,QAC7F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,SAAS,WAAW,MAAM;AAC1C,cAAI;AAEF,kBAAM,EAAE,OAAO,IAAI,MAAMN;AAAA,cACvB,yBAAyB,QAAQ,QAAQ,OAAO,EAAE,CAAC,2BAA2B,cAAc,GAAG;AAAA,cAC/F;AAAA,gBACE,KAAK;AAAA,gBACL,SAAS;AAAA,cACX;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AAEtD,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,MAAM;AAAA,cACb;AAAA,YACF;AAAA,UACF,SAAS,OAAY;AACnB,mBAAO;AAAA,cACL,SAAS;
AAAA,cACT,OAAO,MAAM;AAAA,cACb;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MAED,WAAWK,MAAK;AAAA,QACd,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,MAAMA,IAAE,OAAO,EAAE,SAAS,8DAA8D;AAAA,UACxF,WAAWA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,0CAA0C;AAAA,UACpF,SAASA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,kDAAkD;AAAA,QAC5F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,MAAM,WAAW,QAAQ,MAAM;AAC/C,cAAI;AACF,kBAAM,eAAeE,YAAW,IAAI,IAChC,OACAD,SAAQ,kBAAkB,IAAI;AAElC,gBAAI,CAACE,aAAW,YAAY,GAAG;AAC7B,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,mBAAmB,IAAI;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAMC,MAAK,YAAY;AACrC,gBAAI,MAAM,OAAON,gBAAe;AAC9B,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,oBAAoB,MAAM,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC;AAAA,cACjE;AAAA,YACF;AAEA,gBAAI,UAAU,MAAMO,UAAS,cAAc,OAAO;AAElD,gBAAI,cAAc,UAAa,YAAY,QAAW;AACpD,oBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,oBAAM,SAAS,aAAa,KAAK;AACjC,oBAAM,MAAM,WAAW,MAAM;AAC7B,wBAAU,MAAM,MAAM,OAAO,GAAG,EAAE,KAAK,IAAI;AAAA,YAC7C;AAEA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,MAAMC,UAAS,kBAAkB,YAAY;AAAA,cAC7C,SAAS,eAAe,SAAST,iBAAgB;AAAA,cACjD,WAAW,QAAQ,MAAM,IAAI,EAAE;AAAA,YACjC;AAAA,UACF,SAAS,OAAY;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MAED,UAAUE,MAAK;AAAA,QACb,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAG,EAAE,SAAS,gDAAgD;AAAA,UAClG,WAAWA,IAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK,EAAE,SAAS,sDAAsD;AAAA,UAChH,UAAUA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,SAAS,qCAAqC;AAAA,QAC3F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,MAAM,WAAW,SAAS,MAAM;AAChD,cAAI;AACF,kBAAM,eAAeE,YAAW,IAAI,IAChC,OACAD,SAAQ,kBAAkB,IAAI;AAElC,gBAAI,CAACE,aAAW,YAAY,GAAG;AAC7B,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,wBAAwB,IAAI;AAAA,cACrC;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAMC,MAAK,YAAY;AACrC,gBAAI,CAAC,MAAM,YAAY,GAAG;AACxB,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,oBAAoB,IAAI;AAAA,cACjC;AAAA,YACF;AAEA,gBAAI,WAAW;AAEb,oBAAM,EAAE,OAAO,IAAI,MAAMV;AAAA,gBACvB,oBAAoB,QAAQ;AAAA,gBAC5B;AAAA,kBACE,KAAK;AAAA,kBACL,SAAS;AAAA,gBACX;AAAA,cACF;AAEA,oBAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO
;AACtD,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,MAAMY,UAAS,kBAAkB,YAAY,KAAK;AAAA,gBAClD;AAAA,gBACA,OAAO,MAAM;AAAA,gBACb,WAAW;AAAA,cACb;AAAA,YACF,OAAO;AACL,oBAAM,UAAU,MAAMC,SAAQ,cAAc,EAAE,eAAe,KAAK,CAAC;AACnE,oBAAM,QAAQ,QAAQ,MAAM,GAAG,GAAG,EAAE,IAAI,QAAM;AAAA,gBAC5C,MAAM,EAAE;AAAA,gBACR,MAAM,EAAE,YAAY,IAAI,cAAc;AAAA,cACxC,EAAE;AAEF,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,MAAMD,UAAS,kBAAkB,YAAY,KAAK;AAAA,gBAClD;AAAA,gBACA,OAAO,MAAM;AAAA,cACf;AAAA,YACF;AAAA,UACF,SAAS,OAAY;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MACD,YAAY,oBAAoB;AAAA,QAC9B;AAAA,MACF,CAAC;AAAA,IACH;AAGA,QAAI;AACF,UAAI,0BAA0B,GAAG;AAC/B,cAAM,WAAW,MAAM,iBAAiB,gBAAgB;AACxD,YAAI,UAAU;AACZ,gBAAM,EAAE,0BAAAE,0BAAyB,IAAI,MAAM;AAC3C,gBAAM,kBAAkBA,0BAAyB;AAAA,YAC/C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA,EAIU,SAAS,SAAsC;AAEvD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,SAA6F;AAErG,UAAM,aAAa,MAAM,KAAK,cAAc,OAAO;AAGnD,UAAM,mBAAmB,KAAK,SAAS,KAAK,IAAI;AAChD,SAAK,WAAW,MAAM;AAEtB,QAAI;AACF,aAAO,MAAM,MAAM,IAAI,OAAO;AAAA,IAChC,UAAE;AACA,WAAK,WAAW;AAAA,IAClB;AAAA,EACF;AAAA,EAEU,YAAY,MAAc,OAAqC;AAUvE,UAAM,WAAqB,CAAC;AAE5B,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,UAAU,KAAK,SAAS;AACxC,iBAAS,KAAK,KAAK,OAAO;AAAA,MAC5B;AAAA,IACF;AAGA,UAAM,cAAc,SAAS,SAAS,IAClC,SAAS,KAAK,MAAM,IACpB;AAGJ,UAAM,WAA4B,CAAC;AACnC,QAAI,gBAAgB;AACpB,QAAI,aAAa;AAEjB,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,iBAAiB,KAAK,YAAY;AAClD,cAAM,SAAS,KAAK;AAEpB,YAAI,KAAK,aAAa,UAAU,OAAO,SAAS;AAC9C,wBAAc,OAAO,cAAc;AAGnC,gBAAM,SAAS,OAAO,UAAU,IAAI,MAAM,IAAI,EAAE,OAAO,OAAO;AAC9D,qBAAW,QAAQ,MAAM,MAAM,GAAG,EAAE,GAAG;AAErC,kBAAM,QAAQ,KAAK,MAAM,sBAAsB;AAC/C,gBAAI,OAAO;AACT,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,MAAM,MAAM,CAAC;AAAA,gBACb,YAAY,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,gBACjC,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,gBACvB,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,WAAW,KAAK,aAAa,UAAU,OAAO,SAAS;AACrD,2BAAiB,OAAO,SAAS;AAEjC,qBAAW,SAAS,OAAO,SAAS,CAAC,GAAG,MAAM,GAAG,EAAE,GAAG;AACpD,qB
AAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM;AAAA,cACN,WAAW;AAAA,YACb,CAAC;AAAA,UACH;AAAA,QACF,WAAW,KAAK,aAAa,eAAe,OAAO,SAAS;AAE1D,gBAAM,UAAU,OAAO,UACnB,eAAe,OAAO,SAAS,GAAG,IAClC;AAEJ,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,OAAO;AAAA,YACb,SAAS;AAAA,YACT,WAAW;AAAA,YACX,SAAS,GAAG,OAAO,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,WAAW,KAAK,aAAa,qBAAqB,OAAO,SAAS;AAEhE,gBAAM,UAAU,OAAO,WAAW,CAAC;AACnC,qBAAW,KAAK,QAAQ,MAAM,GAAG,EAAE,GAAG;AACpC,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,EAAE;AAAA,cACR,YAAY,EAAE;AAAA,cACd,SAAS,EAAE,UAAU,eAAe,EAAE,SAAS,GAAG,IAAI;AAAA,cACtD,WAAW,EAAE,QAAQ,MAAM,SAAS,EAAE,QAAQ,MAAM,WAAW;AAAA,cAC/D,SAAS,EAAE,cAAc,EAAE;AAAA,YAC7B,CAAC;AAAA,UACH;AAAA,QACF,WAAW,KAAK,aAAa,gBAAgB,OAAO,SAAS;AAE3D,wBAAc,OAAO,kBAAkB;AAEvC,cAAI,OAAO,UAAU;AACnB,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,OAAO;AAAA,cACb,YAAY,OAAO;AAAA,cACnB,SAAS,OAAO,WAAW,eAAe,OAAO,UAAU,GAAG,IAAI;AAAA,cAClE,WAAW;AAAA,cACX,SAAS,GAAG,OAAO,QAAQ,QAAQ,GAAG,OAAO,iBAAiB,KAAK,OAAO,cAAc,UAAU,EAAE;AAAA,YACtG,CAAC;AAAA,UACH;AAGA,qBAAW,SAAS,OAAO,SAAS,CAAC,GAAG,MAAM,GAAG,EAAE,GAAG;AACpD,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,KAAK;AAAA,cACX,WAAW;AAAA,cACX,SAAS,KAAK,QAAQ,UAAU,KAAK,KAAK,KAAK;AAAA,YACjD,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,eAAe,aAAa,iBAAiB;AAEnE,WAAO;AAAA,MACL,OAAO;AAAA;AAAA,MACP,SAAS;AAAA,MACT,UAAU,SAAS,MAAM,GAAG,YAAY;AAAA,MACxC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,qBAAqB,OAAgC;AACnE,SAAO,IAAI,eAAe,KAAK;AACjC;;;AFtgBA,IAAM,mBAAmB;AAwClB,SAAS,iBAAiB,SAA4B;AAC3D,SAAOC,OAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAab,aAAaC,IAAE,OAAO;AAAA,MACpB,OAAOA,IAAE,OAAO,EAAE,SAAS,gEAAiE;AAAA,MAC5F,SAASA,IAAE,OAAO,EAAE,SAAS,0YAA0Y;AAAA,IACza,CAAC;AAAA,IAED,SAAS,OAAO,EAAE,OAAO,QAAQ,GAAG,gBAAgB;AAClD,YAAM,aAAc,YAAoB,cAAc,iBAAiB,KAAK,IAAI,CAAC;AAGjF,YAAM,QAAQ,aAAa;AAAA,QACzB,QAAQ;AAAA,QACR,YAAY;AAAA,MACd,CAAC;AAED,UAAI;AACF,cAAM,WAAW,qBAAqB;AAItC,cAAM,WAAW,UACb,GAAG,KAAK;AAAA;AAAA,WAAgB,OAAO,KAC/B;AAIJ,cAAM,SAAS,MAAM,SAAS,IAAI;AAAA,UAChC,MAAM;AAAA,UACN,WAAW,QAA
Q;AAAA,UACnB;AAAA,UACA,kBAAkB,QAAQ;AAAA,UAC1B,YAAY,OAAO,UAAiC;AAElD,gBAAI,MAAM,SAAS,UAAU,MAAM,MAAM;AACvC,oBAAM,QAAQ,aAAa;AAAA,gBACzB,QAAQ;AAAA,gBACR,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM,KAAK;AAAA,gBACrB,aAAa,MAAM,KAAK;AAAA,gBACxB,UAAU,MAAM,KAAK;AAAA,gBACrB,WAAW,MAAM,KAAK;AAAA,gBACtB,YAAY,MAAM,KAAK;AAAA,cACzB,CAAC;AAAA,YACH,WAAW,MAAM,SAAS,YAAY;AACpC,oBAAM,QAAQ,aAAa;AAAA,gBACzB,QAAQ;AAAA,gBACR,YAAY,MAAM;AAAA,gBAClB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH,WAAW,MAAM,SAAS,SAAS;AACjC,oBAAM,QAAQ,aAAa;AAAA,gBACzB,QAAQ;AAAA,gBACR,YAAY,MAAM;AAAA,gBAClB,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC;AAED,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,OAAO,SAAS;AAAA,YACvB,aAAa,OAAO;AAAA,UACtB;AAAA,QACF;AAEA,cAAM,eAAe,OAAO;AAG5B,qBAAa,QAAQ;AASrB,YAAI,kBAAkB;AAItB,YAAI,aAAa,SAAS;AACxB,6BAAmB,aAAa;AAAA,QAClC;AAGA,YAAI,aAAa,SAAS,SAAS,GAAG;AACpC,6BAAmB;AAAA;AAAA,6BAAkC,aAAa,SAAS,MAAM;AAAA;AAEjF,qBAAW,WAAW,aAAa,UAAU;AAC3C,gBAAI,QAAQ,SAAS,SAAS;AAC5B,iCAAmB;AAAA,MAAS,QAAQ,IAAI,IAAI,QAAQ,UAAU,UAAU,eAAe,QAAQ,WAAW,IAAI,GAAG,CAAC;AAAA,YACpH,WAAW,QAAQ,SAAS,YAAY;AACtC,iCAAmB;AAAA,MAAS,QAAQ,IAAI,IAAI,QAAQ,UAAU,iBAAiB,QAAQ,UAAU,IAAI,QAAQ,OAAO,MAAM,EAAE;AAC5H,kBAAI,QAAQ,SAAS;AACnB,mCAAmB;AAAA;AAAA,IAAiB,eAAe,QAAQ,SAAS,GAAG,CAAC;AAAA;AAAA,cAC1E;AAAA,YACF,WAAW,QAAQ,SAAS,QAAQ;AAClC,iCAAmB;AAAA,MAAS,QAAQ,IAAI,MAAM,QAAQ,UAAU,IAAI,QAAQ,OAAO,MAAM,EAAE;AAAA,YAC7F;AAAA,UACF;AAAA,QACF;AAEA,YAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,4BAAkB;AAAA,QACpB;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,aAAa;AAAA,UACpB,SAAS,aAAa;AAAA,UACtB,UAAU,aAAa;AAAA,UACvB,YAAY,aAAa;AAAA,UACzB,eAAe,aAAa;AAAA,UAC5B,iBAAiB,eAAe,iBAAiB,gBAAgB;AAAA,UACjE,aAAa,OAAO;AAAA,UACpB,YAAY,OAAO,MAAM;AAAA,QAC3B;AAAA,MACF,SAAS,OAAY;AACnB,cAAM,QAAQ,aAAa;AAAA,UACzB,QAAQ;AAAA,UACR,OAAO,MAAM;AAAA,QACf,CAAC;AAED,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AIxLA;;;ACRA,SAAS,QAAAC,cAAY;AACrB,SAAS,KAAAC,WAAS;AAClB,OAAO,SAAS;AAEhB,IAAM,MAAM,IAAI,IAAI,EAAE,WAAW,KAAK,CAAC;AAahC,SAAS,uBAAuB,SAAiC;AACtE,
QAAM,WAAW,IAAI,QAAQ,QAAQ,YAAY;AAEjD,SAAOD,OAAK;AAAA,IACV,aACE;AAAA,IACF,aAAaC,IAAE,OAAO;AAAA,MACpB,QAAQA,IACL,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAC9B,SAAS,6DAA6D;AAAA,IAC3E,CAAC;AAAA,IACD,SAAS,OAAO,UAAU;AACxB,YAAM,QAAQ,SAAS,MAAM,MAAM;AACnC,UAAI,CAAC,OAAO;AACV,cAAM,SAAS,SAAS,QAAQ,IAAI,CAAC,OAAO;AAAA,UAC1C,MAAM,EAAE,gBAAgB;AAAA,UACxB,SAAS,EAAE;AAAA,UACX,QAAQ,EAAE;AAAA,QACZ,EAAE;AACF,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,SACE;AAAA,UACF;AAAA,UACA,gBAAgB,QAAQ;AAAA,QAC1B;AAAA,MACF;AAEA,cAAQ,WAAW,EAAE,QAAQ,aAAa,QAAQ,MAAM,OAAO,CAAC;AAChE,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,SAAS,qBAAqB,SAAqD;AACxF,SAAOD,OAAK;AAAA,IACV,aACE;AAAA,IACF,aAAaC,IAAE,OAAO;AAAA,MACpB,QAAQA,IAAE,OAAO,EAAE,SAAS,iDAAiD;AAAA,IAC/E,CAAC;AAAA,IACD,SAAS,OAAO,UAAU;AACxB,cAAQ,WAAW,EAAE,QAAQ,UAAU,OAAO,MAAM,OAAO,CAAC;AAC5D,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,0BAA0B,MAAM,MAAM;AAAA,MACjD;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ACrEA,SAAS,QAAAC,cAAY;AACrB,SAAS,KAAAC,WAAS;AAClB,SAAS,YAAAC,WAAU,QAAAC,aAAY;AAC/B,SAAS,QAAAC,OAAM,YAAAC,WAAU,WAAAC,gBAAe;AAOxC,IAAM,aAAqC;AAAA,EACzC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AACT;AAEO,SAAS,qBAAqB,SAAgC;AACnE,SAAON,OAAK;AAAA,IACV,aAAa;AAAA,IACb,aAAaC,IAAE,OAAO;AAAA,MACpB,MAAMA,IAAE,OAAO,EAAE,SAAS,wEAAwE;AAAA,MAClG,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,sDAAsD;AAAA,IAC7F,CAAC;AAAA,IACD,SAAS,OAAO,UAAU;AACxB,UAAI;AACF,cAAM,EAAE,oBAAAM,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,YAAI,CAACD,oBAAmB,GAAG;AACzB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,cAAM,WAAW,MAAM,KAAK,WAAW,GAAG,IACtC,MAAM,OACNH,MAAK,QAAQ,kBAAkB,MAAM,IAAI;AAG7C,YAAI;AACF,gBAAMD,MAAK,QAAQ;AAAA,QACrB,QAAQ;AACN,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,mBAAmB,MAAM,IAAI;AAAA,UACtC;AAAA,QACF;AAEA,
cAAM,WAAW,MAAM,QAAQE,UAAS,QAAQ;AAChD,cAAM,MAAMC,SAAQ,QAAQ,EAAE,YAAY;AAC1C,cAAM,cAAc,WAAW,GAAG,KAAK;AAGvC,cAAM,aAAa,MAAME,gBAAe;AAAA,UACtC,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAGA,cAAM,WAAW,MAAMN,UAAS,QAAQ;AACxC,cAAM,SAAS,MAAM,MAAM,WAAW,WAAW;AAAA,UAC/C,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,YAAY;AAAA,UACvC,MAAM;AAAA,QACR,CAAC;AAED,YAAI,CAAC,OAAO,IAAI;AACd,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,kBAAkB,OAAO,MAAM,IAAI,OAAO,UAAU;AAAA,UAC7D;AAAA,QACF;AAGA,cAAMM,gBAAe,WAAW,WAAW,QAAQ,EAAE,WAAW,SAAS,OAAO,CAAC;AAGjF,cAAM,eAAe,MAAMA,gBAAe,eAAe,WAAW,MAAM;AAE1E,eAAO;AAAA,UACL,SAAS;AAAA,UACT,QAAQ,WAAW;AAAA,UACnB;AAAA,UACA,WAAW,SAAS;AAAA,UACpB;AAAA,UACA,aAAa,aAAa;AAAA,UAC1B,WAAW,aAAa;AAAA,QAC1B;AAAA,MACF,SAAS,KAAU;AACjB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,kBAAkB,IAAI,OAAO;AAAA,QACtC;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AFzGA;AACA;AA6GA;AArFA,eAAsB,YAAY,SAA+C;AAC/E,QAAM,QAAiB;AAAA,IACrB,MAAM,eAAe;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,WAAW,QAAQ;AAAA,MACnB,UAAU,QAAQ;AAAA,MAClB,YAAY,QAAQ;AAAA,IACtB,CAAC;AAAA,IAED,WAAW,mBAAmB;AAAA,MAC5B,kBAAkB,QAAQ;AAAA,IAC5B,CAAC;AAAA,IAED,YAAY,oBAAoB;AAAA,MAC9B,kBAAkB,QAAQ;AAAA,MAC1B,WAAW,QAAQ;AAAA,MACnB,WAAW,QAAQ,aAAa;AAAA,MAChC,YAAY,QAAQ;AAAA,IACtB,CAAC;AAAA,IAED,MAAM,eAAe;AAAA,MACnB,WAAW,QAAQ;AAAA,IACrB,CAAC;AAAA,IAED,YAAY,oBAAoB;AAAA,MAC9B,WAAW,QAAQ;AAAA,MACnB,mBAAmB,QAAQ;AAAA,IAC7B,CAAC;AAAA,IAED,QAAQ,iBAAiB;AAAA,MACvB,kBAAkB,QAAQ;AAAA,IAC5B,CAAC;AAAA,IAED,eAAe,iBAAiB;AAAA,MAC9B,WAAW,QAAQ;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,YAAY,QAAQ;AAAA,IACtB,CAAC;AAAA,IAED,YAAY,oBAAoB;AAAA,MAC9B,kBAAkB,QAAQ;AAAA,IAC5B,CAAC;AAAA,EACH;AAGA,MAAI,mBAAmB,GAAG;AACxB,UAAM,cAAc,qBAAqB;AAAA,MACvC,kBAAkB,QAAQ;AAAA,MAC1B,WAAW,QAAQ;AAAA,IACrB,CAAC;AAAA,EACH;AAGA,MAAI,QAAQ,yBAAyB,OAAO;AAC1C,QAAI;AACF,UAAI,0BAA0B,GAAG;AAC/B,cAAM,WAAW,MAAM,iBAAiB,QAAQ,gBAAgB;AAChE,YAAI,UAAU;AACZ,gBAAM,kBAAkB,yBAAyB;AAAA,YAC/C,kBAAkB,QAAQ;AAAA,UAC5B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI,QAAQ,WAAW;AACrB,UAAM,gBAAgB,uBAAuB,QAAQ,SAAS;AAC9D,UAAM,cAAc,qBAAqB,QAAQ,SAAS
;AAAA,EAC5D;AAEA,SAAO;AACT;;;AGhHA,SAAS,gBAAAC,qBAAyD;AAElE;;;ACDA;AASA;AAVA,OAAO,QAAQ;AAgBf,SAAS,wBAAgC;AACvC,QAAMC,YAAW,QAAQ;AAEzB,QAAM,SAAS;AAAA;AAAA;AAIf,MAAIA,cAAa,SAAS;AACxB,WAAO,GAAG,MAAM;AAAA;AAAA;AAAA;AAAA,EAIlB;AAGA,SAAO,GAAG,MAAM;AAAA;AAAA;AAAA;AAIlB;AAKA,eAAsB,kBAAkB,SAOpB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc,CAAC;AAAA,IACf;AAAA,EACF,IAAI;AAGJ,MAAI,sBAAsB;AAC1B,MAAI,qBAAqB;AACzB,MAAI,kBAAkB;AACtB,MAAI,wBAAwB;AAE5B,MAAI,kBAAkB;AAEpB,UAAM,EAAE,QAAQ,UAAU,IAAI,IAAI,MAAM,4BAA4B,gBAAgB;AAGpF,0BAAsB,yBAAyB,MAAM;AAGrD,4BAAwB,uBAAuB,QAAQ;AAGvD,UAAM,WAAW,MAAM,aAAa,iBAAiB,YAAY;AACjE,sBAAkB,sBAAsB,QAAQ;AAGhD,QAAI,YAAY,SAAS,GAAG;AAC1B,YAAM,cAAc,MAAM,qBAAqB,KAAK,aAAa,gBAAgB;AACjF,2BAAqB,wBAAwB,WAAW;AAAA,IAC1D;AAAA,EACF,OAAO;AAEL,UAAM,EAAE,eAAAC,eAAc,IAAI,MAAM;AAChC,UAAM,SAAS,MAAMA,eAAc,iBAAiB;AACpD,4BAAwB,uBAAuB,MAAM;AAAA,EACvD;AAGA,QAAM,QAAQ,MAAM,YAAY,aAAa,SAAS;AACtD,QAAM,eAAe,sBAAsB,KAAK;AAGhD,QAAMD,YAAW,QAAQ,aAAa,UAAU,YAAY,QAAQ,aAAa,WAAW,UAAU;AACtG,QAAM,eAAc,oBAAI,KAAK,GAAE,mBAAmB,SAAS,EAAE,SAAS,QAAQ,MAAM,WAAW,OAAO,QAAQ,KAAK,UAAU,CAAC;AAC9H,QAAM,qBAAqB,sBAAsB;AAEjD,QAAM,eAAe;AAAA;AAAA;AAAA,kBAGLA,SAAQ,KAAK,GAAG,QAAQ,CAAC;AAAA,cAC7B,WAAW;AAAA,2BACE,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmLzC,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoDlB,eAAe;AAAA;AAAA,EAEf,mBAAmB;AAAA;AAAA,EAEnB,kBAAkB;AAAA;AAAA;AAAA,EAGlB,qBAAqB;AAAA;AAAA;AAAA,EAGrB,YAAY;AAAA;AAAA,EAEZ,qBAAqB;AAAA,EAA2B,kBAAkB,KAAK,EAAE;AAAA;AAAA;AAIzE,SAAO;AACT;AAKA,SAAS,sBAAsB,OAA2B;AACxD,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO;AAAA,EACT;AAEA,QAAM,cAAsC;AAAA,IAC1C,SAAS;AAAA,IACT,aAAa;AAAA,IACb,WAAW;AAAA,IACX,WAAW;AAAA,EACb;AAEA,QAAM,QAAQ,CAAC,gBAAgB;AAC/B,aAAW,QAAQ,OAAO;AACxB,UAAM,QAAQ,YAAY,KAAK,MAAM,KAAK;AAC1C,UAAM,KAAK,GAAG,KAAK,KAAK,KAAK,EAAE,KAAK,KAAK,OAAO,EAAE;AAAA,EACpD;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAMO,SAAS,wBAAwB,cAA+C;AACrF,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA+DP,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAOvC;AAKO,SAAS,oBAAoB,qBAAqC;AACvE,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASP,mBAAmB;AAAA;AAAA;AAGrB;;;AC9bA,SAAS,0BAA6C;AAMtD,SAAS,sBAAsB,OAAyB;AACtD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO,MAAM,IAAI,qBAAqB;AAChE,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,SAAkC,CAAC;AACzC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAgC,GAAG;AACrE,aAAO,CAAC,IAAI,sBAAsB,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAUA,SAAS,gBAAgB,KAAuB;AAC9C,MAAI,QAAQ,QAAQ,QAAQ,UAAa,OAAO,QAAQ,SAAU,QAAO;AAEzE,QAAM,UAAU;AAGhB,MAAI,CAAC,QAAQ,QAAQ,OAAO,QAAQ,SAAS,SAAU,QAAO;AAG9D,SAAO,sBAAsB,OAAO;AACtC;AAWA,SAAS,iBACP,GACA,GACyC;AACzC,QAAM,YAAY,OAAO,
MAAM;AAC/B,QAAM,YAAY,OAAO,MAAM;AAG/B,MAAI,aAAa,WAAW;AAC1B,WAAO,GAAG,CAAC;AAAA;AAAA,EAAO,CAAC;AAAA,EACrB;AAGA,QAAM,SAAyC,YAC3C,CAAC,EAAE,MAAM,QAAQ,MAAM,EAAE,CAAC,IAC1B,MAAM,QAAQ,CAAC,IACZ,IACD,CAAC;AAEP,QAAM,SAAyC,YAC3C,CAAC,EAAE,MAAM,QAAQ,MAAM,EAAE,CAAC,IAC1B,MAAM,QAAQ,CAAC,IACZ,IACD,CAAC;AAEP,SAAO,CAAC,GAAG,QAAQ,GAAG,MAAM;AAC9B;AAeA,SAAS,yBAAyB,UAA0C;AAC1E,MAAI,SAAS,UAAU,EAAG,QAAO;AAEjC,QAAM,SAAyB,CAAC;AAEhC,aAAW,OAAO,UAAU;AAC1B,UAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AAErC,QAAI,CAAC,QAAS,KAAa,SAAU,IAAY,MAAM;AAErD,aAAO,KAAK,GAAG;AACf;AAAA,IACF;AAGA,UAAM,OAAQ,IAAY;AAE1B,QAAI,SAAS,QAAQ;AACnB,YAAM,gBAAgB,iBAAkB,KAAa,SAAU,IAAY,OAAO;AAClF,aAAO,OAAO,SAAS,CAAC,IAAI,EAAE,MAAM,QAAQ,SAAS,cAAc;AACnE,cAAQ,KAAK,sDAAsD;AAAA,IACrE,WAAW,SAAS,aAAa;AAE/B,YAAM,YAAY,OAAQ,KAAa,YAAY,WAC/C,CAAC,EAAE,MAAM,QAAQ,MAAO,KAAa,QAAQ,CAAC,IAC9C,MAAM,QAAS,KAAa,OAAO,IAChC,KAAa,UACd,CAAC;AACP,YAAM,WAAW,OAAQ,IAAY,YAAY,WAC7C,CAAC,EAAE,MAAM,QAAQ,MAAO,IAAY,QAAQ,CAAC,IAC7C,MAAM,QAAS,IAAY,OAAO,IAC/B,IAAY,UACb,CAAC;AACP,aAAO,OAAO,SAAS,CAAC,IAAI,EAAE,MAAM,aAAa,SAAS,CAAC,GAAG,WAAW,GAAG,QAAQ,EAAE;AACtF,cAAQ,KAAK,2DAA2D;AAAA,IAC1E,WAAW,SAAS,QAAQ;AAE1B,YAAM,cAAc,MAAM,QAAS,KAAa,OAAO,IAAK,KAAa,UAAU,CAAC;AACpF,YAAM,aAAa,MAAM,QAAS,IAAY,OAAO,IAAK,IAAY,UAAU,CAAC;AACjF,aAAO,OAAO,SAAS,CAAC,IAAI,EAAE,MAAM,QAAQ,SAAS,CAAC,GAAG,aAAa,GAAG,UAAU,EAAE;AACrF,cAAQ,KAAK,sDAAsD;AAAA,IACrE,OAAO;AAEL,aAAO,KAAK,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AACT;AAeO,SAAS,sBAAsB,UAA0C;AAG9E,MAAI,WAAW;AACf,aAAW,OAAO,UAAU;AAC1B,QAAI;AACF,yBAAmB,MAAM,GAAG;AAAA,IAC9B,QAAQ;AACN,iBAAW;AACX;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AAEJ,MAAI,UAAU;AACZ,aAAS;AAAA,EACX,OAAO;AAEL,YAAQ,KAAK,0EAA0E;AAEvF,UAAM,YAA4B,CAAC;AACnC,QAAI,cAAc;AAElB,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,MAAM,SAAS,CAAC;AAGtB,UAAI;AACF,2BAAmB,MAAM,GAAG;AAC5B,kBAAU,KAAK,GAAG;AAClB;AAAA,MACF,QAAQ;AAAA,MAER;AAGA,YAAM,QAAQ,gBAAgB,GAAG;AACjC,UAAI;AACF,2BAAmB,MAAM,KAAK;AAC9B,kBAAU,KAAK,KAAK;AACpB;AACA,gBAAQ,KAAK,wCAAwC,CAAC,UAAW,IAAY,IAAI,uCAAuC;AACxH;AAAA,MACF,QAAQ;AAAA,MAER;AAIA,UAAK,IAAY,SAAS,UAAU,MAA
M,QAAS,IAAY,OAAO,GAAG;AACvE,cAAM,eAAiB,IAAY,QAAkB,IAAI,CAAC,SAAc;AACtE,cAAI,KAAK,SAAS,iBAAiB,KAAK,WAAW,QAAW;AAC5D,kBAAM,SAAS,sBAAsB,KAAK,MAAM;AAEhD,gBAAI,UAAU,OAAO,WAAW,YAAY,CAAE,OAAe,MAAM;AACjE,qBAAO,EAAE,GAAG,MAAM,QAAQ,EAAE,MAAM,QAAQ,OAAO,OAAO,EAAE;AAAA,YAC5D;AAEA,kBAAM,aAAa,CAAC,QAAQ,QAAQ,oBAAoB,cAAc,cAAc,SAAS;AAC7F,gBAAI,UAAU,OAAO,WAAW,YAAY,CAAC,WAAW,SAAU,OAAe,IAAI,GAAG;AACtF,qBAAO,EAAE,GAAG,MAAM,QAAQ,EAAE,MAAM,QAAQ,OAAO,OAAO,EAAE;AAAA,YAC5D;AACA,mBAAO,EAAE,GAAG,MAAM,OAAO;AAAA,UAC3B;AACA,iBAAO,sBAAsB,IAAI;AAAA,QACnC,CAAC;AAED,cAAM,aAAa,EAAE,GAAI,KAAa,SAAS,aAAa;AAC5D,YAAI;AACF,6BAAmB,MAAM,UAAU;AACnC,oBAAU,KAAK,UAAU;AACzB;AACA,kBAAQ,KAAK,wCAAwC,CAAC,gDAAgD;AACtG;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAKA,cAAQ;AAAA,QACN,gDAAgD,CAAC,UAAW,IAAY,IAAI,oDAC3B,KAAK,UAAU,OAAO,KAAK,GAAU,CAAC,CAAC;AAAA,MAC1F;AACA,gBAAU,KAAK,GAAG;AAAA,IACpB;AAEA,QAAI,cAAc,GAAG;AACnB,cAAQ,KAAK,mDAAmD,WAAW,IAAI,SAAS,MAAM,WAAW;AAAA,IAC3G;AAEA,aAAS;AAAA,EACX;AAIA,WAAS,yBAAyB,MAAM;AAExC,SAAO;AACT;;;AClRA,IAAM,eAA4C;AAAA,EAChD,6BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,6BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,+BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,4BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,iCAAiC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACpF,yBAAiC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACpF,2BAAiC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACpF,iBAAiC,EAAE,eAAe,OAAS,eAAe,KAAO;AAAA,EACjF,kBAAgC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACnF,aAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,cAAiC,EAAE,eAAe,QAAS,eAAe,IAAO;AACnF;AAEA,IAAM,iBAA8B,EAAE,eAAe,KAAS,eAAe,KAAQ;AAErF,IAAM,kBAA+C;AAAA,EACnD,cAAc,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAC/D,WAAc,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACjE,WAAc,EAAE,eAAe,OAAS,eAAe,KAAO;AAAA,EAC9D,QAAc,EAAE,eAAe,QAAS,eAAe,IAAO;AAChE;AAMO,SAAS,eAAe,SAA8B;AAC3D,QAAM,aAAa,QAAQ,KAAK,EAAE,YAAY;AAE9C,QAAM,QAAQ,aAAa,UAAU;AACrC,MAAI,MAAO,QAAO;AAElB,aAAW,CAAC,QAAQ,MAAM,KAAK,OAAO,QAAQ,eAAe,GAAG;AAC9D,QAAI,WAAW,WAAW,MAAM,EAAG,QAAO;AAAA,EAC5C;AAEA,SAAO;AACT;AAEO,IAAM,sBAAsB;AAG5B,IAAM,uBAAuB;AAM7B,IAAM,uBAAuB;;;AH/BpC,IAAM,yBAAyB;AAE/B,
IAAM,oBAAoB,oBAAI,IAAI;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAgBM,IAAM,iBAAN,MAAqB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAsB,CAAC;AAAA,EAE/B,YAAY,SAAgC;AAC1C,SAAK,YAAY,QAAQ;AACzB,SAAK,UAAU,QAAQ;AACvB,SAAK,kBAAkB,QAAQ;AAC/B,SAAK,qBAAqB,QAAQ;AAClC,SAAK,gBAAgB,QAAQ;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAyC;AAC7C,QAAI,WAAY,MAAM,eAAe,iBAAiB,KAAK,SAAS;AACpE,eAAW,sBAAsB,QAAQ;AAGzC,eAAW,KAAK,qBAAqB,UAAU,KAAK,kBAAkB;AAEtE,QAAI,KAAK,eAAe;AACtB,YAAM,EAAE,cAAc,IAAI,eAAe,KAAK,OAAO;AACrD,YAAM,gBAAgB,KAAK,MAAM,gBAAgB,oBAAoB;AAGrE,iBAAW,MAAM,KAAK,eAAe,UAAU,aAAa;AAG5D,YAAM,KAAK,cAAc,aAAa;AAAA,IACxC;AAGA,QAAI,KAAK,UAAU,SAAS,GAAG;AAC7B,YAAM,iBAAiB,KAAK,UAAU,KAAK,aAAa;AACxD,iBAAW;AAAA,QACT;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,EAAoC,cAAc;AAAA,QAC7D;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAGA,eAAW,kBAAkB,QAAQ;AAErC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,qBACE,UACA,aACkB;AAClB,QAAI,SAAS,UAAU,YAAa,QAAO;AAE3C,UAAM,WAAW,SAAS,SAAS;AACnC,UAAM,gBAAgB,SAAS,MAAM,GAAG,QAAQ;AAChD,UAAM,iBAAiB,SAAS,MAAM,QAAQ;AAG9C,UAAM,qBAAqB,oBAAI,IAAY;AAC3C,UAAM,YAA8B,CAAC;AAErC,eAAW,OAAO,eAAe;AAC/B,YAAM,YAAY,KAAK,eAAe,KAAK,kBAAkB;AAC7D,UAAI,UAAW,WAAU,KAAK,SAAS;AAAA,IACzC;AAGA,QAAI,mBAAmB,OAAO,GAAG;AAC/B,YAAM,UAA4B,CAAC;AACnC,iBAAW,OAAO,WAAW;AAC3B,cAAM,SAAS,yBAAyB,KAAK,kBAAkB;AAC/D,YAAI,OAAQ,SAAQ,KAAK,MAAM;AAAA,MACjC;AACA,aAAO,CAAC,GAAG,SAAS,GAAG,cAAc;AAAA,IACvC;AAEA,WAAO,CAAC,GAAG,WAAW,GAAG,cAAc;AAAA,EACzC;AAAA,EAEQ,eACN,KACA,oBACuB;AACvB,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG,QAAO;AAExC,UAAM,QAAe,CAAC;AACtB,eAAW,QAAQ,IAAI,SAAkB;AAEvC,UAAI,KAAK,SAAS,eAAe,KAAK,aAAa,QAAQ;AACzD,YAAI,KAAK,WAAY,oBAAmB,IAAI,KAAK,UAAU;AAC3D;AAAA,MACF;AAGA,UAAI,KAAK,SAAS,iBAAiB,KAAK,aAAa,QAAQ;AAC3D,YAAI,KAAK,WAAY,oBAAmB,IAAI,KAAK,UAAU;AAC3D;AAAA,MACF;AAGA,UAAI,KAAK,SAAS,eAAe,KAAK,SAAS,WAAY;AAG3D,UAAI,KAAK,SAAS,iBAAiB,kBAAkB,IAAI,KAAK,QAAQ,GAAG;AACvE,cAAM,KAAK,KAAK,eAAe,IAAI,CAAC;AACpC;AAAA,MACF;AAEA,YAAM,KAAK,IAAI;AAAA,IACjB;AAEA,QAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,WAAO,EAAE
,GAAG,KAAK,SAAS,MAAM;AAAA,EAClC;AAAA,EAEQ,eAAe,MAAgB;AACrC,UAAM,UAAU,MAAM,QAAQ,KAAK,MAAM,IAAI,KAAK,SAAS,CAAC,KAAK,MAAM;AACvE,UAAM,iBAAiB,QAAQ,IAAI,CAAC,MAAW;AAC7C,UAAI,OAAO,MAAM,YAAY,EAAE,SAAS,wBAAwB;AAC9D,cAAM,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAClD,eACE,EAAE,MAAM,GAAG,IAAI,IACf;AAAA,cAAiB,EAAE,SAAS,sBAAsB;AAAA,IAClD,EAAE,MAAM,CAAC,IAAI;AAAA,MAEjB;AACA,UAAI,KAAK,OAAO,MAAM,YAAY,OAAO,EAAE,SAAS,YAAY,EAAE,KAAK,SAAS,wBAAwB;AACtG,cAAM,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAClD,eAAO;AAAA,UACL,GAAG;AAAA,UACH,MACE,EAAE,KAAK,MAAM,GAAG,IAAI,IACpB;AAAA,cAAiB,EAAE,KAAK,SAAS,sBAAsB;AAAA,IACvD,EAAE,KAAK,MAAM,CAAC,IAAI;AAAA,QACtB;AAAA,MACF;AACA,aAAO;AAAA,IACT,CAAC;AAED,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ,MAAM,QAAQ,KAAK,MAAM,IAAI,iBAAiB,eAAe,CAAC;AAAA,IACxE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,eACZ,UACA,eAC2B;AAC3B,QAAI,cAAc,sBAAsB,QAAQ;AAEhD,WAAO,cAAc,iBAAiB,SAAS,SAAS,KAAK,oBAAoB;AAE/E,UAAI,cAAc;AAClB,UAAI,WAAW;AACf,YAAM,eAAe,SAAS,SAAS,KAAK;AAE5C,eAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,cAAM,YAAY,KAAK,cAAc,SAAS,CAAC,CAAC;AAChD,uBAAe;AACf,mBAAW,IAAI;AACf,YAAI,eAAe,qBAAsB;AAAA,MAC3C;AAEA,UAAI,aAAa,EAAG;AAEpB,YAAM,QAAQ,SAAS,MAAM,GAAG,QAAQ;AACxC,YAAM,YAAY,SAAS,MAAM,QAAQ;AAEzC,YAAM,UAAU,MAAM,KAAK,eAAe,KAAK;AAC/C,UAAI,SAAS;AACX,aAAK,UAAU,KAAK,OAAO;AAC3B,gBAAQ;AAAA,UACN,wBAAwB,MAAM,MAAM,eAAe,WAAW,iBAAiB,eAAe,OAAO,CAAC;AAAA,QACxG;AAAA,MACF;AAEA,iBAAW;AACX,oBAAc,sBAAsB,QAAQ;AAAA,IAC9C;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,eAAe,OAAiD;AAC5E,UAAM,cAAc,MACjB,IAAI,CAAC,QAAQ;AACZ,YAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,aAAO,IAAI,IAAI,IAAI,MAAM,OAAO;AAAA,IAClC,CAAC,EACA,KAAK,MAAM;AAEd,QAAI;AACF,YAAM,SAAS,MAAME,cAAa;AAAA,QAChC,OAAO,aAAa,mBAAmB;AAAA,QACvC,QAAQ,oBAAoB,WAAW;AAAA,MACzC,CAAC;AACD,aAAO,OAAO;AAAA,IAChB,SAAS,OAAO;AACd,cAAQ,MAAM,yCAAyC,KAAK;AAC5D,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,cAAc,QAA+B;AACzD,QAAI,KAAK,UAAU,UAAU,EAAG;AAEhC,UAAM,qBAAqB,KAAK,UAAU;AAAA,MACxC,CAAC,GAAG,MAAM,IAAI,eAAe,CAAC;AAAA,MAC9B;AAAA,IACF;AAEA,QAAI,s
BAAsB,OAAQ;AAElC,UAAM,WAAW,KAAK,UAAU,KAAK,aAAa;AAElD,QAAI;AACF,YAAM,SAAS,MAAMA,cAAa;AAAA,QAChC,OAAO,aAAa,mBAAmB;AAAA,QACvC,QAAQ,oBAAoB,QAAQ;AAAA,MACtC,CAAC;AAED,cAAQ;AAAA,QACN,oBAAoB,KAAK,UAAU,MAAM,eAAe,kBAAkB,iBAAiB,eAAe,OAAO,IAAI,CAAC;AAAA,MACxH;AAEA,WAAK,YAAY,CAAC,OAAO,IAAI;AAAA,IAC/B,SAAS,OAAO;AACd,cAAQ,MAAM,qCAAqC,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,cAAc,KAA6B;AACjD,UAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,WAAO,eAAe,OAAO,IAAI;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eACJ,SAGe;AACf,UAAM,cAA4B;AAAA,MAChC,MAAM;AAAA,MACN;AAAA,IACF;AACA,UAAM,eAAe,OAAO,KAAK,WAAW,WAAW;AAAA,EACzD;AAAA,EAEA,MAAM,oBAAoB,UAA2C;AACnE,UAAM,eAAe,QAAQ,KAAK,WAAW,QAA0B;AAAA,EACzE;AAAA,EAEA,MAAM,WAMH;AACD,UAAM,WAAY,MAAM,eAAe,iBAAiB,KAAK,SAAS;AAEtE,WAAO;AAAA,MACL,cAAc,SAAS;AAAA,MACvB,cAAc,qBAAqB,QAAQ;AAAA,MAC3C,iBAAiB,sBAAsB,QAAQ;AAAA,MAC/C,YAAY,KAAK,UAAU,SAAS;AAAA,MACpC,cAAc,KAAK,UAAU;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,eAAe,gBAAgB,KAAK,SAAS;AACnD,SAAK,YAAY,CAAC;AAAA,EACpB;AACF;AASA,SAAS,yBACP,KACA,YACuB;AACvB,MAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG,QAAO;AAExC,QAAM,QAAS,IAAI,QAAkB,OAAO,CAAC,SAAS;AACpD,QAAI,KAAK,SAAS,iBAAiB,WAAW,IAAI,KAAK,UAAU,EAAG,QAAO;AAC3E,QAAI,KAAK,SAAS,eAAe,WAAW,IAAI,KAAK,UAAU,EAAG,QAAO;AACzE,WAAO;AAAA,EACT,CAAC;AAED,MAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,SAAO,EAAE,GAAG,KAAK,SAAS,MAAM;AAClC;AAQO,SAAS,kBAAkB,UAA8C;AAE9E,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAEtC,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG;AACjC,eAAW,QAAQ,IAAI,SAAkB;AACvC,UAAI,KAAK,SAAS,eAAe,KAAK,WAAY,aAAY,IAAI,KAAK,UAAU;AACjF,UAAI,KAAK,SAAS,iBAAiB,KAAK,WAAY,eAAc,IAAI,KAAK,UAAU;AAAA,IACvF;AAAA,EACF;AAGA,QAAM,gBAAgB,IAAI,IAAI,CAAC,GAAG,WAAW,EAAE,OAAO,CAAC,OAAO,CAAC,cAAc,IAAI,EAAE,CAAC,CAAC;AACrF,QAAM,kBAAkB,IAAI,IAAI,CAAC,GAAG,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAEvF,MAAI,cAAc,SAAS,KAAK,gBAAgB,SAAS,EAAG,QAAO;AAEnE,MAAI,cAAc,OAAO,GAAG;AAC1B,YAAQ,KAAK,0BAA0B,cAAc,IAAI,gDAAgD;AAAA,EAC3G;AACA,MAAI,gBAAgB,OAAO,GAAG;AAC5B,YAAQ,K
AAK,0BAA0B,gBAAgB,IAAI,gDAAgD;AAAA,EAC7G;AAEA,QAAM,WAA6B,CAAC;AAEpC,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,GAAG;AAC/B,eAAS,KAAK,GAAG;AACjB;AAAA,IACF;AAEA,UAAM,QAAS,IAAI,QAAkB,OAAO,CAAC,SAAS;AACpD,UAAI,KAAK,SAAS,eAAe,cAAc,IAAI,KAAK,UAAU,EAAG,QAAO;AAC5E,UAAI,KAAK,SAAS,iBAAiB,gBAAgB,IAAI,KAAK,UAAU,EAAG,QAAO;AAChF,aAAO;AAAA,IACT,CAAC;AAED,QAAI,MAAM,WAAW,EAAG;AACxB,aAAS,KAAK,EAAE,GAAG,KAAK,SAAS,MAAM,CAAmB;AAAA,EAC5D;AAEA,SAAO;AACT;;;AzBxaA;AAGA,IAAM,uBAAuB,IAAI;AACjC,IAAM,qBAAqB,IAAI;AAE/B,SAAS,uBAAuB,OAAyD;AACvF,QAAM,MAAM,EAAE,GAAG,MAAM;AACvB,aAAW,OAAO,CAAC,WAAW,cAAc,YAAY,GAAY;AAClE,UAAM,MAAM,IAAI,GAAG;AACnB,QAAI,OAAO,QAAQ,YAAY,IAAI,SAAS,sBAAsB;AAChE,UAAI,GAAG,IAAI,GAAG,IAAI,MAAM,GAAG,kBAAkB,CAAC;AAAA;AAC9C,UAAI,GAAG,GAAG,WAAW,IAAI;AACzB,UAAI,GAAG,GAAG,QAAQ,IAAI,IAAI;AAAA,IAC5B;AAAA,EACF;AACA,SAAO;AACT;AAGA,IAAM,oBAAoB,oBAAI,IAI3B;AA+CI,IAAM,QAAN,MAAM,OAAM;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA,mBAA+C,oBAAI,IAAI;AAAA,EAEvD,YAAY,SAAkB,SAAyB,OAAgB;AAC7E,SAAK,UAAU;AACf,SAAK,UAAU;AACf,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,yBAAyB,SAElB;AACnB,UAAM,SAAS,UAAU;AACzB,WAAO,YAAY;AAAA,MACjB,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,gBAAgB,QAAQ,iBACpB,CAAC,aAAa,QAAQ,eAAgB,EAAE,UAAU,QAAQ,MAAM,SAAS,CAAC,IAC1E;AAAA,MACJ,qBAAqB,QAAQ,iBACzB,CAAC,aAAa,QAAQ,eAAgB,EAAE,UAAU,cAAc,MAAM,SAAS,CAAC,IAChF;AAAA,MACJ,kBAAkB,QAAQ,iBACtB,CAAC,aAAa,QAAQ,eAAgB,EAAE,UAAU,iBAAiB,MAAM,SAAS,CAAC,IACnF;AAAA,IACN,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,OAAO,UAAwB,CAAC,GAAmB;AAC9D,UAAM,SAAS,UAAU;AAGzB,QAAI;AAEJ,QAAI,QAAQ,WAAW;AACrB,YAAM,WAAW,MAAM,eAAe,QAAQ,QAAQ,SAAS;AAC/D,UAAI,CAAC,UAAU;AACb,cAAM,IAAI,MAAM,sBAAsB,QAAQ,SAAS,EAAE;AAAA,MAC3D;AACA,gBAAU;AAAA,IACZ,OAAO;AACL,gBAAU,MAAM,eAAe,OAAO;AAAA,QACpC,MAAM,QAAQ;AAAA,QACd,kBAAkB,QAAQ,oBAAoB,OAAO;AAAA,QACrD,OAAO,QAAQ,SAAS,OAAO;AAAA,QAC/B,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAAA,IACH;AAGA,UAAM,UAAU,IAAI,eAAe;AAAA,MACjC,WAAW,QAAQ;AAAA,MACnB,SAAS,QAAQ,SAAS,OAAO;AAAA,MACjC,iBAAiB,OAAO,SAAS,YAAY;AAAA,MAC7C,oBAAoB
,OAAO,SAAS,sBAAsB;AAAA,MAC1D,eAAe,OAAO,SAAS,iBAAiB;AAAA,IAClD,CAAC;AAGD,UAAM,QAAQ,MAAM,YAAY;AAAA,MAC9B,WAAW,QAAQ;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,mBAAmB,OAAO;AAAA,IAC5B,CAAC;AAED,WAAO,IAAI,OAAM,SAAS,SAAS,KAAK;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,wBACN,QACA,aAC2I;AAC3I,QAAI,CAAC,eAAe,YAAY,WAAW,GAAG;AAC5C,aAAO;AAAA,IACT;AAGA,UAAM,eAAiJ,CAAC;AAIxJ,UAAM,yBAAyB,YAC5B,IAAI,CAAC,GAAG,MAAM;AACb,YAAM,OAAO,EAAE,YAAY,cAAc,IAAI,CAAC;AAC9C,YAAM,YAAY,EAAE,SAAS,UAAU,UAAU;AACjD,YAAM,WAAW,EAAE,aAAa;AAChC,aAAO,GAAG,IAAI,CAAC,KAAK,SAAS,MAAM,IAAI,eAAe,QAAQ;AAAA,IAChE,CAAC,EACA,KAAK,IAAI;AAEZ,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN,MAAM;AAAA,EAA2F,sBAAsB;AAAA;AAAA;AAAA,IACzH,CAAC;AAGD,QAAI,QAAQ;AACV,mBAAa,KAAK,EAAE,MAAM,QAAQ,MAAM;AAAA;AAAA,EAAqB,MAAM,GAAG,CAAC;AAAA,IACzE;AAGA,eAAW,cAAc,aAAa;AACpC,UAAI,WAAW,SAAS,SAAS;AAC/B,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,WAAW;AAAA;AAAA,UAClB,WAAW,WAAW;AAAA,UACtB,UAAU,WAAW;AAAA,UACrB,WAAW,WAAW;AAAA,QACxB,CAAC;AAAA,MACH,OAAO;AACL,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,MAAM,WAAW;AAAA,UACjB,WAAW,WAAW,aAAa;AAAA,UACnC,UAAU,WAAW;AAAA,UACrB,WAAW,WAAW;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,SAAsD;AACjE,UAAM,SAAS,UAAU;AAGzB,UAAM,cAAc,KAAK,wBAAwB,QAAQ,QAAQ,QAAQ,WAAW;AAGpF,QAAI,CAAC,QAAQ,qBAAqB;AAChC,WAAK,QAAQ,eAAe,WAAW;AAAA,IACzC;AAGA,UAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,QAAQ;AAG3D,UAAM,eAAe,MAAM,kBAAkB;AAAA,MAC3C,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,OAAO;AAAA;AAAA,MAEzB,aAAa,CAAC;AAAA,IAChB,CAAC;AAGD,UAAM,WAAW,MAAM,KAAK,QAAQ,YAAY;AAGhD,UAAM,QAAQ,QAAQ,iBAClB,MAAM,KAAK,yBAAyB,EAAE,gBAAgB,QAAQ,eAAe,CAAC,IAC9E,KAAK;AAGT,UAAM,eAAe,KAAK,sBAAsB,SAAS,KAAK;AAG9D,UAAM,eAAe,iBAAiB,KAAK,QAAQ,KAAK;AACxD,UAAM,SAASC,YAAW;AAAA,MACxB,OAAO,aAAa,KAAK,QAAQ,KAAK;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA,OAAO;AAAA,MACP,UAAUC,aAAY,GAAG;AAAA;AAAA,MAEzB,aAAa,QAAQ;AAAA;AAAA,MAEr
B,iBAAiB,eACb;AAAA,QACE,WAAW;AAAA,UACT,eAAe;AAAA,UACf,UAAU;AAAA,YACR,MAAM;AAAA,YACN,cAAc;AAAA,UAChB;AAAA,QACF;AAAA,MACF,IACA;AAAA,MACJ,cAAc,OAAO,SAAS;AAC5B,gBAAQ,eAAe,IAAW;AAAA,MACpC;AAAA,MACA,SAAS,CAAC,EAAE,MAAM,MAAM;AACtB,gBAAQ,UAAU,EAAE,MAAM,CAAC;AAAA,MAC7B;AAAA,IACF,CAAC;AAGD,UAAM,uBAAuB,YAAY;AACvC,YAAM,SAAS,MAAM;AACrB,YAAM,WAAW,MAAM,OAAO;AAC9B,YAAM,mBAAmB,SAAS;AAClC,WAAK,QAAQ,oBAAoB,gBAAgB;AAAA,IACnD;AAEA,WAAO;AAAA,MACL,WAAW,KAAK,QAAQ;AAAA,MACxB;AAAA,MACA,kBAAkB,MAAM,KAAK,iBAAiB;AAAA,MAC9C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,SAAuF;AAC/F,UAAM,SAAS,UAAU;AAGzB,SAAK,QAAQ,eAAe,QAAQ,MAAM;AAG1C,UAAM,eAAe,MAAM,kBAAkB;AAAA,MAC3C,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,OAAO;AAAA,MACzB,aAAa,CAAC;AAAA,IAChB,CAAC;AAGD,UAAM,WAAW,MAAM,KAAK,QAAQ,YAAY;AAGhD,UAAM,QAAQ,QAAQ,iBAClB,MAAM,KAAK,yBAAyB,EAAE,gBAAgB,QAAQ,eAAe,CAAC,IAC9E,KAAK;AAGT,UAAM,eAAe,KAAK,sBAAsB,SAAS,KAAK;AAE9D,UAAM,eAAe,iBAAiB,KAAK,QAAQ,KAAK;AACxD,UAAM,SAAS,MAAMC,cAAa;AAAA,MAChC,OAAO,aAAa,KAAK,QAAQ,KAAK;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA,OAAO;AAAA,MACP,UAAUD,aAAY,GAAG;AAAA;AAAA,MAEzB,iBAAiB,eACb;AAAA,QACE,WAAW;AAAA,UACT,UAAU;AAAA,YACR,MAAM;AAAA,YACN,cAAc;AAAA,UAChB;AAAA,QACF;AAAA,MACF,IACA;AAAA,IACN,CAAC;AAGD,UAAM,mBAAmB,OAAO,SAAS;AACzC,SAAK,QAAQ,oBAAoB,gBAAgB;AAEjD,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,QAAQ,SAUwF;AACpG,UAAM,SAAS,UAAU;AACzB,UAAM,gBAAgB,QAAQ,WAAW,iBAAiB;AAC1D,UAAM,aAAa,QAAQ,WAAW;AAEtC,UAAM,cAAc,CAAC,MAA4B,SAAkB;AACjE,UAAI,CAAC,WAAY;AACjB,kBAAY,YAAY;AAAA,QACtB;AAAA,QACA,QAAQ,KAAK,QAAQ;AAAA,QACrB,WAAW,KAAK,QAAQ;AAAA,QACxB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,aAAsD,EAAE,QAAQ,KAAK;AAC3E,UAAM,aAAa,CAAC,WAAiC;AACnD,iBAAW,SAAS;AAAA,IACtB;AAGA,QAAI,eAAsE;AAC1E,UAAM,YAAY,KAAK,QAAQ;AAE/B,UAAM,OAAO,QAAQ;AAErB,UAAM,sBAAsB,CAAC,aAA+B;AAC1D,cAAQ,iBAAiB,EAAE,UAAU,QAAQ,MAAM,SAAS,CAAC;AAC7D,UAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,UAAU,QAAQ,MAAM,SAA
S,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAG1G,YAAM,OAAO,SAAS;AACtB,UAAI,QAAQ,SAAS,WAAW,WAAW;AACzC,kFAAqC,KAAK,CAAC,EAAE,kBAAAE,kBAAiB,MAAM;AAClE,gBAAM,QAAQA,kBAAiB,WAAW,IAAI;AAC9C,cAAI,CAAC,cAAc;AACjB,8EAAiC,KAAK,CAAC,EAAE,eAAAC,eAAc,MAAM;AAC3D,6BAAe,IAAIA,eAAc,SAAS;AAC1C,2BAAa,MAAM;AAAA,YACrB,CAAC;AAAA,UACH;AACA,cAAI,MAAM,cAAc,OAAO,MAAM,GAAG;AACtC,kBAAM,GAAG,SAAS,CAAC,UAAU;AAC3B,4BAAc,SAAS,KAAK;AAC5B,kBAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,MAAM,MAAM,MAAM,UAAU,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,cAAC,CAAC;AAAA,YACtH,CAAC;AACD,kBAAM,GAAG,UAAU,CAAC,MAAW;AAC7B,kBAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,kBAAkB,GAAG,EAAE,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,cAAC,CAAC;AAAA,YACjF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,YAAY,MAAM,YAAY;AAAA,MAClC,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,gBAAgB;AAAA,MAChB,qBAAqB,CAAC,aAAa;AACjC,gBAAQ,iBAAiB,EAAE,UAAU,cAAc,MAAM,SAAS,CAAC;AACnE,YAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,UAAU,cAAc,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,QAAC,CAAC;AAAA,MAClH;AAAA,MACA,kBAAkB,CAAC,aAAa;AAC9B,gBAAQ,iBAAiB,EAAE,UAAU,iBAAiB,MAAM,SAAS,CAAC;AACtE,YAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,UAAU,iBAAiB,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,QAAC,CAAC;AAAA,MACrH;AAAA,MACA,WAAW;AAAA,QACT,cAAc,QAAQ,WAAW;AAAA,QACjC;AAAA,MACF;AAAA,IACF,CAAC;AAGD,UAAM,mBAAmB,MAAM,kBAAkB;AAAA,MAC/C,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,OAAO;AAAA,MACzB,aAAa,CAAC;AAAA,IAChB,CAAC;AACD,UAAM,eAAe,wBAAwB,QAAQ,WAAW,YAAY;AAC5E,UAAM,eAAe,GAAG,gBAAgB;AAAA;AAAA,EAAO,YAAY;AAE3D,gBAAY,gBAAgB,EAAE,QAAQ,QAAQ,OAAO,CAAC;AAEtD,QAAI,MAAM;AACR,YAAM,KAAK,KAAK,UAAU,EAAE,MAAM,qBAAqB,MAAM,EAAE,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,SAAS,QAAQ,OAAO,EAAE,CAAC,CAAC;AAAA,IACvH;AAGA,UAAM,KAAK,QAAQ,eAAe,QAAQ,MAAM;AAEhD,QAAI,YAAY;AAEhB,WAAO,YAAY,eAAe;AAChC;AAEA,UAAI,QAAQ,aAAa,SAAS;AAChC,cAAM,cAAc;AACpB,oBAAY,eAAe,EAAE,QAAQ,UAAU,OAAO,aAAa,YAAY,UAAU,CAAC;AAC1F,eAAO,EAAE,QAAQ,UAAU,OAAO,aAAa,YAAY,UAAU;AAAA,MACvE;AAEA,YAAM,WAAW,
MAAM,KAAK,QAAQ,YAAY;AAChD,YAAM,eAAe,iBAAiB,KAAK,QAAQ,KAAK;AAGxD,UAAI,MAAM;AACR,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,IAAI,CAAC,GAAG,CAAC,CAAC;AAAA,MAC9E;AAEA,UAAI,cAAc;AAClB,UAAI,SAAS,QAAQ,KAAK,IAAI,CAAC;AAC/B,UAAI,cAAc,aAAa,KAAK,IAAI,CAAC;AACzC,UAAI,mBAAmB;AACvB,YAAM,iBAAiB,oBAAI,IAAY;AAEvC,YAAM,aAAaJ,YAAW;AAAA,QAC5B,OAAO,aAAa,KAAK,QAAQ,KAAK;AAAA,QACtC,QAAQ;AAAA,QACR;AAAA,QACA,OAAO;AAAA,QACP,UAAUC,aAAY,GAAG;AAAA,QACzB,aAAa,QAAQ;AAAA,QACrB,iBAAiB,eACb;AAAA,UACE,WAAW;AAAA,YACT,eAAe;AAAA,YACf,UAAU,EAAE,MAAM,WAAW,cAAc,IAAM;AAAA,UACnD;AAAA,QACF,IACA;AAAA,QACJ,cAAc,OAAO,SAAc;AACjC,kBAAQ,eAAe,IAAI;AAC3B,sBAAY,sBAAsB,EAAE,WAAW,MAAM,KAAK,KAAK,CAAC;AAChE,cAAI,MAAM;AACR,gBAAI,aAAa;AACf,oBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAC3D,4BAAc;AACd,uBAAS,QAAQ,KAAK,IAAI,CAAC;AAAA,YAC7B;AACA,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AAAA,UACpD;AAAA,QACF;AAAA,MACF,CAAC;AAGD,uBAAiB,QAAQ,WAAW,YAAY;AAC9C,YAAI,KAAK,SAAS,cAAc;AAC9B,cAAI,MAAM;AACR,gBAAI,CAAC,aAAa;AAChB,oBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,OAAO,CAAC,CAAC;AAC7D,4BAAc;AAAA,YAChB;AACA,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UACjF;AAAA,QACF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,YAAY,CAAC,CAAC;AACvE,+BAAmB;AAAA,UACrB;AAAA,QACF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,aAAa,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UAC3F;AAAA,QACF,WAAW,KAAK,SAAS,iBAAiB;AACxC,cAAI,QAAQ,kBAAkB;AAC5B,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AACrE,+BAAmB;AACnB,0BAAc,aAAa,KAAK,IAAI,CAAC;AAAA,UACvC;AAAA,QACF,WAAY,KAAa,SAAS,6BAA6B;AAC7D,cAAI,MAAM;AACR,kBAAM,IAAI;AACV,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,oBAAoB,YAAY,EAAE,YAAY,UAAU,EAAE,SAAS,CAAC,CAAC;AACvG,2BAAe,IAAI,EAAE,UAAU;AAAA,UACjC;AAAA,QACF,WAAY,KAAa,SAAS,mBAAmB;AACnD,cAAI,MAAM;AACR,kBAAM,IAAI;AACV,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,oBAAoB,YAAY,EAAE,YAAY,eAAe,EAAE,cAAc,CAAC,CAAC;AAAA,UACnH;AAAA,QACF,WAAW,KAAK,SAAS,aAAa;AACpC,cAAI,MAAM;AACR,gBA
AI,CAAC,eAAe,IAAI,KAAK,UAAU,GAAG;AACxC,oBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,oBAAoB,YAAY,KAAK,YAAY,UAAU,KAAK,SAAS,CAAC,CAAC;AAC7G,6BAAe,IAAI,KAAK,UAAU;AAAA,YACpC;AACA,kBAAM,YAAY,KAAK,aAAa,gBAAgB,KAAK,SAAS,OAAO,KAAK,UAAU,WACpF,uBAAuB,KAAK,KAAgC,IAC5D,KAAK;AACT,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,wBAAwB,YAAY,KAAK,YAAY,UAAU,KAAK,UAAU,OAAO,UAAU,CAAC,CAAC;AAAA,UACrI;AAAA,QACF,WAAW,KAAK,SAAS,eAAe;AACtC,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,yBAAyB,YAAY,KAAK,YAAY,QAAQ,KAAK,OAAO,CAAC,CAAC;AAAA,UAChH;AAAA,QACF,WAAW,KAAK,SAAS,SAAS;AAChC,kBAAQ,MAAM,sBAAsB,KAAK,KAAK;AAC9C,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,KAAK,EAAE,CAAC,CAAC;AAAA,UAC7E;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,aAAa;AACvB,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAAA,MAC7D;AACA,UAAI,QAAQ,kBAAkB;AAC5B,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AAAA,MACvE;AAGA,YAAM,eAAe,MAAM,WAAW;AACtC,YAAM,mBAAmB,aAAa;AACtC,YAAM,KAAK,QAAQ,oBAAoB,gBAAgB;AAEvD,YAAM,aAAa,MAAM,WAAW;AACpC,YAAM,cAAc,MAAM,WAAW;AAGrC,UAAI,YAAY;AACd,gBAAQ,SAAS,UAAU;AAC3B,oBAAY,gBAAgB,EAAE,WAAW,MAAM,WAAW,CAAC;AAAA,MAC7D;AAGA,iBAAW,QAAQ,aAAa;AAC9B,YAAK,KAAa,WAAW;AAC3B,qBAAW,MAAO,KAAa,WAAW;AACxC,oBAAQ,aAAa,EAAE,YAAY,GAAG,YAAY,UAAU,GAAG,UAAU,OAAO,GAAG,KAAK,CAAC;AACzF,wBAAY,kBAAkB,EAAE,WAAW,UAAU,GAAG,UAAU,YAAY,GAAG,YAAY,OAAO,GAAG,KAAK,CAAC;AAAA,UAC/G;AAAA,QACF;AACA,YAAK,KAAa,aAAa;AAC7B,qBAAW,MAAO,KAAa,aAAa;AAC1C,oBAAQ,eAAe,EAAE,YAAY,GAAG,YAAY,UAAU,GAAG,UAAU,QAAQ,GAAG,OAAO,CAAC;AAC9F,wBAAY,oBAAoB,EAAE,WAAW,UAAU,GAAG,UAAU,YAAY,GAAG,YAAY,QAAQ,GAAG,OAAO,CAAC;AAAA,UACpH;AAAA,QACF;AAAA,MACF;AAGA,UAAI,WAAW,QAAQ;AACrB,cAAM,MAAM,WAAW;AACvB,cAAM,cAAc,IAAI;AAGxB,YAAI;AACJ,YAAI,gBAAgB,eAAe,IAAI,UAAU,OAAO,IAAI,WAAW,UAAU;AAC/E,gBAAM,YAAY,IAAI;AACtB,gBAAM,YAAY,MAAM,QAAQ,UAAU,KAAK,IAAI,UAAU,QAAoB,CAAC;AAClF,cAAI,UAAU,SAAS,GAAG;AACxB,uBAAW,MAAM,KAAK,gBAAgB,SAAS;AAAA,UACjD;AAAA,QACF;AAGA,cAAM,gBAAgB,MAAM,KAAK,oBAAoB,YAAY;AAEjE,cAAM,cAAc,CAAC,GAAI,YAAY,CAAC,GAAI,GAAG,aAAa;AAE1D,cAAM,YAAY,gBAAgB,cAAc,mBAAmB;AACnE,oBAAY,WAAmC;AAAA,UA
C7C,QAAQ;AAAA,UACR,QAAQ,IAAI;AAAA,UACZ,OAAO,IAAI;AAAA,UACX,YAAY;AAAA,UACZ,UAAU,YAAY,SAAS,IAAI,cAAc;AAAA,UACjD,sBAAsB,cAAc,SAAS,IAAI,gBAAgB;AAAA,QACnE,CAAC;AAGD,cAAMI,eAA0B;AAAA,UAC9B,GAAG,QAAQ;AAAA,UACX,QAAQ;AAAA,UACR,QAAQ,IAAI;AAAA,UACZ,OAAO,IAAI;AAAA,UACX,YAAY;AAAA,QACd;AACA,cAAM,eAAe,OAAO,KAAK,QAAQ,IAAI;AAAA,UAC3C,QAAQ,EAAE,GAAG,KAAK,QAAQ,QAAQ,MAAMA,aAAY;AAAA,QACtD,CAAC;AAED,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,QAAQ,IAAI;AAAA,UACZ,OAAO,IAAI;AAAA,UACX,YAAY;AAAA,QACd;AAAA,MACF;AAGA,YAAM,qBAAqB;AAC3B,UAAI,MAAM;AACR,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,qBAAqB,MAAM,EAAE,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,SAAS,mBAAmB,EAAE,CAAC,CAAC;AAAA,MAC3H;AACA,YAAM,KAAK,QAAQ,eAAe,kBAAkB;AAAA,IACtD;AAGA,UAAM,eAAe,gCAAgC,aAAa;AAClE,UAAM,uBAAuB,MAAM,KAAK,oBAAoB,YAAY;AACxE,gBAAY,eAAe;AAAA,MACzB,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,sBAAsB,qBAAqB,SAAS,IAAI,uBAAuB;AAAA,IACjF,CAAC;AAED,UAAM,cAA0B;AAAA,MAC9B,GAAG,QAAQ;AAAA,MACX,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,YAAY;AAAA,IACd;AACA,UAAM,eAAe,OAAO,KAAK,QAAQ,IAAI;AAAA,MAC3C,QAAQ,EAAE,GAAG,KAAK,QAAQ,QAAQ,MAAM,YAAY;AAAA,IACtD,CAAC;AAED,WAAO,EAAE,QAAQ,UAAU,OAAO,cAAc,YAAY,UAAU;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,oBACZ,UACmB;AAEnB,QAAI;AACF,YAAM,EAAE,cAAAC,cAAa,IAAI,MAAM;AAC/B,MAAAA,cAAa,KAAK,QAAQ,EAAE;AAAA,IAC9B,QAAQ;AAAA,IAAC;AAET,QAAI,CAAC,YAAY,SAAS,eAAe,GAAG;AAC1C,gBAAU,MAAM;AAChB,aAAO,CAAC;AAAA,IACV;AAEA,aAAS,KAAK;AACd,QAAI;AACF,YAAM,EAAE,oBAAAC,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,UAAI,CAACD,oBAAmB,GAAG;AAAE,iBAAS,MAAM;AAAG,eAAO,CAAC;AAAA,MAAG;AAE1D,YAAM,SAAS,MAAM,SAAS,OAAO;AACrC,eAAS,MAAM;AACf,UAAI,CAAC,OAAQ,QAAO,CAAC;AAErB,YAAM,EAAE,UAAAE,YAAU,QAAAC,QAAO,IAAI,MAAM,OAAO,aAAkB;AAE5D,YAAM,aAAa,MAAMF,gBAAe;AAAA,QACtC,KAAK,QAAQ;AAAA,QACb,qBAAqB,KAAK,IAAI,CAAC;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AAEA,YAAM,WAAW,MAAMC,WAAS,OAAO,IAAI;AAC3C,YAAM,MAAM,WAAW,WAAW;AAAA,QAChC,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,YAAY;AAAA,QACvC,MAAM;AAAA,MACR,CAAC;AACD,YAAMD,gBAAe,WAAW,WAAW,QAAQ,EAAE,WAAW,OAAO,UAAU,CAAC;AAElF,YAAM,SAAS,MAAMA,gBAAe,eAAe,WAAW,MAAM;AACpE,YAAME,QAAO,OAAO,IAA
I,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAExC,cAAQ,IAAI,sCAAsC,OAAO,SAAS,SAAS;AAC3E,aAAO,CAAC,OAAO,WAAW;AAAA,IAC5B,SAAS,KAAU;AACjB,cAAQ,MAAM,8CAA8C,IAAI,OAAO;AACvE,eAAS,MAAM;AACf,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBAAgB,WAAwC;AACpE,QAAI;AACF,YAAM,EAAE,oBAAAH,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,UAAI,CAACD,oBAAmB,EAAG,QAAO,CAAC;AAEnC,YAAM,EAAE,UAAAE,WAAS,IAAI,MAAM,OAAO,aAAkB;AACpD,YAAM,EAAE,MAAAE,QAAM,UAAAC,UAAS,IAAI,MAAM,OAAO,MAAW;AAEnD,YAAM,OAAiB,CAAC;AAExB,iBAAW,YAAY,WAAW;AAChC,YAAI;AACF,gBAAM,WAAW,SAAS,WAAW,GAAG,IACpC,WACAD,OAAK,KAAK,QAAQ,kBAAkB,QAAQ;AAChD,gBAAM,WAAWC,UAAS,QAAQ;AAGlC,gBAAM,MAAM,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AACxD,gBAAM,UAAkC;AAAA,YACtC,KAAK;AAAA,YAAmB,MAAM;AAAA,YAAoB,KAAK;AAAA,YACvD,KAAK;AAAA,YAAc,IAAI;AAAA,YAAiB,MAAM;AAAA,YAC9C,KAAK;AAAA,YAAa,KAAK;AAAA,YAAc,MAAM;AAAA,YAC3C,KAAK;AAAA,YAAa,KAAK;AAAA,YAAiB,KAAK;AAAA,YAC7C,KAAK;AAAA,UACP;AACA,gBAAM,cAAc,QAAQ,GAAG,KAAK;AAEpC,gBAAM,aAAa,MAAMJ,gBAAe;AAAA,YACtC,KAAK,QAAQ;AAAA,YACb;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,WAAW,MAAMC,WAAS,QAAQ;AACxC,gBAAM,MAAM,WAAW,WAAW;AAAA,YAChC,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,YAAY;AAAA,YACvC,MAAM;AAAA,UACR,CAAC;AAED,gBAAMD,gBAAe,WAAW,WAAW,QAAQ,EAAE,WAAW,SAAS,OAAO,CAAC;AAEjF,gBAAM,eAAe,MAAMA,gBAAe,eAAe,WAAW,MAAM;AAC1E,eAAK,KAAK,aAAa,WAAW;AAElC,kBAAQ,IAAI,yBAAyB,QAAQ,KAAK,SAAS,MAAM,SAAS;AAAA,QAC5E,SAAS,KAAU;AACjB,kBAAQ,MAAM,gCAAgC,QAAQ,KAAK,IAAI,OAAO;AAAA,QACxE;AAAA,MACF;AAEA,aAAO;AAAA,IACT,SAAS,KAAU;AACjB,cAAQ,MAAM,8BAA8B,IAAI,OAAO;AACvD,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,SAA0B,OAA0B;AAChF,UAAM,gBAAgB,KAAK,QAAQ;AACnC,UAAM,eAAwB,CAAC;AAC/B,UAAM,cAAc,SAAS,KAAK;AAElC,eAAW,CAAC,MAAM,YAAY,KAAK,OAAO,QAAQ,WAAW,GAAG;AAC9D,YAAM,gBAAgB,iBAAiB,MAAM,iBAAiB,MAAS;AAEvE,UAAI,CAAC,eAAe;AAClB,qBAAa,IAAI,IAAI;AACrB;AAAA,MACF;AAGA,mBAAa,IAAI,IAAIK,OAAK;AAAA,QACxB,aAAa,aAAa,eAAe;AAAA,QACzC,aAAc,aAAqB,eAAeC,IAAE,OAAO,CAAC,CAAC;AAAA,QAC7D,SAAS,OAAO,OAAgB,gBAAyC;AACvE,gBAAM,aAAa,YAAY,cAAcC,QAAO;AAGpD,gBAAM,YAAY,qBAAqB,OAAO;AAAA,YAC5C,
WAAW,KAAK,QAAQ;AAAA,YACxB,UAAU;AAAA,YACV;AAAA,YACA;AAAA,YACA,kBAAkB;AAAA,YAClB,QAAQ;AAAA,UACV,CAAC;AAGD,eAAK,iBAAiB,IAAI,YAAY,MAAM,SAAS;AAGrD,kBAAQ,qBAAqB,MAAM,SAAS;AAG5C,gBAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,SAAS;AAG5D,gBAAM,WAAW,MAAM,IAAI,QAAiB,CAACC,cAAY;AACvD,8BAAkB,IAAI,YAAY,EAAE,SAAAA,WAAS,WAAW,KAAK,QAAQ,GAAG,CAAC;AAAA,UAC3E,CAAC;AAGD,gBAAM,eAAe,kBAAkB,IAAI,UAAU;AACrD,4BAAkB,OAAO,UAAU;AACnC,eAAK,iBAAiB,OAAO,UAAU;AAEvC,gBAAMC,QAAO,MAAM;AACnB,cAAI,CAAC,UAAU;AAEb,kBAAM,SAAS,cAAc,UAAU;AACvC,kBAAM,qBAAqB,OAAOA,MAAK,EAAE;AACzC,kBAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,QAAQ;AAE3D,mBAAO;AAAA,cACL,QAAQ;AAAA,cACR;AAAA,cACA,UAAU;AAAA,cACV;AAAA,cACA,SAAS,SAAS,IAAI,uCAAuC,MAAM;AAAA,YACrE;AAAA,UACF;AAGA,gBAAM,qBAAqB,QAAQA,MAAK,EAAE;AAC1C,gBAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,QAAQ;AAE3D,cAAI;AACF,kBAAM,SAAS,MAAO,aAAqB,QAAQ,OAAO,WAAW;AACrE,kBAAM,qBAAqB,SAASA,MAAK,IAAI,MAAM;AACnD,mBAAO;AAAA,UACT,SAAS,OAAY;AACnB,kBAAM,qBAAqB,SAASA,MAAK,IAAI,MAAM,MAAM,OAAO;AAChE,kBAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,mBAA6C;AACjD,WAAO,MAAM,KAAK,KAAK,iBAAiB,OAAO,CAAC;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,YAAiD;AAE7D,UAAM,WAAW,kBAAkB,IAAI,UAAU;AACjD,QAAI,UAAU;AACZ,eAAS,QAAQ,IAAI;AACrB,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,UAAM,gBAAgB,MAAM,qBAAqB,oBAAoB,KAAK,QAAQ,EAAE;AACpF,UAAM,YAAY,cAAc,KAAK,CAAC,MAAqB,EAAE,eAAe,UAAU;AAEtF,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,sCAAsC,UAAU,EAAE;AAAA,IACpE;AAGA,UAAM,qBAAqB,QAAQ,UAAU,EAAE;AAC/C,WAAO,EAAE,UAAU,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,YAAoB,QAA8C;AAE7E,UAAM,WAAW,kBAAkB,IAAI,UAAU;AACjD,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,eAAS,QAAQ,KAAK;AACtB,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,UAAM,gBAAgB,MAAM,qBAAqB,oBAAoB,KAAK,QAAQ,EAAE;AACpF,UAAM,YAAY,cAAc,KAAK,CAAC,MAAqB,EAAE,eAAe,UAAU;AAEtF,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,sCAAsC,UAAU,EAAE;AAAA,IACpE;AAGA,UAAM,qBAAqB,OAAO,UAAU,EAAE;AAC9C,WAAO,EAAE,UAAU,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBAAgD;AACpD,WAAO,qBAAqB,oBAAoB,KAAK,QAAQ,EAAE;AAAA,EACjE;AAAA;AAAA;AAA
A;AAAA,EAKA,kBAAkB;AAChB,WAAO,KAAK,QAAQ,SAAS;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,eAAqB;AACnB,SAAK,QAAQ,MAAM;AAAA,EACrB;AACF;;;AD3+BA;;;A8BMA,IAAM,uBAAuB,oBAAI,IAA6B;AAKvD,SAAS,mBAAmB,WAAmB,SAAgC;AACpF,uBAAqB,IAAI,WAAW,OAAO;AAC7C;AAKO,SAAS,mBAAmB,WAA2C;AAC5E,uBAAqB;AACrB,SAAO,qBAAqB,IAAI,SAAS,KAAK;AAChD;AAKO,SAAS,uBAA6B;AAC3C,QAAM,MAAM,KAAK,IAAI;AACrB,aAAW,CAAC,WAAW,GAAG,KAAK,sBAAsB;AACnD,QAAI,MAAM,IAAI,cAAc,QAAQ,IAAI,KAAK,KAAM;AACjD,2BAAqB,OAAO,SAAS;AAAA,IACvC;AAAA,EACF;AACF;;;A9B7BA,IAAM,WAAW,IAAI,KAAK;AAM1B,IAAM,oBAAoB,oBAAI,IAA+C;AAG7E,SAAS,uBAAuB;AAC9B,QAAM,MAAM,KAAK,IAAI;AACrB,aAAW,CAAC,WAAW,KAAK,KAAK,mBAAmB;AAClD,QAAI,MAAM,MAAM,UAAU,QAAQ,IAAI,IAAI,KAAK,KAAM;AACnD,wBAAkB,OAAO,SAAS;AAAA,IACpC;AAAA,EACF;AACF;AAGA,IAAM,sBAAsBC,IAAE,OAAO;AAAA,EACnC,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AAAA,EACtC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,eAAeA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAAE,SAAS;AAC5D,CAAC;AAED,IAAM,wBAAwBA,IAAE,OAAO;AAAA,EACrC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,QAAQA,IAAE,OAAO,EAAE,SAAS;AAC9B,CAAC;AAED,IAAM,sBAAsBA,IAAE,OAAO;AAAA,EACnC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAC7B,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,SAAS,qBAAqB;AAAA,EACzC,OAAO,MAAM;AACX,UAAM,QAAQ,EAAE,IAAI,MAAM,OAAO;AACjC,UAAM,QAAQ,SAAS,MAAM,SAAS,IAAI;AAC1C,UAAM,SAAS,SAAS,MAAM,UAAU,GAAG;AAE3C,UAAM,cAAc,MAAM,eAAe,KAAK,OAAO,MAAM;AAG3D,UAAM,yBAAyB,MAAM,QAAQ,IAAI,YAAY,IAAI,OAAO,MAAM;AAC5E,YAAM,eAAe,MAAM,oBAAoB,eAAe,EAAE,EAAE;AAClE,aAAO;AAAA,QACL,IAAI,EAAE;AAAA,QACN,MAAM,EAAE;AAAA,QACR,kBAAkB,EAAE;AAAA,QACpB,OAAO,EAAE;AAAA,QACT,QAAQ,EAAE;AAAA,QACV,QAAQ,EAAE;AAAA,QACV,aAAa,CAAC,CAAC;AAAA,QACf,WAAW,EAAE,UAAU,YAAY;AAAA,QACnC,WAAW,EAAE,UAAU,YAAY;AAAA,MACrC;AAAA,IACF,CAAC,CAAC;AAEF,WAAO,EAAE,KAAK;AAAA,MACZ,UAAU;AAAA,MACV,OAAO,YAAY;AAAA,MACnB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAGA,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,mBAAmB;AAAA,EACtC,OAAO,MAAM;AACX,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,SAAS,UAAU;AAEzB,UAAM,QAAQ,MAAM,MAAM,OAAO;AAAA,MAC/B,MAAM,KAAK;AAAA,MACX,kBAAkB,KAAK,oB
AAoB,OAAO;AAAA,MAClD,OAAO,KAAK,SAAS,OAAO;AAAA,MAC5B,eAAe,KAAK,gBAAgB,EAAE,eAAe,KAAK,cAAc,IAAI;AAAA,IAC9E,CAAC;AAED,UAAM,UAAU,MAAM,WAAW;AAEjC,WAAO,EAAE,KAAK;AAAA,MACZ,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,kBAAkB,QAAQ;AAAA,MAC1B,OAAO,QAAQ;AAAA,MACf,QAAQ,QAAQ;AAAA,MAChB,WAAW,QAAQ,UAAU,YAAY;AAAA,IAC3C,GAAG,GAAG;AAAA,EACR;AACF;AAGA,SAAS,IAAI,QAAQ,OAAO,MAAM;AAChC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAE/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,eAAe,OAAO,YAAY;AACtC,UAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,WAAW,GAAG,CAAC;AAClD,WAAO,MAAM,gBAAgB;AAAA,EAC/B,GAAG;AAEH,QAAM,QAAQ,MAAM,YAAY,aAAa,EAAE;AAC/C,QAAM,mBAAmB,MAAM,qBAAqB,oBAAoB,EAAE;AAE1E,SAAO,EAAE,KAAK;AAAA,IACZ,IAAI,QAAQ;AAAA,IACZ,MAAM,QAAQ;AAAA,IACd,kBAAkB,QAAQ;AAAA,IAC1B,OAAO,QAAQ;AAAA,IACf,QAAQ,QAAQ;AAAA,IAChB,QAAQ,QAAQ;AAAA,IAChB,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,SAAS;AAAA,IACT,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,MACvB,IAAI,EAAE;AAAA,MACN,SAAS,EAAE;AAAA,MACX,QAAQ,EAAE;AAAA,MACV,OAAO,EAAE;AAAA,IACX,EAAE;AAAA,IACF,kBAAkB,iBAAiB,IAAI,CAAC,OAAO;AAAA,MAC7C,IAAI,EAAE;AAAA,MACN,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,MACZ,OAAO,EAAE;AAAA,IACX,EAAE;AAAA,EACJ,CAAC;AACH,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,SAAS,mBAAmB;AAAA,EACvC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,QAAQ,EAAE,IAAI,MAAM,OAAO;AACjC,UAAM,QAAQ,SAAS,MAAM,SAAS,KAAK;AAE3C,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,WAAW,MAAM,eAAe,mBAAmB,IAAI,KAAK;AAElE,WAAO,EAAE,KAAK;AAAA,MACZ,WAAW;AAAA,MACX,UAAU,SAAS,IAAI,CAAC,OAAO;AAAA,QAC7B,IAAI,EAAE;AAAA,QACN,GAAG,EAAE;AAAA;AAAA,QACL,WAAW,EAAE,UAAU,YAAY;AAAA,MACrC,EAAE;AAAA,MACF,OAAO,SAAS;AAAA,IAClB,CAAC;AAAA,EACH;AACF;AAGA,SAAS,IAAI,cAAc,OAAO,MAAM;AACtC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,aAAa,MAAM,qBAAqB,aAAa,EAAE;AAE7D,SAAO,EAAE,KAAK;AA
AA,IACZ,WAAW;AAAA,IACX,YAAY,WAAW,IAAI,CAAC,OAAO;AAAA,MACjC,IAAI,EAAE;AAAA,MACN,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,MACZ,OAAO,EAAE;AAAA,MACT,QAAQ,EAAE;AAAA,MACV,QAAQ,EAAE;AAAA,MACV,kBAAkB,EAAE;AAAA,MACpB,OAAO,EAAE;AAAA,MACT,WAAW,EAAE,UAAU,YAAY;AAAA,MACnC,aAAa,EAAE,aAAa,YAAY;AAAA,IAC1C,EAAE;AAAA,IACF,OAAO,WAAW;AAAA,EACpB,CAAC;AACH,CAAC;AAGD,IAAM,sBAAsBA,IAAE,OAAO;AAAA,EACnC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,eAAeA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAAE,SAAS;AAC5D,CAAC;AAED,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,mBAAmB;AAAA,EACtC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAE/B,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,UAAM,UAAqE,CAAC;AAC5E,QAAI,KAAK,MAAO,SAAQ,QAAQ,KAAK;AACrC,QAAI,KAAK,SAAS,OAAW,SAAQ,OAAO,KAAK;AAGjD,QAAI,KAAK,kBAAkB,QAAW;AACpC,YAAM,iBAAiB,QAAQ,UAAU,CAAC;AAC1C,YAAM,wBAAwB,eAAe,iBAAiB,CAAC;AAC/D,cAAQ,SAAS;AAAA,QACf,GAAG;AAAA,QACH,eAAe;AAAA,UACb,GAAG;AAAA,UACH,GAAG,KAAK;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAEA,UAAM,iBAAiB,OAAO,KAAK,OAAO,EAAE,SAAS,IAChD,MAAM,eAAe,OAAO,IAAI,OAAO,KAAM,UAC9C;AAEJ,WAAO,EAAE,KAAK;AAAA,MACZ,IAAI,eAAe;AAAA,MACnB,MAAM,eAAe;AAAA,MACrB,OAAO,eAAe;AAAA,MACtB,QAAQ,eAAe;AAAA,MACvB,kBAAkB,eAAe;AAAA,MACjC,QAAQ,eAAe;AAAA,MACvB,WAAW,eAAe,UAAU,YAAY;AAAA,IAClD,CAAC;AAAA,EACH;AACF;AAGA,SAAS,OAAO,QAAQ,OAAO,MAAM;AACnC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAG3B,MAAI;AACF,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,SAAS;AACX,YAAM,cAAc,MAAW,aAAa;AAC5C,iBAAW,OAAO,aAAa;AAC7B,cAAM,OAAO,MAAW,QAAQ,KAAK,QAAQ,gBAAgB;AAC7D,YAAI,QAAQ,KAAK,cAAc,IAAI;AACjC,gBAAW,aAAa,GAAG;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAAA,EACF,SAAS,GAAG;AAAA,EAEZ;AAGA,yBAAuB,EAAE;AAEzB,QAAM,UAAU,MAAM,eAAe,OAAO,EAAE;AAC9C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,GAAG,CAAC;AACrC,CAAC;AAGD,SAAS,KAAK,cAAc,OAAO,MAAM;AACvC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAA
I,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,WAAW,GAAG,CAAC;AAClD,QAAM,MAAM,aAAa;AAEzB,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,WAAW,GAAG,CAAC;AAChD,CAAC;AAMD,IAAM,qBAAqBA,IAAE,OAAO;AAAA,EAClC,MAAMA,IAAE,OAAO;AACjB,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,kBAAkB;AAAA,EACrC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,EAAE,KAAK,IAAI,EAAE,IAAI,MAAM,MAAM;AAGnC,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,sBAAkB,IAAI,IAAI,EAAE,MAAM,WAAW,oBAAI,KAAK,EAAE,CAAC;AAGzD,yBAAqB;AAErB,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,WAAW,GAAG,CAAC;AAAA,EAChD;AACF;AAGA,SAAS,IAAI,sBAAsB,OAAO,MAAM;AAC9C,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAG3B,uBAAqB;AAErB,QAAM,UAAU,kBAAkB,IAAI,EAAE;AACxC,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,iBAAiB,OAAO,MAAM,KAAK,CAAC;AAAA,EACtD;AAGA,oBAAkB,OAAO,EAAE;AAE3B,SAAO,EAAE,KAAK;AAAA,IACZ,iBAAiB;AAAA,IACjB,MAAM,QAAQ;AAAA,IACd,WAAW,QAAQ,UAAU,YAAY;AAAA,EAC3C,CAAC;AACH,CAAC;AAMD,IAAM,wBAAwBA,IAAE,OAAO;AAAA,EACrC,KAAKA,IAAE,OAAO;AAAA,EACd,MAAMA,IAAE,OAAO;AAAA,EACf,UAAUA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,aAAaA,IAAE,OAAO,EAAE,SAAS;AAAA,EACjC,cAAcA,IAAE,OAAO,EAAE,SAAS;AAAA,EAClC,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,qBAAqB;AAAA,EACxC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,uBAAmB,IAAI;AAAA,MACrB,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,UAAU,KAAK,YAAY,KAAK;AAAA,MAChC,aAAa,KAAK;AAAA,MAClB,cAAc,KAAK;AAAA,MACnB,kBAAkB,KAAK;AAAA,MACvB,eAAe,oBAAI,KAAK;AAAA,IAC1B,CAAC;AAED,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,WAAW,GAAG,CAAC;AAAA,EAChD;AACF;AAGA,SAAS,IAAI,yBAAyB,OAAO,MAAM;AACjD,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,MAAM,mBAAe,EAAE;AAC7B,MAAI,CAAC,KAAK;AACR,WAAO,EAAE,KAAK,EAAE,WAAW,OAAO,SAAS,KAAK,CAAC;AAAA,EACnD;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,SAAS;AAAA,MACP,KAAK,IAAI;AAAA,MACT,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,aAAa,IAAI;AAAA,MACjB,cAAc,IAAI;AAAA,MAClB,kBAAkB,IAAI;
AAAA,MACtB,eAAe,IAAI,cAAc,YAAY;AAAA,IAC/C;AAAA,EACF,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,cAAc,OAAO,MAAM;AACtC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,QAAQ,MAAM,YAAY,aAAa,EAAE;AAC/C,QAAM,UAAU,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,SAAS;AAC9E,QAAM,aAAa,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,aAAa;AACrF,QAAM,YAAY,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,WAAW;AAClF,QAAM,YAAY,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,WAAW;AAGlF,QAAM,WAAW,WAAW,CAAC,KAAK,QAAQ,CAAC,KAAK;AAEhD,SAAO,EAAE,KAAK;AAAA,IACZ,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,MACvB,IAAI,EAAE;AAAA,MACN,SAAS,EAAE;AAAA,MACX,QAAQ,EAAE;AAAA,MACV,OAAO,EAAE;AAAA,MACT,WAAW,EAAE,UAAU,YAAY;AAAA,MACnC,WAAW,EAAE,UAAU,YAAY;AAAA,IACrC,EAAE;AAAA,IACF,OAAO;AAAA,MACL,OAAO,MAAM;AAAA,MACb,SAAS,QAAQ;AAAA,MACjB,YAAY,WAAW;AAAA,MACvB,WAAW,UAAU;AAAA,MACrB,WAAW,UAAU;AAAA,IACvB;AAAA,IACA,UAAU,WAAW;AAAA,MACnB,IAAI,SAAS;AAAA,MACb,SAAS,SAAS;AAAA,MAClB,QAAQ,SAAS;AAAA,IACnB,IAAI;AAAA,EACN,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,oBAAoB,OAAO,MAAM;AAC5C,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,eAAe,EAAE;AAE3C,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,aAAa,YAAY,IAAI,CAAC,QAAQ;AAAA,MACpC,IAAI,GAAG;AAAA,MACP,iBAAiB,GAAG;AAAA,MACpB,SAAS,GAAG;AAAA,MACZ,WAAW,GAAG,UAAU,YAAY;AAAA,IACtC,EAAE;AAAA,IACF,OAAO,YAAY;AAAA,EACrB,CAAC;AACH,CAAC;AAGD,SAAS,KAAK,6BAA6B,OAAO,MAAM;AACtD,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,eAAe,EAAE,IAAI,MAAM,cAAc;AAE/C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AACvE,MAAI,cAAc;AAChB,WAAO,EAAE,KAAK;AAAA,MACZ,OAAO;AAAA,MACP,UAAU,aAAa;AAAA,IACzB,GAAG,GAAG;AAAA,EACR;AAEA,QAAM,SAAS,MAAM,mBAAmB,WAAW,YAAY;AAE/D,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,KAAK,EAAE,OAAO,OAAO,MAAM,GAAG,GAAG;AAAA,EAC5C;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA,eAAe,OAA
O;AAAA,IACtB,cAAc,OAAO;AAAA,IACrB,iBAAiB,OAAO;AAAA,IACxB,oBAAoB,OAAO;AAAA,EAC7B,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,aAAa,OAAO,MAAM;AACrC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAM,eAAe,EAAE;AAEpC,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,OAAO,KAAK,MAAM,IAAI,CAAC,OAAO;AAAA,MAC5B,MAAM,EAAE;AAAA,MACR,QAAQ,EAAE;AAAA,MACV,aAAa,EAAE,oBAAoB;AAAA,MACnC,YAAY,EAAE,mBAAmB;AAAA;AAAA;AAAA;AAAA,IAInC,EAAE;AAAA,IACF,SAAS;AAAA,MACP,SAAS,KAAK,MAAM,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAAA,MACxD,UAAU,KAAK,MAAM,OAAO,OAAK,EAAE,WAAW,UAAU,EAAE;AAAA,MAC1D,SAAS,KAAK,MAAM,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAAA,MACxD,OAAO,KAAK,MAAM;AAAA,IACpB;AAAA,EACF,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,uBAAuB,OAAO,MAAM;AAC/C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,WAAW,mBAAmB,EAAE,IAAI,MAAM,UAAU,CAAC;AAE3D,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAM,eAAe,SAAS;AAC3C,QAAM,WAAW,KAAK,MAAM,KAAK,OAAK,EAAE,SAAS,QAAQ;AAEzD,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,KAAK,EAAE,OAAO,yBAAyB,GAAG,GAAG;AAAA,EACxD;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,MAAM,SAAS;AAAA,IACf,QAAQ,SAAS;AAAA,IACjB,iBAAiB,SAAS;AAAA,IAC1B,gBAAgB,SAAS;AAAA,EAC3B,CAAC;AACH,CAAC;AASD,SAAS,kBAAkB,WAA2B;AACpD,QAAM,aAAa,oBAAoB;AACvC,SAAOC,MAAK,YAAY,eAAe,SAAS;AAClD;AAKA,SAAS,qBAAqB,WAA2B;AACvD,QAAM,MAAM,kBAAkB,SAAS;AACvC,MAAI,CAACC,aAAW,GAAG,GAAG;AACpB,IAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAGA,SAAS,IAAI,oBAAoB,OAAO,MAAM;AAC5C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,MAAM,kBAAkB,SAAS;AACvC,MAAI,CAACD,aAAW,GAAG,GAAG;AACpB,WAAO,EAAE,KAAK,EAAE,WAAW,aAAa,CAAC,GAAG,OAAO,EAAE,CAAC;AAAA,EACxD;AAEA,QAAM,QAAQ,YAAY,GAAG;AAC7B,QAAM,cAAc,MAAM,IAAI,CAAC,aAAa;AAC1C,UAAM,WAAWD,MAAK,KAAK,QAAQ;AACnC,UAAM,QAAQG,UAAS,QAAQ;AAC/B,WAAO;AAAA,MACL,IAAI,SAAS,MAAM,GAAG,EAAE
,CAAC;AAAA;AAAA,MACzB;AAAA,MACA,MAAM;AAAA,MACN,MAAM,MAAM;AAAA,MACZ,WAAW,MAAM,UAAU,YAAY;AAAA,IACzC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA;AAAA,IACA,OAAO,YAAY;AAAA,EACrB,CAAC;AACH,CAAC;AAGD,SAAS,KAAK,oBAAoB,OAAO,MAAM;AAC7C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,cAAc,EAAE,IAAI,OAAO,cAAc,KAAK;AAGpD,MAAI,YAAY,SAAS,qBAAqB,GAAG;AAC/C,QAAI;AACF,YAAM,WAAW,MAAM,EAAE,IAAI,SAAS;AACtC,YAAM,OAAO,SAAS,IAAI,MAAM;AAEhC,UAAI,CAAC,QAAQ,EAAE,gBAAgB,OAAO;AACpC,eAAO,EAAE,KAAK,EAAE,OAAO,mBAAmB,GAAG,GAAG;AAAA,MAClD;AAEA,YAAM,MAAM,qBAAqB,SAAS;AAC1C,YAAM,KAAKC,QAAO,EAAE;AACpB,YAAM,MAAMC,SAAQ,KAAK,IAAI,KAAK;AAClC,YAAM,eAAe,GAAG,EAAE,IAAIC,UAAS,KAAK,IAAI,EAAE,QAAQ,oBAAoB,GAAG,CAAC;AAClF,YAAM,WAAWN,MAAK,KAAK,YAAY;AAEvC,YAAM,cAAc,MAAM,KAAK,YAAY;AAC3C,MAAAO,eAAc,UAAU,OAAO,KAAK,WAAW,CAAC;AAEhD,aAAO,EAAE,KAAK;AAAA,QACZ;AAAA,QACA,UAAU,KAAK;AAAA,QACf,UAAU;AAAA,QACV,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,WAAW,KAAK;AAAA,QAChB;AAAA,MACF,GAAG,GAAG;AAAA,IACR,SAAS,KAAK;AACZ,cAAQ,MAAM,gCAAgC,GAAG;AACjD,aAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,IACvD;AAAA,EACF;AAGA,MAAI;AACF,UAAM,OAAO,MAAM,EAAE,IAAI,KAAK;AAM9B,QAAI,CAAC,KAAK,YAAY,CAAC,KAAK,MAAM;AAChC,aAAO,EAAE,KAAK,EAAE,OAAO,2BAA2B,GAAG,GAAG;AAAA,IAC1D;AAEA,UAAM,MAAM,qBAAqB,SAAS;AAC1C,UAAM,KAAKH,QAAO,EAAE;AACpB,UAAM,MAAMC,SAAQ,KAAK,QAAQ,KAAK;AACtC,UAAM,eAAe,GAAG,EAAE,IAAIC,UAAS,KAAK,QAAQ,EAAE,QAAQ,oBAAoB,GAAG,CAAC;AACtF,UAAM,WAAWN,MAAK,KAAK,YAAY;AAGvC,QAAI,aAAa,KAAK;AACtB,QAAI,WAAW,SAAS,GAAG,GAAG;AAC5B,mBAAa,WAAW,MAAM,GAAG,EAAE,CAAC;AAAA,IACtC;AAEA,UAAM,SAAS,OAAO,KAAK,YAAY,QAAQ;AAC/C,IAAAO,eAAc,UAAU,MAAM;AAE9B,WAAO,EAAE,KAAK;AAAA,MACZ;AAAA,MACA,UAAU,KAAK;AAAA,MACf,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM,OAAO;AAAA,MACb,WAAW,KAAK;AAAA,MAChB;AAAA,IACF,GAAG,GAAG;AAAA,EACR,SAAS,KAAK;AACZ,YAAQ,MAAM,gCAAgC,GAAG;AACjD,WAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,EACvD;AACF,CAAC;AAGD,SAAS,OAAO,kCAAkC,OAAO,MAAM;AAC7D,QAAM,YAAY,EAAE,IAAI,MAA
M,IAAI;AAClC,QAAM,eAAe,EAAE,IAAI,MAAM,cAAc;AAE/C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,MAAM,kBAAkB,SAAS;AACvC,MAAI,CAACN,aAAW,GAAG,GAAG;AACpB,WAAO,EAAE,KAAK,EAAE,OAAO,uBAAuB,GAAG,GAAG;AAAA,EACtD;AAGA,QAAM,QAAQ,YAAY,GAAG;AAC7B,QAAM,OAAO,MAAM,KAAK,OAAK,EAAE,WAAW,eAAe,GAAG,CAAC;AAE7D,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,KAAK,EAAE,OAAO,uBAAuB,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,WAAWD,MAAK,KAAK,IAAI;AAC/B,aAAW,QAAQ;AAEnB,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,IAAI,aAAa,CAAC;AACnD,CAAC;AAMD,IAAM,mBAAmBD,IAAE,OAAO;AAAA,EAChC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAC3B,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA;AAC7B,CAAC;AAGD,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGD,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAUD,IAAM,sBAAsB;AAS5B,SAAS,aAAa,MAAc,cAAsB,OAAwB;AAChF,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,YAAY,KAAK,YAAY;AACnC,QAAM,YAAY,aAAa,YAAY;AAG3C,MAAI,UAAU,SAAS,UAAU,KAAK,UAAU,SAAS,UAAU,GAAG;AACpE,WAAO;AAAA,EACT;AAGA,QAAM,eAAe,UAAU,MAAM,GAAG;AACxC,aAAW,WAAW,cAAc;AAClC,QAAI,QAAQ,SAAS,UAAU,KAAK,QAAQ,WAAW,UAAU,GAAG;AAClE,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,kBAAkB,MAAc,cAAsB,OAAuB;AACpF,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,YAAY,KAAK,YAAY;AACnC,QAAM,YAAY,aAAa,YAAY;AAG3C,MAAI,cAAc,WAAY,QAAO;AAGrC,MAAI,UAAU,WAAW,UAAU,EAAG,QAAO;AAG7C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAG3C,QAAM,cAAc,UAAU,MAAM,GAAG,EAAE,IAAI,KAAK;AAClD,MAAI,gBAAgB,WAAY,QAAO;AACvC,MAAI,YAAY,WAAW,UAAU,EAAG,QAAO;AAC/C,MAAI,YAAY,SAAS,UAAU,EAAG,QAAO;AAG7C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAG3C,QAAM,WAAW,UAAU,MAAM,GAAG;AACpC,aAAW,WAAW,UAAU;AAC9B,QAAI,QAAQ,WAAW,UAAU,EAAG,QAAO;AAAA,EAC7C;AAEA,SAAO;AACT;AAMA,eAAe,mBACb,SACA,YACA,OACA,OACA,UAA2B,CAAC,GAC5B,QAAgB,GACU;AAE1B,MAAI,QAAQ,uBAAuB,QAAQ,UAAU,QAAQ,G
AAG;AAC9D,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,MAAMS,SAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AAEjE,eAAW,SAAS,SAAS;AAE3B,UAAI,QAAQ,UAAU,QAAQ,EAAG;AAEjC,YAAM,WAAWR,MAAK,YAAY,MAAM,IAAI;AAC5C,YAAM,eAAeS,UAAS,SAAS,QAAQ;AAG/C,UAAI,MAAM,YAAY,KAAK,oBAAoB,IAAI,MAAM,IAAI,GAAG;AAC9D;AAAA,MACF;AAGA,UAAI,MAAM,KAAK,WAAW,GAAG,GAAG;AAC9B;AAAA,MACF;AAGA,YAAM,MAAMJ,SAAQ,MAAM,IAAI,EAAE,YAAY;AAC5C,UAAI,mBAAmB,IAAI,GAAG,GAAG;AAC/B;AAAA,MACF;AAEA,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,aAAa,MAAM,MAAM,cAAc,KAAK,GAAG;AACjD,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,MAAM;AAAA,YACZ,MAAM;AAAA,UACR,CAAC;AAAA,QACH;AAGA,cAAM,mBAAmB,SAAS,UAAU,OAAO,OAAO,SAAS,QAAQ,CAAC;AAAA,MAC9E,WAAW,MAAM,OAAO,GAAG;AAEzB,YAAI,aAAa,MAAM,MAAM,cAAc,KAAK,GAAG;AACjD,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,MAAM;AAAA,YACZ,MAAM;AAAA,YACN,WAAW,OAAO;AAAA,UACpB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAGA,SAAS;AAAA,EACP;AAAA,EACA,WAAW,SAAS,gBAAgB;AAAA,EACpC,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,UAAM,EAAE,QAAQ,IAAI,OAAO,WAAW,KAAK,IAAI,EAAE,IAAI,MAAM,OAAO;AAClE,UAAM,QAAQ,KAAK,IAAI,SAAS,QAAQ,KAAK,IAAI,GAAG;AAEpD,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,mBAAmB,QAAQ;AAEjC,QAAI,CAACJ,aAAW,gBAAgB,GAAG;AACjC,aAAO,EAAE,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,QACP,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAEA,QAAI;AACF,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAGA,eAAS,KAAK,CAAC,GAAG,MAAM;AAEtB,YAAI,OAAO;AACT,gBAAM,SAAS,kBAAkB,EAAE,MAAM,EAAE,MAAM,KAAK;AACtD,gBAAM,SAAS,kBAAkB,EAAE,MAAM,EAAE,MAAM,KAAK;AACtD,cAAI,WAAW,QAAQ;AACrB,mBAAO,SAAS;AAAA,UAClB;AAAA,QACF;AAGA,YAAI,EAAE,SAAS,EAAE,MAAM;AACrB,iBAAO,EAAE,SAAS,WAAW,KAAK;AAAA,QACpC;AAGA,eAAO,EAAE,KAAK,cAAc,EAAE,IAAI;AAAA,MACpC,CAAC;AAGD,YAAM,QAAQ,SAAS,MAAM,GAAG,KAAK;AAErC,aAAO,EAAE,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,OAAO,MAAM;AAAA,QACb;AAAA,MACF,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,cAAQ,MAAM,mCAAmC,GAAG;AACpD,aAAO,EAAE,KAAK;AAAA,QACZ,OAAO;
AAAA,QACP;AAAA,QACA;AAAA,QACA,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,MACT,GAAG,GAAG;AAAA,IACR;AAAA,EACF;AACF;AAMA,SAAS,IAAI,sBAAsB,OAAO,MAAM;AAC9C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,MAAI;AACF,UAAM,EAAE,oBAAAS,oBAAmB,IAAI,MAAM;AACrC,QAAI,CAACA,oBAAmB,GAAG;AACzB,aAAO,EAAE,KAAK,EAAE,OAAO,CAAC,EAAE,CAAC;AAAA,IAC7B;AAEA,UAAM,EAAE,gBAAAC,gBAAe,IAAI,MAAM;AACjC,UAAM,QAAQ,MAAMA,gBAAe,gBAAgB,SAAS;AAC5D,WAAO,EAAE,KAAK,EAAE,WAAW,MAAM,CAAC;AAAA,EACpC,SAAS,KAAU;AACjB,YAAQ,MAAM,gCAAgC,IAAI,OAAO;AACzD,WAAO,EAAE,KAAK,EAAE,WAAW,OAAO,CAAC,EAAE,CAAC;AAAA,EACxC;AACF,CAAC;AAED,SAAS,IAAI,2BAA2B,OAAO,MAAM;AACnD,QAAM,SAAS,EAAE,IAAI,MAAM,QAAQ;AAEnC,MAAI;AACF,UAAM,EAAE,oBAAAD,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,QAAI,CAACD,oBAAmB,GAAG;AACzB,aAAO,EAAE,KAAK,EAAE,OAAO,+BAA+B,GAAG,GAAG;AAAA,IAC9D;AAEA,UAAM,SAAS,MAAMC,gBAAe,eAAe,MAAM;AACzD,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,SAAS,KAAU;AACjB,WAAO,EAAE,KAAK,EAAE,OAAO,IAAI,QAAQ,GAAG,GAAG;AAAA,EAC3C;AACF,CAAC;AAMD,SAAS,IAAI,0BAA0B,OAAO,MAAM;AAClD,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,MAAI;AACF,UAAM,EAAE,oBAAAD,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,QAAI,CAACD,oBAAmB,GAAG;AACzB,aAAO,EAAE,KAAK,EAAE,WAAW,YAAY,CAAC,EAAE,CAAC;AAAA,IAC7C;AAEA,UAAM,QAAQ,MAAMC,gBAAe,gBAAgB,SAAS;AAC5D,UAAM,aAAa,MAAM,OAAO,CAAC,MAAM,EAAE,aAAa,mBAAmB;AAEzE,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,EAAE,KAAK,EAAE,WAAW,YAAY,CAAC,GAAG,SAAS,yCAAyC,CAAC;AAAA,IAChG;AAEA,WAAO,EAAE,KAAK;AAAA,MACZ;AAAA,MACA,YAAY,WAAW,IAAI,CAAC,OAAO;AAAA,QACjC,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,QACZ,WAAW,EAAE;AAAA,QACb,WAAW,EAAE;AAAA,QACb,aAAa,EAAE;AAAA,QACf,WAAW,EAAE;AAAA,MACf,EAAE;AAAA,IACJ,CAAC;AAAA,EACH,SAAS,KAAU;AACjB,YAAQ,MAAM,qCAAqC,IAAI,OAAO;AAC9D,WAAO,EAAE,KAAK,EAAE,WAAW,YAAY,CAAC,GAAG,OAAO,IAAI,QAAQ,CAAC;AAAA,EACjE;AACF,CAAC;;;A+BpkCD;AALA,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,cAAAC,cAAY,aAAAC,YAAW,iBAAAC,sBAAqB;AACrD,SAAS,QAAAC,aAAY;AAGrB;;;ACIA,SAAS,oCAAoC;AAI7C,IAAM,QAAQ,oBAAI,IAAmD;AAGrE,IAAM,WAAW,oBAAI,IAA4C;AAIjE,IAAM,kBAAkB,YAAY,MAAM;AACxC,QAAM,MAAM,KAAK,IAAI;AACrB,aAAW,CAAC,KAAK,IAAI
,KAAK,MAAM,QAAQ,GAAG;AACzC,QAAI,KAAK,aAAa,KAAK,YAAY,KAAK;AAC1C,YAAM,OAAO,GAAG;AAAA,IAClB;AAAA,EACF;AACF,GAAG,GAAK;AACR,gBAAgB,MAAM;AAEtB,IAAI,eAAe;AACnB,IAAI,iBAAiB;AAKrB,IAAM,YAAuB;AAAA,EAC3B,SAAS,YAAY;AAAA,EAErB;AAAA,EAEA,SAAS,OAAO,SAAiB,YAAoB;AACnD,UAAM,cAAc,SAAS,IAAI,OAAO;AACxC;AACA,UAAM,MAAM,KAAK,IAAI;AACrB,QAAI,MAAM,iBAAiB,KAAO;AAChC,cAAQ,IAAI,0CAA0C,YAAY,cAAc,SAAS,IAAI,WAAW,MAAM,IAAI,EAAE;AACpH,uBAAiB;AAAA,IACnB;AACA,QAAI,aAAa;AACf,iBAAW,YAAY,aAAa;AAClC,qBAAa,MAAM,SAAS,OAAO,CAAC;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,OAAO,KAAa,OAAe,YAA8B;AACpE,UAAM,YAAY,SAAS,KAAK,KAAK,IAAI,IAAI,QAAQ,KAAK,MAAO;AACjE,UAAM,IAAI,KAAK,EAAE,OAAO,UAAU,CAAC;AACnC,QAAI,SAAS,IAAI;AACf,iBAAW,MAAM,MAAM,OAAO,GAAG,GAAG,QAAQ,KAAK,GAAI;AAAA,IACvD;AAAA,EACF;AAAA,EAEA,KAAK,OAAO,QAAgB;AAC1B,UAAM,OAAO,MAAM,IAAI,GAAG;AAC1B,QAAI,CAAC,KAAM,QAAO;AAElB,QAAI,KAAK,aAAa,KAAK,YAAY,KAAK,IAAI,GAAG;AACjD,YAAM,OAAO,GAAG;AAChB,aAAO;AAAA,IACT;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,OAAO,QAAgB;AAC3B,UAAM,OAAO,MAAM,IAAI,GAAG;AAC1B,UAAM,UAAU,OAAO,SAAS,KAAK,OAAO,EAAE,IAAI;AAClD,UAAM,QAAQ,MAAM,OAAO,IAAI,IAAI,WAAW;AAC9C,UAAM,IAAI,KAAK,EAAE,OAAO,OAAO,IAAI,GAAG,WAAW,MAAM,UAAU,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAKA,IAAM,aAAyB;AAAA,EAC7B,SAAS,YAAY;AAAA,EAErB;AAAA,EAEA,WAAW,OAAO,SAAiB,aAAwC;AACzE,QAAI,CAAC,SAAS,IAAI,OAAO,GAAG;AAC1B,eAAS,IAAI,SAAS,oBAAI,IAAI,CAAC;AAAA,IACjC;AACA,aAAS,IAAI,OAAO,EAAG,IAAI,QAAQ;AACnC,YAAQ,IAAI,2CAA2C,OAAO,yBAAyB,SAAS,IAAI,OAAO,EAAG,IAAI,GAAG;AAAA,EACvH;AAAA,EAEA,aAAa,OAAO,YAAoB;AACtC,UAAM,QAAQ,SAAS,IAAI,OAAO,GAAG,QAAQ;AAC7C,aAAS,OAAO,OAAO;AACvB,YAAQ,IAAI,+CAA+C,OAAO,cAAc,KAAK,eAAe;AAAA,EACtG;AACF;AAKO,IAAM,gBAAgB,6BAA6B;AAAA;AAAA,EAExD,WAAW,CAAC,YAA8B;AACxC,YAAQ,MAAM,CAAC,QAAQ;AACrB,cAAQ,MAAM,4CAA4C,GAAG;AAAA,IAC/D,CAAC;AAAA,EACH;AAAA,EACA;AAAA,EACA;AACF,CAAC;;;ADjHD,SAAS,UAAAC,eAAc;AAGvB;AACA;AACA;AAIA,IAAM,mBAAmB,oBAAI,IAA2B;AAExD,IAAM,wBAAwB,IAAI;AAClC,IAAM,yBAAyB,IAAI;AACnC,IAAM,sBAAsB,IAAI;AAEhC,SAAS,kBAAkB,UAAkB,OAAyB;AACpE,MAAI,aAAa,gBAAgB,CAAC,SAAS,OAAO,UAAU,UAAU;AACpE,WAAO;AAAA,
EACT;AAEA,QAAM,OAAO;AACb,MAAI,UAAU;AACd,QAAM,OAAgC,EAAE,GAAG,KAAK;AAEhD,QAAM,UAAU,OAAO,KAAK,YAAY,WAAW,KAAK,UAAU;AAClE,MAAI,WAAW,QAAQ,SAAS,uBAAuB;AACrD,SAAK,UAAU,GAAG,QAAQ,MAAM,GAAG,sBAAsB,CAAC;AAAA;AAC1D,SAAK,gBAAgB,QAAQ;AAC7B,SAAK,mBAAmB;AACxB,cAAU;AAAA,EACZ;AAEA,QAAM,YAAY,OAAO,KAAK,eAAe,WAAW,KAAK,aAAa;AAC1E,MAAI,aAAa,UAAU,SAAS,uBAAuB;AACzD,SAAK,aAAa,GAAG,UAAU,MAAM,GAAG,sBAAsB,CAAC;AAAA;AAC/D,SAAK,kBAAkB,UAAU;AACjC,SAAK,qBAAqB;AAC1B,cAAU;AAAA,EACZ;AAEA,QAAM,YAAY,OAAO,KAAK,eAAe,WAAW,KAAK,aAAa;AAC1E,MAAI,aAAa,UAAU,SAAS,uBAAuB;AACzD,SAAK,aAAa,GAAG,UAAU,MAAM,GAAG,sBAAsB,CAAC;AAAA;AAC/D,SAAK,kBAAkB,UAAU;AACjC,SAAK,qBAAqB;AAC1B,cAAU;AAAA,EACZ;AAEA,MAAI,SAAS;AACX,YAAQ,IAAI,oEAAoE;AAAA,EAClF;AAEA,SAAO,UAAU,OAAO;AAC1B;AAEA,SAAS,kBAAkB,OAAwB;AACjD,MAAI;AACF,WAAO,KAAK,UAAU,SAAS,CAAC,CAAC;AAAA,EACnC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAe,2BACb,UACA,gBACA,YACA,UACA,OACe;AACf,MAAI,eAAe,IAAI,UAAU,EAAG;AAEpC,iBAAe,IAAI,UAAU;AAC7B,QAAM,SAAS,KAAK,UAAU;AAAA,IAC5B,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,CAAC,CAAC;AAEF,MAAI,aAAa,aAAc;AAE/B,QAAM,WAAW,kBAAkB,KAAK;AACxC,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK,qBAAqB;AAC7D,UAAM,QAAQ,SAAS,MAAM,GAAG,IAAI,mBAAmB;AACvD,UAAM,SAAS,KAAK,UAAU;AAAA,MAC5B,MAAM;AAAA,MACN;AAAA,MACA,eAAe;AAAA,IACjB,CAAC,CAAC;AACF,UAAM,IAAI,QAAQ,CAACC,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,EACvD;AACF;AAOA,SAAS,wBAAwB,WAA2B;AAC1D,QAAM,MAAM,mBAAmB,SAAS;AACxC,MAAI,CAAC,IAAK,QAAO;AAEjB,QAAM,QAAkB,CAAC;AACzB,QAAM,KAAK,oBAAoB;AAC/B,QAAM,KAAK,gBAAgB,IAAI,GAAG,WAAW,IAAI,IAAI,WAAW,IAAI,QAAQ,MAAM;AAClF,MAAI,IAAI,eAAe,IAAI,cAAc;AACvC,UAAM,KAAK,sBAAsB,IAAI,WAAW,aAAa,IAAI,YAAY,IAAI,IAAI,mBAAmB,SAAS,IAAI,gBAAgB,MAAM,EAAE,KAAK;AAAA,EACpJ;AACA,QAAM,KAAK,qBAAqB;AAChC,SAAO,MAAM,KAAK,IAAI;AACxB;AAMA,SAAS,gCAAgC,WAAmB,QAAwB;AAClF,QAAM,MAAM,wBAAwB,SAAS;AAC7C,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,GAAG,GAAG;AAAA;AAAA,EAAO,MAAM;AAC5B;AAsDA,IAAM,SAAS,IAAIC,MAAK;AAGxB,IAAM,mBAAmBC,IAAE,OAAO;AAAA,EAChC,MAAMA,IAAE,KAAK,CAAC,SAAS,MAAM,CAAC;AAAA,EAC9B,MAAMA,IAAE,OAAO;AAAA;AAAA,EACf,WAAWA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,UAAU
A,IAAE,OAAO,EAAE,SAAS;AAChC,CAAC;AAED,IAAM,kBAAkBA,IAAE,OAAO;AAAA,EAC/B,QAAQA,IAAE,OAAO;AAAA;AAAA,EACjB,aAAaA,IAAE,MAAM,gBAAgB,EAAE,SAAS;AAClD,CAAC,EAAE;AAAA,EACD,CAAC,SAAS,KAAK,OAAO,KAAK,EAAE,SAAS,KAAM,KAAK,eAAe,KAAK,YAAY,SAAS;AAAA,EAC1F,EAAE,SAAS,gDAAgD;AAC7D;AAEA,IAAM,mBAAmBA,IAAE,OAAO;AAAA,EAChC,QAAQA,IAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACxB,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AAAA,EACtC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,eAAeA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAAE,SAAS;AAC5D,CAAC;AAED,IAAM,eAAeA,IAAE,OAAO;AAAA,EAC5B,QAAQA,IAAE,OAAO,EAAE,SAAS;AAC9B,CAAC,EAAE,SAAS;AAGZ,IAAM,yBAAyB,oBAAI,IAA6B;AAehE,SAAS,wBAAwB,WAA2B;AAC1D,QAAM,aAAa,oBAAoB;AACvC,SAAOC,MAAK,YAAY,eAAe,SAAS;AAClD;AAMA,eAAe,qBACb,WACA,YACA,OACiB;AACjB,QAAM,iBAAiB,wBAAwB,SAAS;AAExD,MAAI,CAACC,aAAW,cAAc,GAAG;AAC/B,IAAAC,WAAU,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAAA,EAC/C;AAEA,MAAI,WAAW,WAAW;AAC1B,MAAI,CAAC,UAAU;AACb,UAAM,MAAM,0BAA0B,WAAW,WAAW,WAAW,IAAI;AAC3E,eAAW,cAAc,QAAQ,CAAC,GAAG,GAAG;AAAA,EAC1C;AAEA,MAAI,aAAa,WAAW;AAC5B,MAAI,WAAW,SAAS,GAAG,GAAG;AAC5B,iBAAa,WAAW,MAAM,GAAG,EAAE,CAAC;AAAA,EACtC;AAEA,MAAI,SAAiB,OAAO,KAAK,YAAY,QAAQ;AAErD,MAAI,WAAW,SAAS,SAAS;AAC/B,aAAS,MAAM,oBAAoB,QAAQ,WAAW,SAAS;AAC/D,UAAM,SAAS,WAAW,KAAK,SAAS,GAAG,IAAI,WAAW,KAAK,MAAM,GAAG,EAAE,CAAC,IAAI,MAAM;AACrF,eAAW,OAAO,SAAS,OAAO,SAAS,QAAQ;AAAA,EACrD;AAEA,QAAM,WAAWF,MAAK,gBAAgB,QAAQ;AAC9C,EAAAG,eAAc,UAAU,MAAM;AAE9B,SAAO;AACT;AAKA,SAAS,0BAA0B,WAAoB,MAAiC;AACtF,MAAI,CAAC,WAAW;AACd,WAAO,SAAS,UAAU,SAAS;AAAA,EACrC;AAEA,QAAM,YAAoC;AAAA,IACxC,aAAa;AAAA,IACb,cAAc;AAAA,IACd,aAAa;AAAA,IACb,aAAa;AAAA,IACb,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,oBAAoB;AAAA,IACpB,0BAA0B;AAAA,IAC1B,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,aAAa;AAAA,IACb,YAAY;AAAA,EACd;AAEA,SAAO,UAAU,SAAS,KAAK;AACjC;AAaA,SAAS,0BACP,WACA,QACA,UACA,aAC8B;AAC9B,SAAO,MAAM;AAEX,UAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAAgC;AACnE,UAAM,SAAS,SAAS,UAAU;AAClC,QAAI,eAAe;AACnB,UAAM,iBAAiB,oBAAI,IAAY;AAGvC,UAAM,kBAAkB,IAAI,gBAAgB;AAC5C,2BA
AuB,IAAI,UAAU,eAAe;AAEpD,QAAI,gBAAgB;AACpB,QAAI,uBAAuB;AAC3B,QAAI,iBAAiB;AAGrB,UAAM,WAAW,OAAO,SAAiB;AACvC,UAAI,aAAc;AAClB,UAAI;AACF;AACA,cAAM,OAAO,MAAM,SAAS,IAAI;AAAA;AAAA,CAAM;AAAA,MACxC,SAAS,KAAU;AACjB;AACA,YAAI,mBAAmB,GAAG;AACxB,kBAAQ,IAAI,QAAQ,QAAQ,6DAA6D,aAAa,qBAAqB,oBAAoB,EAAE;AAAA,QACnJ;AACA,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,UAAM,YAAY,YAAY;AAC5B,UAAI,aAAc;AAClB,UAAI;AACF,gBAAQ,IAAI,QAAQ,QAAQ,mCAAmC,aAAa,qBAAqB,oBAAoB,mBAAmB,cAAc,EAAE;AACxJ,uBAAe;AACf,cAAM,OAAO,MAAM;AAAA,MACrB,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,UAAM,yBAAyB,MAAM;AACnC,6BAAuB,OAAO,QAAQ;AAAA,IACxC;AAGA,KAAC,YAAY;AACX,UAAI,YAAY;AAEhB,UAAI;AACF,cAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,UAAU,CAAC;AAG9C,cAAM,SAAS,KAAK,UAAU,EAAE,MAAM,kBAAkB,SAAS,CAAC,CAAC;AAKnE,YAAI;AACJ,YAAI,eAAe,YAAY,SAAS,GAAG;AACzC,gBAAM,eAAiJ,CAAC;AAGxJ,gBAAM,yBAAyB,YAC5B,IAAI,CAAC,GAAG,MAAM;AACb,kBAAM,OAAO,EAAE,YAAY,cAAc,IAAI,CAAC;AAC9C,kBAAM,YAAY,EAAE,SAAS,UAAU,UAAU;AACjD,kBAAM,WAAW,EAAE,aAAa;AAChC,mBAAO,GAAG,IAAI,CAAC,KAAK,SAAS,MAAM,IAAI,eAAe,QAAQ;AAAA,UAChE,CAAC,EACA,KAAK,IAAI;AACZ,uBAAa,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,MAAM;AAAA,EAA2F,sBAAsB;AAAA;AAAA;AAAA,UACzH,CAAC;AAGD,cAAI,QAAQ;AACV,yBAAa,KAAK,EAAE,MAAM,QAAQ,MAAM;AAAA;AAAA,EAAqB,MAAM,GAAG,CAAC;AAAA,UACzE;AAGA,qBAAW,cAAc,aAAa;AACpC,gBAAI,WAAW,SAAS,SAAS;AAC/B,2BAAa,KAAK;AAAA,gBAChB,MAAM;AAAA,gBACN,OAAO,WAAW;AAAA,gBAClB,WAAW,WAAW;AAAA,gBACtB,UAAU,WAAW;AAAA,gBACrB,WAAW,WAAW;AAAA,cACxB,CAAC;AAAA,YACH,OAAO;AACL,2BAAa,KAAK;AAAA,gBAChB,MAAM;AAAA,gBACN,MAAM,WAAW;AAAA,gBACjB,WAAW,WAAW,aAAa;AAAA,gBACnC,UAAU,WAAW;AAAA,gBACrB,WAAW,WAAW;AAAA,cACxB,CAAC;AAAA,YACH;AAAA,UACF;AACA,6BAAmB;AAAA,QACrB,OAAO;AACL,6BAAmB;AAAA,QACrB;AAGA,cAAM,SAAS,KAAK,UAAU;AAAA,UAC5B,MAAM;AAAA,UACN,MAAM,EAAE,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,SAAS,iBAAiB;AAAA,QAC9D,CAAC,CAAC;AAGF,cAAM,YAAY,OAAO,KAAK,IAAI,CAAC;AACnC,cAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,UAAU,CAAC,CAAC;AAE3D,YAAI,SAAS,QAAQ,KAAK,IAAI,CAAC;AAC/B,YAAI,cAAc;AAElB,cAAM,SAAS,MAAM,MAAM,OAAO;AAAA,UAChC;AAAA,UACA;AAAA;AAAA,UACA,aAAa,gBAAgB;AAAA;AAAA,UAC7B,qBAAqB;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA,UAKrB,YAAY,YAAY;AAAA,UAExB;AAAA,UACA,cAAc,YAAY;AAAA,UAE1B;AAAA,UACA,oBAAoB,OAAO,cAAc;AACvC,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,MAAM;AAAA,gBACJ,IAAI,UAAU;AAAA,gBACd,YAAY,UAAU;AAAA,gBACtB,UAAU,UAAU;AAAA,gBACpB,OAAO,UAAU;AAAA,cACnB;AAAA,YACF,CAAC,CAAC;AAAA,UACJ;AAAA,UACA,gBAAgB,OAAO,aAAa;AAClC,kBAAM,SAAU,SAAS,MAAc,UAAU;AACjD,kBAAM,gBAAgB,OAAQ,SAAS,MAAc,YAAY,WAC5D,SAAS,KAAa,QAAQ,SAC/B;AACJ,kBAAM,aAAc,SAAS,MAAc;AAC3C,kBAAM,aAAc,SAAS,MAAc;AAC3C,oBAAQ;AAAA,cACN;AAAA,cACA,SAAS;AAAA,cACT;AAAA,cACA,kBAAkB,SAAY,iBAAiB,aAAa,KAAK;AAAA,cACjE,eAAe,UAAa,eAAe,SAAY,SAAS,UAAU,IAAI,UAAU,KAAK;AAAA,YAC/F;AACA,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,UAAU,SAAS;AAAA,cACnB,MAAM,SAAS;AAAA,YACjB,CAAC,CAAC;AACF,gBAAI,SAAS,aAAa,gBAAgB,WAAW,WAAW;AAC9D,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,OAAO;AAAA,gBACP;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,CAAC,CAAC;AACF,oBAAM,IAAI,QAAQ,CAACC,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,YACvD;AAGA,kBAAM,cAAe,SAAS,MAAc;AAC5C,kBAAM,gBAAiB,SAAS,MAAc;AAE9C,gBAAI,SAAS,aAAa,UAAU,eAAe;AACjD,sBAAQ,IAAI,mBAAmB,QAAQ,gEAAgE,SAAS,EAAE;AAClH,2BAAa,SAAS;AAAA,YACxB,WAAW,SAAS,aAAa,UAAU,aAAa;AACtD,sBAAQ,IAAI,mBAAmB,QAAQ,4DAA4D,WAAW,gBAAgB,SAAS,EAAE;AACzI,oBAAM,QAAQ,iBAAiB,WAAW,WAAW;AACrD,sBAAQ,IAAI,mBAAmB,QAAQ,4BAA4B,MAAM,SAAS,oBAAoB,MAAM,cAAc,OAAO,CAAC,qBAAqB,MAAM,cAAc,QAAQ,CAAC,EAAE;AAEtL,kBAAI,CAAC,iBAAiB,IAAI,SAAS,GAAG;AACpC,sBAAM,WAAW,IAAI,cAAc,SAAS;AAC5C,yBAAS,MAAM;AACf,iCAAiB,IAAI,WAAW,QAAQ;AAAA,cAC1C;AAKA,oBAAM,oBAAoB,MAAM,cAAc,OAAO;AACrD,kBAAI,oBAAoB,GAAG;AACzB,wBAAQ,IAAI,mBAAmB,QAAQ,eAAe,iBAAiB,+CAA+C;AACtH,sBAAM,mBAAmB,OAAO;AAChC,sBAAM,mBAAmB,QAAQ;AAAA,cACnC;AACA,sBAAQ,IAAI,mBAAmB,QAAQ,6CAA6C;AACpF,oBAAM,GAAG,SAAS,CAAC,UAAwB;AACzC;AACA,oBAAI,yBAAyB,GAAG;AAC9B,0BAAQ,IAAI,mBAAmB,QAAQ,4CAA4C,MAAM,MAAM,UAAU,CAAC,iBAAiB,YAAY,EAAE;AAAA,gBAC3I,WAAW,uBAAuB,OAAO,GAAG;AAC1C,0BAAQ,IAAI,mBAAmB,QAAQ,oBAAoB,oBAAoB,kBAAkB,YAAY,GAAG;AAAA,gBAClH;AACA,sBAAM,MAAM,iBAAiB,IAAI,SAAS;AAC1C,qBAAK,SAAS,KAAK;AACnB,yBAAS,KAAK,UAAU;AAAA,kBACtB,MAAM;AAAA,kBACN,MAAM,MAAM;AAAA,kBA
CZ,UAAU,MAAM;AAAA,gBAClB,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,0BAAQ,KAAK,mBAAmB,QAAQ,2CAA2C,GAAG;AAAA,gBACxF,CAAC;AAAA,cACH,CAAC;AACD,oBAAM,GAAG,UAAU,CAAC,MAAqB;AACvC,wBAAQ,IAAI,mBAAmB,QAAQ,qCAAqC,EAAE,SAAS,kBAAkB,EAAE,aAAa,aAAa,EAAE,aAAa,IAAI,EAAE,cAAc,EAAE;AAC1K,yBAAS,KAAK,UAAU;AAAA,kBACtB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACL,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,0BAAQ,KAAK,mBAAmB,QAAQ,4CAA4C,GAAG;AAAA,gBACzF,CAAC;AAAA,cACH,CAAC;AAED,oBAAM,cAAc;AAAA,YACtB;AAAA,UACF;AAAA,UACA,cAAc,YAAY;AACxB,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AACtD,gBAAI,aAAa;AACf,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAC/D,4BAAc;AACd,uBAAS,QAAQ,KAAK,IAAI,CAAC;AAAA,YAC7B;AAAA,UACF;AAAA,UACA,SAAS,OAAO,EAAE,MAAM,MAAM;AAC5B,wBAAY;AACZ,oBAAQ,IAAI,wBAAwB,MAAM,MAAM,QAAQ;AAAA,UAC1D;AAAA,QACF,CAAC;AAGD,YAAI,cAAc,aAAa,KAAK,IAAI,CAAC;AACzC,YAAI,mBAAmB;AAEvB,yBAAiB,QAAQ,OAAO,OAAO,YAAY;AACjD,cAAI,KAAK,SAAS,cAAc;AAC9B,gBAAI,CAAC,aAAa;AAChB,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,OAAO,CAAC,CAAC;AACjE,4BAAc;AAAA,YAChB;AACA,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UACrF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,YAAY,CAAC,CAAC;AAC3E,+BAAmB;AAAA,UACrB,WAAW,KAAK,SAAS,mBAAmB;AAC1C,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,aAAa,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UAC/F,WAAW,KAAK,SAAS,iBAAiB;AACxC,gBAAI,kBAAkB;AACpB,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AACzE,iCAAmB;AACnB,4BAAc,aAAa,KAAK,IAAI,CAAC;AAAA,YACvC;AAAA,UACF,WAAY,KAAa,SAAS,6BAA6B;AAE7D,kBAAM,IAAI;AACV,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,EAAE;AAAA,cACd,UAAU,EAAE;AAAA,YACd,CAAC,CAAC;AACF,2BAAe,IAAI,EAAE,UAAU;AAAA,UACjC,WAAY,KAAa,SAAS,mBAAmB;AAEnD,kBAAM,IAAI;AACV,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,EAAE;AAAA,cACd,eAAe,EAAE;AAAA,YACnB,CAAC,CAAC;AAAA,UACJ,WAAW,KAAK,SAAS,aAAa;AACpC,kBAAM;AAAA,cACJ;AAAA,cACA;AAAA,cACA,KAAK;AAAA,cACL,KAAK;AAAA,cACL,KAAK;AAAA,YACP;AACA,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,KAAK;AAAA,cACjB,UA
AU,KAAK;AAAA,cACf,OAAO,kBAAkB,KAAK,UAAU,KAAK,KAAK;AAAA,YACpD,CAAC,CAAC;AACF,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,OAAO;AAAA,cACP,UAAU,KAAK;AAAA,YACjB,CAAC,CAAC;AAAA,UACJ,WAAW,KAAK,SAAS,eAAe;AACtC,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,KAAK;AAAA,cACjB,QAAQ,KAAK;AAAA,YACf,CAAC,CAAC;AAAA,UACJ,WAAW,KAAK,SAAS,SAAS;AAChC,oBAAQ,MAAM,iBAAiB,KAAK,KAAK;AACzC,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,KAAK,EAAE,CAAC,CAAC;AAAA,UACjF;AAAA,QACF;AAGA,YAAI,aAAa;AACf,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAAA,QACjE;AAGA,YAAI,kBAAkB;AACpB,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AAAA,QAC3E;AAGA,YAAI,CAAC,WAAW;AACd,gBAAM,OAAO,qBAAqB;AAAA,QACpC;AAGA,YAAI,WAAW;AACb,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,QAClD,OAAO;AACL,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,QACnD;AAGA,YAAI;AACF,gBAAM,oBAAoB,OAAO,QAAQ;AAAA,QAC3C,QAAQ;AAAA,QAER;AAAA,MACF,SAAS,OAAY;AACnB,YAAI,MAAM,SAAS,gBAAgB,MAAM,SAAS,SAAS,SAAS,GAAG;AAErE,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,QAClD,OAAO;AAEL,kBAAQ,MAAM,gBAAgB,KAAK;AACnC,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,MAAM,QAAQ,CAAC,CAAC;AAC1E,cAAI;AACF,kBAAM,oBAAoB,UAAU,QAAQ;AAAA,UAC9C,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF,UAAE;AACA,+BAAuB;AAIvB,cAAM,SAAS,QAAQ;AACvB,cAAM,UAAU;AAAA,MAClB;AAAA,IACF,GAAG;AAEH,WAAO;AAAA,EACT;AACF;AAGA,OAAO;AAAA,EACL;AAAA,EACAC,YAAW,QAAQ,eAAe;AAAA,EAClC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,EAAE,QAAQ,WAAW,YAAY,IAAI,EAAE,IAAI,MAAM,MAAM;AAG7D,UAAM,SAAS,gCAAgC,IAAI,SAAS;AAE5D,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,UAAM,eAAe,MAAM,eAAe,gBAAgB,EAAE;AAI5D,UAAM,iBAAiB,IAAI,QAAQ,kBAAkB,YAAY;AAGjE,QAAI;AAGJ,UAAM,oBAAoD;AAE1D,QAAI,qBAAqB,kBAAkB,SAAS,GAAG;AAErD,eAAS,IAAI,GAAG,IAAI,kBAAkB,QAAQ,KAAK;AACjD,cAAM,aAAa,kBAAkB,CAAC;AACtC,YAAI;AACF,gBAAM,YAAY,MAAM,qBAAqB,IAAI,YAAY,CAAC;AAC9D,qBAAW,YAAY;AAAA,QACzB,SAAS,KAAK;AACZ,kBAAQ,MAAM,6BAA6B,CAAC,KAAK,GAAG;AAAA,QACtD;AAAA,MACF;
AAGA,YAAM,eAAiJ,CAAC;AAGxJ,YAAM,yBAAyB,kBAC5B,IAAI,CAAC,GAAG,MAAM;AACb,cAAM,OAAO,EAAE,YAAY,cAAc,IAAI,CAAC;AAC9C,cAAM,YAAY,EAAE,SAAS,UAAU,UAAU;AACjD,cAAM,WAAW,EAAE,aAAa;AAChC,eAAO,GAAG,IAAI,CAAC,KAAK,SAAS,MAAM,IAAI,eAAe,QAAQ;AAAA,MAChE,CAAC,EACA,KAAK,IAAI;AACZ,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,MAAM;AAAA,EAA2F,sBAAsB;AAAA;AAAA;AAAA,MACzH,CAAC;AAGD,UAAI,QAAQ;AACV,qBAAa,KAAK,EAAE,MAAM,QAAQ,MAAM;AAAA;AAAA,EAAqB,MAAM,GAAG,CAAC;AAAA,MACzE;AAGA,iBAAW,cAAc,mBAAmB;AAC1C,YAAI,WAAW,SAAS,SAAS;AAC/B,uBAAa,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,OAAO,WAAW;AAAA;AAAA,YAClB,WAAW,WAAW;AAAA,YACtB,UAAU,WAAW;AAAA,YACrB,WAAW,WAAW;AAAA,UACxB,CAAC;AAAA,QACH,OAAO;AACL,uBAAa,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,MAAM,WAAW;AAAA,YACjB,WAAW,WAAW,aAAa;AAAA,YACnC,UAAU,WAAW;AAAA,YACrB,WAAW,WAAW;AAAA,UACxB,CAAC;AAAA,QACH;AAAA,MACF;AAEA,2BAAqB;AAAA,IACvB,OAAO;AACL,2BAAqB;AAAA,IACvB;AAIA,UAAM,eAAe,OAAO,IAAI,EAAE,MAAM,QAAQ,SAAS,mBAA0B,CAAC;AAGpF,UAAM,WAAW,UAAU,EAAE,IAAIC,QAAO,EAAE,CAAC;AAC3C,YAAQ,IAAI,4BAA4B,QAAQ,gBAAgB,EAAE,EAAE;AAGpE,UAAM,oBAAoB,OAAO,IAAI,QAAQ;AAI7C,UAAM,SAAS,MAAM,cAAc;AAAA,MACjC;AAAA,MACA,0BAA0B,IAAI,QAAQ,UAAU,iBAAiB;AAAA,IACnE;AAEA,QAAI,CAAC,QAAQ;AACX,cAAQ,MAAM,8CAA8C,QAAQ,EAAE;AACtE,aAAO,EAAE,KAAK,EAAE,OAAO,0BAA0B,GAAG,GAAG;AAAA,IACzD;AACA,YAAQ,IAAI,mBAAmB,QAAQ,uBAAuB;AAG9D,UAAM,gBAAgB,OAAO,YAAY,IAAI,kBAAkB,CAAC;AAEhE,WAAO,IAAI,SAAS,eAAwD;AAAA,MAC1E,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,QACd,iCAAiC;AAAA,QACjC,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAGA,OAAO,IAAI,cAAc,OAAO,MAAM;AACpC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,WAAW,EAAE,IAAI,MAAM,UAAU;AACvC,QAAM,mBAAmB,EAAE,IAAI,MAAM,UAAU;AAE/C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,MAAI,WAA+B;AACnC,MAAI,CAAC,UAAU;AACb,UAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AACvE,QAAI,CAAC,cAAc;AACjB,aAAO,EAAE,KAAK,EAAE,OAAO,qCAAqC,MAAM,4CAA4C,GAAG,GAAG;AAAA,IACtH;AACA,eAAW,aAAa;AAAA,EAC1B;AAEA,UAAQ,IAAI,sCAAsC,SAAS,cAAc,QAAQ,cAAc,YAAY,MAAM,EAAE;AAGnH,QAAM,SAAS,MAAM,c
AAc;AAAA,IACjC;AAAA,IACA,WAAW,SAAS,UAAU,EAAE,IAAI;AAAA,EACtC;AAEA,MAAI,CAAC,QAAQ;AACX,YAAQ,IAAI,uCAAkC,QAAQ,sBAAsB;AAC5E,WAAO,EAAE,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,MAAM;AAAA,IACR,GAAG,GAAG;AAAA,EACR;AACA,UAAQ,IAAI,mCAAmC,QAAQ,EAAE;AAEzD,QAAM,gBAAgB,OAAO,YAAY,IAAI,kBAAkB,CAAC;AAEhE,SAAO,IAAI,SAAS,eAAwD;AAAA,IAC1E,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB;AAAA,MACjB,cAAc;AAAA,MACd,iCAAiC;AAAA,MACjC,eAAe,YAAY;AAAA,IAC7B;AAAA,EACF,CAAC;AACH,CAAC;AAGD,OAAO,IAAI,eAAe,OAAO,MAAM;AACrC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AAEvE,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,iBAAiB,CAAC,CAAC;AAAA,IACnB,QAAQ,eAAe;AAAA,MACrB,IAAI,aAAa;AAAA,MACjB,UAAU,aAAa;AAAA,MACvB,QAAQ,aAAa;AAAA,MACrB,WAAW,aAAa,UAAU,YAAY;AAAA,IAChD,IAAI;AAAA,EACN,CAAC;AACH,CAAC;AAGD,OAAO;AAAA,EACL;AAAA,EACAD,YAAW,QAAQ,eAAe;AAAA,EAClC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,EAAE,QAAQ,UAAU,IAAI,EAAE,IAAI,MAAM,MAAM;AAGhD,UAAM,SAAS,gCAAgC,IAAI,SAAS;AAE5D,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,QAAI;AACF,YAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,WAAW,GAAG,CAAC;AAClD,YAAM,SAAS,MAAM,MAAM,IAAI,EAAE,OAAO,CAAC;AAEzC,aAAO,EAAE,KAAK;AAAA,QACZ,WAAW;AAAA,QACX,MAAM,OAAO;AAAA,QACb,WAAW,OAAO,MAAM;AAAA,MAC1B,CAAC;AAAA,IACH,SAAS,OAAY;AACnB,aAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,IAC7C;AAAA,EACF;AACF;AAGA,OAAO,KAAK,4BAA4B,OAAO,MAAM;AACnD,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,MAAI;AACF,UAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,UAAU,CAAC;AAC9C,UAAM,SAAS,MAAM,MAAM,QAAQ,UAAU;AAE7C,WAAO,EAAE,KAAK;AAAA,MACZ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAY;AACnB,WAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,EAC7C;AACF,CAAC;AAGD,OAAO;AAAA,EACL;AAAA,EACAA,YAAW,QAAQ,YA
AY;AAAA,EAC/B,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAC3C,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAE/B,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,QAAI;AACF,YAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,UAAU,CAAC;AAC9C,YAAM,OAAO,YAAY,MAAM,MAAM;AAErC,aAAO,EAAE,KAAK;AAAA,QACZ,SAAS;AAAA,QACT;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,SAAS,OAAY;AACnB,aAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,IAC7C;AAAA,EACF;AACF;AAGA,OAAO,IAAI,kBAAkB,OAAO,MAAM;AACxC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,mBAAmB,MAAM,qBAAqB,oBAAoB,SAAS;AAEjF,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,kBAAkB,iBAAiB,IAAI,CAAC,OAAO;AAAA,MAC7C,IAAI,EAAE;AAAA,MACN,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,MACZ,OAAO,EAAE;AAAA,MACT,WAAW,EAAE,UAAU,YAAY;AAAA,IACrC,EAAE;AAAA,IACF,OAAO,iBAAiB;AAAA,EAC1B,CAAC;AACH,CAAC;AAID,OAAO,KAAK,cAAc,OAAO,MAAM;AACrC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AACvE,MAAI,CAAC,cAAc;AACjB,WAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,GAAG,GAAG;AAAA,EACnE;AAGA,QAAM,kBAAkB,uBAAuB,IAAI,aAAa,QAAQ;AACxE,MAAI,iBAAiB;AACnB,oBAAgB,MAAM;AACtB,2BAAuB,OAAO,aAAa,QAAQ;AACnD,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,UAAU,aAAa,UAAU,SAAS,KAAK,CAAC;AAAA,EACjF;AAGA,SAAO,EAAE,KAAK;AAAA,IACZ,SAAS;AAAA,IACT,UAAU,aAAa;AAAA,IACvB,SAAS;AAAA,EACX,CAAC;AACH,CAAC;AAGD,OAAO;AAAA,EACL;AAAA,EACAA,YAAW,QAAQ,gBAAgB;AAAA,EACnC,OAAO,MAAM;AACX,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,SAAS,UAAU;AAGzB,UAAM,QAAQ,MAAM,MAAM,OAAO;AAAA,MAC/B,MAAM,KAAK;AAAA,MACX,kBAAkB,KAAK,oBAAoB,OAAO;AAAA,MAClD,OAAO,KAAK,SAAS,OAAO;AAAA,MAC5B,eAAe,KAAK,gBAAgB,EAAE,eAAe,KAAK,cAAc,IAAI;AAAA,IAC9E,CAAC;AAED,UAAM,UAAU,MAAM,WAAW;AAGjC,UAAM,iBAAiB,gCAAgC,QAAQ,IAAI,KAAK,MAAM;AAE9E,UAAM,WAAW,UAAU,QAAQ,EAAE,IAAIC,QAAO,EAAE,CAAC;AAInD,UAAM,iBAAiB,Q
AAQ,IAAI,QAAQ,kBAAkB,CAAC;AAG9D,UAAM,oBAAoB,OAAO,QAAQ,IAAI,QAAQ;AAIrD,UAAM,4BAA4B,MAA8B;AAC9D,YAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAAgC;AACnE,YAAM,SAAS,SAAS,UAAU;AAClC,UAAI,eAAe;AACnB,YAAM,iBAAiB,oBAAI,IAAY;AAGvC,YAAM,kBAAkB,IAAI,gBAAgB;AAC5C,6BAAuB,IAAI,UAAU,eAAe;AAEpD,UAAI,gBAAgB;AACpB,UAAI,uBAAuB;AAC3B,UAAI,iBAAiB;AAGrB,YAAM,WAAW,OAAO,SAAiB;AACvC,YAAI,aAAc;AAClB,YAAI;AACF;AACA,gBAAM,OAAO,MAAM,SAAS,IAAI;AAAA;AAAA,CAAM;AAAA,QACxC,SAAS,KAAU;AACjB;AACA,cAAI,mBAAmB,GAAG;AACxB,oBAAQ,IAAI,QAAQ,QAAQ,6DAA6D,aAAa,qBAAqB,oBAAoB,EAAE;AAAA,UACnJ;AACA,yBAAe;AAAA,QACjB;AAAA,MACF;AAEA,YAAM,YAAY,YAAY;AAC5B,YAAI,aAAc;AAClB,YAAI;AACF,kBAAQ,IAAI,QAAQ,QAAQ,mCAAmC,aAAa,qBAAqB,oBAAoB,mBAAmB,cAAc,EAAE;AACxJ,yBAAe;AACf,gBAAM,OAAO,MAAM;AAAA,QACrB,QAAQ;AAAA,QAER;AAAA,MACF;AAGA,YAAM,yBAAyB,MAAM;AACnC,+BAAuB,OAAO,QAAQ;AAAA,MACxC;AAEA,OAAC,YAAY;AACX,YAAI,YAAY;AAEhB,YAAI;AAEF,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,kBAAkB,SAAS,CAAC,CAAC;AACnE,gBAAM,SAAS,KAAK,UAAU;AAAA,YAC5B,MAAM;AAAA,YACN,MAAM;AAAA,cACJ,IAAI,QAAQ;AAAA,cACZ,MAAM,QAAQ;AAAA,cACd,kBAAkB,QAAQ;AAAA,cAC1B,OAAO,QAAQ;AAAA,YACjB;AAAA,UACF,CAAC,CAAC;AAEF,gBAAM,YAAY,OAAO,KAAK,IAAI,CAAC;AACnC,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,UAAU,CAAC,CAAC;AAE3D,cAAI,SAAS,QAAQ,KAAK,IAAI,CAAC;AAC/B,cAAI,cAAc;AAElB,gBAAM,SAAS,MAAM,MAAM,OAAO;AAAA,YAChC,QAAQ;AAAA,YACR,aAAa,gBAAgB;AAAA;AAAA,YAC7B,gBAAgB,OAAO,aAAa;AAClC,oBAAM,SAAU,SAAS,MAAc,UAAU;AACjD,oBAAM,gBAAgB,OAAQ,SAAS,MAAc,YAAY,WAC5D,SAAS,KAAa,QAAQ,SAC/B;AACJ,oBAAM,aAAc,SAAS,MAAc;AAC3C,oBAAM,aAAc,SAAS,MAAc;AAC3C,sBAAQ;AAAA,gBACN;AAAA,gBACA,SAAS;AAAA,gBACT;AAAA,gBACA,kBAAkB,SAAY,iBAAiB,aAAa,KAAK;AAAA,gBACjE,eAAe,UAAa,eAAe,SAAY,SAAS,UAAU,IAAI,UAAU,KAAK;AAAA,cAC/F;AACA,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,UAAU,SAAS;AAAA,gBACnB,MAAM,SAAS;AAAA,cACjB,CAAC,CAAC;AACF,kBAAI,SAAS,aAAa,gBAAgB,WAAW,WAAW;AAC9D,sBAAM,SAAS,KAAK,UAAU;AAAA,kBAC5B,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC,CAAC;AACF,sBAAM,IAAI,QAAQ,CAACF,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,cACvD;AAEA,oBAAM,cAAe,SAAS,MAAc;AAC5C,oBAAM,g
BAAiB,SAAS,MAAc;AAE9C,kBAAI,SAAS,aAAa,UAAU,eAAe;AACjD,wBAAQ,IAAI,mBAAmB,QAAQ,gEAAgE,QAAQ,EAAE,EAAE;AACnH,6BAAa,QAAQ,EAAE;AAAA,cACzB,WAAW,SAAS,aAAa,UAAU,aAAa;AACtD,wBAAQ,IAAI,mBAAmB,QAAQ,0CAA0C,WAAW,gBAAgB,QAAQ,EAAE,EAAE;AACxH,sBAAM,QAAQ,iBAAiB,QAAQ,IAAI,WAAW;AACtD,wBAAQ,IAAI,mBAAmB,QAAQ,4BAA4B,MAAM,SAAS,oBAAoB,MAAM,cAAc,OAAO,CAAC,qBAAqB,MAAM,cAAc,QAAQ,CAAC,EAAE;AAEtL,oBAAI,CAAC,iBAAiB,IAAI,QAAQ,EAAE,GAAG;AACrC,wBAAM,WAAW,IAAI,cAAc,QAAQ,EAAE;AAC7C,2BAAS,MAAM;AACf,mCAAiB,IAAI,QAAQ,IAAI,QAAQ;AAAA,gBAC3C;AAGA,sBAAM,oBAAoB,MAAM,cAAc,OAAO;AACrD,oBAAI,oBAAoB,GAAG;AACzB,0BAAQ,IAAI,mBAAmB,QAAQ,eAAe,iBAAiB,+CAA+C;AACtH,wBAAM,mBAAmB,OAAO;AAChC,wBAAM,mBAAmB,QAAQ;AAAA,gBACnC;AACA,wBAAQ,IAAI,mBAAmB,QAAQ,6CAA6C;AACpF,sBAAM,GAAG,SAAS,CAAC,UAAwB;AACzC;AACA,sBAAI,yBAAyB,GAAG;AAC9B,4BAAQ,IAAI,mBAAmB,QAAQ,4CAA4C,MAAM,MAAM,UAAU,CAAC,iBAAiB,YAAY,EAAE;AAAA,kBAC3I,WAAW,uBAAuB,OAAO,GAAG;AAC1C,4BAAQ,IAAI,mBAAmB,QAAQ,oBAAoB,oBAAoB,kBAAkB,YAAY,GAAG;AAAA,kBAClH;AACA,wBAAM,MAAM,iBAAiB,IAAI,QAAQ,EAAE;AAC3C,uBAAK,SAAS,KAAK;AACnB,2BAAS,KAAK,UAAU;AAAA,oBACtB,MAAM;AAAA,oBACN,MAAM,MAAM;AAAA,oBACZ,UAAU,MAAM;AAAA,kBAClB,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,4BAAQ,KAAK,mBAAmB,QAAQ,2CAA2C,GAAG;AAAA,kBACxF,CAAC;AAAA,gBACH,CAAC;AACD,sBAAM,GAAG,UAAU,CAAC,MAAqB;AACvC,0BAAQ,IAAI,mBAAmB,QAAQ,qCAAqC,EAAE,SAAS,kBAAkB,EAAE,aAAa,aAAa,EAAE,aAAa,IAAI,EAAE,cAAc,EAAE;AAC1K,2BAAS,KAAK,UAAU;AAAA,oBACtB,MAAM;AAAA,oBACN,GAAG;AAAA,kBACL,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,4BAAQ,KAAK,mBAAmB,QAAQ,4CAA4C,GAAG;AAAA,kBACzF,CAAC;AAAA,gBACH,CAAC;AACD,sBAAM,cAAc;AAAA,cACtB;AAAA,YACF;AAAA,YACA,cAAc,YAAY;AACxB,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AACtD,kBAAI,aAAa;AACf,sBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAC/D,8BAAc;AACd,yBAAS,QAAQ,KAAK,IAAI,CAAC;AAAA,cAC7B;AAAA,YACF;AAAA,YACA,SAAS,OAAO,EAAE,MAAM,MAAM;AAC5B,0BAAY;AACZ,sBAAQ,IAAI,wBAAwB,MAAM,MAAM,QAAQ;AAAA,YAC1D;AAAA,UACF,CAAC;AAED,cAAI,cAAc,aAAa,KAAK,IAAI,CAAC;AACzC,cAAI,mBAAmB;AAEvB,2BAAiB,QAAQ,OAAO,OAAO,YAAY;AACjD,gBAAI,KAAK,SAAS,cAAc;AAC9B,kBAAI,CAAC,aAAa
;AAChB,sBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,OAAO,CAAC,CAAC;AACjE,8BAAc;AAAA,cAChB;AACA,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,YACrF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,YAAY,CAAC,CAAC;AAC3E,iCAAmB;AAAA,YACrB,WAAW,KAAK,SAAS,mBAAmB;AAC1C,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,aAAa,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,YAC/F,WAAW,KAAK,SAAS,iBAAiB;AACxC,kBAAI,kBAAkB;AACpB,sBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AACzE,mCAAmB;AACnB,8BAAc,aAAa,KAAK,IAAI,CAAC;AAAA,cACvC;AAAA,YACF,WAAY,KAAa,SAAS,6BAA6B;AAE7D,oBAAM,IAAI;AACV,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,EAAE;AAAA,gBACd,UAAU,EAAE;AAAA,cACd,CAAC,CAAC;AACF,6BAAe,IAAI,EAAE,UAAU;AAAA,YACjC,WAAY,KAAa,SAAS,mBAAmB;AAEnD,oBAAM,IAAI;AACV,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,EAAE;AAAA,gBACd,eAAe,EAAE;AAAA,cACnB,CAAC,CAAC;AAAA,YACJ,WAAW,KAAK,SAAS,aAAa;AAEpC,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA,KAAK;AAAA,gBACL,KAAK;AAAA,gBACL,KAAK;AAAA,cACP;AACA,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,KAAK;AAAA,gBACjB,UAAU,KAAK;AAAA,gBACf,OAAO,kBAAkB,KAAK,UAAU,KAAK,KAAK;AAAA,cACpD,CAAC,CAAC;AACF,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,OAAO;AAAA,gBACP,UAAU,KAAK;AAAA,cACjB,CAAC,CAAC;AAAA,YACJ,WAAW,KAAK,SAAS,eAAe;AACtC,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,KAAK;AAAA,gBACjB,QAAQ,KAAK;AAAA,cACf,CAAC,CAAC;AAAA,YACJ,WAAW,KAAK,SAAS,SAAS;AAChC,sBAAQ,MAAM,iBAAiB,KAAK,KAAK;AACzC,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,KAAK,EAAE,CAAC,CAAC;AAAA,YACjF;AAAA,UACF;AAEA,cAAI,aAAa;AACf,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAAA,UACjE;AAEA,cAAI,kBAAkB;AACpB,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AAAA,UAC3E;AAEA,cAAI,CAAC,WAAW;AACd,kBAAM,OAAO,qBAAqB;AAAA,UACpC;AAEA,cAAI,WAAW;AACb,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,UAClD,OAAO;AACL,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,UACnD;AAEA,gBAAM,oBAAoB,OAAO,QAAQ;AAAA,QAC3C,SAAS,OAAY;AACnB,cAAI
,MAAM,SAAS,gBAAgB,MAAM,SAAS,SAAS,SAAS,GAAG;AAErE,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,UAClD,OAAO;AAEL,oBAAQ,MAAM,gBAAgB,KAAK;AACnC,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,MAAM,QAAQ,CAAC,CAAC;AAC1E,kBAAM,oBAAoB,UAAU,QAAQ;AAAA,UAC9C;AAAA,QACF,UAAE;AACA,iCAAuB;AAEvB,gBAAM,SAAS,QAAQ;AACvB,gBAAM,UAAU;AAAA,QAClB;AAAA,MACF,GAAG;AAEH,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,MAAM,cAAc;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAEA,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,EAAE,OAAO,0BAA0B,GAAG,GAAG;AAAA,IACzD;AAEA,UAAM,gBAAgB,OAAO,YAAY,IAAI,kBAAkB,CAAC;AAEhE,WAAO,IAAI,SAAS,eAAwD;AAAA,MAC1E,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,QACd,iCAAiC;AAAA,QACjC,eAAe;AAAA,QACf,gBAAgB,QAAQ;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAGA,IAAM,qBAAqBL,IAAE,OAAO;AAAA,EAClC,MAAMA,IAAE,KAAK,CAAC,eAAe,kBAAkB,aAAa,CAAC;AAAA,EAC7D,WAAWA,IAAE,OAAO;AAAA,EACpB,GAAGA,IAAE,OAAO,EAAE,SAAS;AAAA,EACvB,GAAGA,IAAE,OAAO,EAAE,SAAS;AAAA,EACvB,QAAQA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,YAAYA,IAAE,OAAO,EAAE,SAAS;AAAA,EAChC,QAAQA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,QAAQA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,KAAKA,IAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,WAAWA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,aAAaA,IAAE,MAAMA,IAAE,OAAO;AAAA,IAC5B,GAAGA,IAAE,OAAO;AAAA,IACZ,GAAGA,IAAE,OAAO;AAAA,IACZ,IAAIA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,CAAC,CAAC,EAAE,SAAS;AACf,CAAC;AAED,OAAO;AAAA,EACL;AAAA,EACAM,YAAW,QAAQ,kBAAkB;AAAA,EACrC,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,UAAM,QAAQ,EAAE,IAAI,MAAM,MAAM;AAEhC,UAAM,QAAQ,SAAS,SAAS;AAChC,QAAI,CAAC,SAAS,CAAC,MAAM,WAAW;AAC9B,aAAO,EAAE,KAAK,EAAE,OAAO,4CAA4C,GAAG,GAAG;AAAA,IAC3E;AAEA,UAAM,YAAY,KAAK;AACvB,WAAO,EAAE,KAAK,EAAE,SAAS,KAAK,CAAC;AAAA,EACjC;AACF;AAGA,OAAO,IAAI,uBAAuB,OAAO,MAAM;AAC7C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,QAAQ,SAAS,SAAS;AAEhC,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,QAAQ,CAAC,CAAC,OAAO;AAAA,IACjB,UAAU,CAAC,CAAC;AAAA,IACZ,aAAa,OAAO,cAAc;AAAA,MAChC,UAAU,MAAM,YAAY;AAAA,MAC5B,WAAW,MAAM,YAAY;AAAA,IAC/B,IAAI;AAAA,EACN,CAAC;
AACH,CAAC;;;AE14CD;AANA,SAAS,QAAAE,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,gBAAAC,qBAAoB;AAC7B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,cAAY;AAI9B,IAAM,aAAaF,eAAc,YAAY,GAAG;AAChD,IAAM,YAAYC,SAAQ,UAAU;AAKpC,IAAM,gBAAgB;AAAA,EACpBC,OAAK,WAAW,iBAAiB;AAAA;AAAA,EACjCA,OAAK,WAAW,oBAAoB;AAAA;AAAA,EACpCA,OAAK,WAAW,uBAAuB;AAAA;AAAA,EACvCA,OAAK,QAAQ,IAAI,GAAG,cAAc;AAAA;AACpC;AAEA,IAAI,iBAAiB;AACrB,IAAI,cAAc;AAElB,WAAW,mBAAmB,eAAe;AAC3C,MAAI;AACF,UAAM,cAAc,KAAK,MAAMH,cAAa,iBAAiB,OAAO,CAAC;AAErE,QAAI,YAAY,SAAS,eAAe;AACtC,uBAAiB,YAAY,WAAW;AACxC,oBAAc,YAAY,QAAQ;AAClC;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AACF;AAEA,IAAM,SAAS,IAAIH,MAAK;AAExB,OAAO,IAAI,KAAK,OAAO,MAAM;AAC3B,QAAM,SAAS,UAAU;AACzB,QAAM,eAAe,gBAAgB;AAGrC,QAAM,aAAa,aAAa,KAAK,OAAK,EAAE,aAAa,YAAY;AACrE,QAAM,YAAY,YAAY,cAAc;AAE5C,SAAO,EAAE,KAAK;AAAA,IACZ,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ,QAAQ,OAAO;AAAA,IACvB,kBAAkB;AAAA,IAClB,QAAQ;AAAA,MACN,kBAAkB,OAAO;AAAA,MACzB,cAAc,OAAO;AAAA,MACrB,sBAAsB,OAAO,iBAAiB,CAAC;AAAA,MAC/C,MAAM,OAAO,OAAO;AAAA,IACtB;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC,CAAC;AACH,CAAC;AAGD,OAAO,IAAI,YAAY,OAAO,MAAM;AAClC,MAAI,gBAAgB;AACpB,MAAI,kBAAkB;AACtB,MAAI;AAEJ,MAAI;AAEF,UAAM,cAAc,MAAM,MAAM,8BAA8B,WAAW,WAAW;AAAA,MAClF,SAAS,EAAE,UAAU,mBAAmB;AAAA,MACxC,QAAQ,YAAY,QAAQ,GAAI;AAAA;AAAA,IAClC,CAAC;AAED,QAAI,YAAY,IAAI;AAClB,YAAM,UAAU,MAAM,YAAY,KAAK;AACvC,sBAAgB,QAAQ,WAAW;AAGnC,YAAM,eAAe,CAAC,MAAc;AAClC,cAAM,QAAQ,EAAE,QAAQ,MAAM,EAAE,EAAE,MAAM,GAAG,EAAE,IAAI,MAAM;AACvD,eAAO,EAAE,OAAO,MAAM,CAAC,KAAK,GAAG,OAAO,MAAM,CAAC,KAAK,GAAG,OAAO,MAAM,CAAC,KAAK,EAAE;AAAA,MAC5E;AAEA,YAAM,UAAU,aAAa,cAAc;AAC3C,YAAM,SAAS,aAAa,aAAa;AAEzC,wBACE,OAAO,QAAQ,QAAQ,SACtB,OAAO,UAAU,QAAQ,SAAS,OAAO,QAAQ,QAAQ,SACzD,OAAO,UAAU,QAAQ,SAAS,OAAO,UAAU,QAAQ,SAAS,OAAO,QAAQ,QAAQ;AAAA,IAChG,OAAO;AACL,cAAQ,yBAAyB,YAAY,MAAM;AAAA,IACrD;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,eAAe,QAAQ,IAAI,UAAU;AAAA,EAC/C;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe,kBAAkB,kBAAkB,WAAW,YAAY;AAAA,IAC1E;AAAA,IACA,YAAW,oBAAI,KAAK
,GAAE,YAAY;AAAA,EACpC,CAAC;AACH,CAAC;AAED,OAAO,IAAI,UAAU,OAAO,MAAM;AAChC,MAAI;AAEF,cAAU;AAEV,WAAO,EAAE,KAAK;AAAA,MACZ,QAAQ;AAAA,MACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC,CAAC;AAAA,EACH,SAAS,OAAY;AACnB,WAAO,EAAE;AAAA,MACP;AAAA,QACE,QAAQ;AAAA,QACR,OAAO,MAAM;AAAA,QACb,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF,CAAC;AAOD,OAAO,IAAI,aAAa,OAAO,MAAM;AACnC,QAAM,SAAS,gBAAgB;AAE/B,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,oBAAoB;AAAA,EACtB,CAAC;AACH,CAAC;AAGD,IAAM,kBAAkBE,IAAE,OAAO;AAAA,EAC/B,UAAUA,IAAE,OAAO;AAAA,EACnB,QAAQA,IAAE,OAAO,EAAE,IAAI,CAAC;AAC1B,CAAC;AAED,OAAO;AAAA,EACL;AAAA,EACAD,YAAW,QAAQ,eAAe;AAAA,EAClC,OAAO,MAAM;AACX,UAAM,EAAE,UAAU,OAAO,IAAI,EAAE,IAAI,MAAM,MAAM;AAE/C,QAAI;AACF,gBAAU,UAAU,MAAM;AAC1B,YAAM,SAAS,gBAAgB;AAC/B,YAAM,iBAAiB,OAAO,KAAK,OAAK,EAAE,aAAa,SAAS,YAAY,CAAC;AAE7E,aAAO,EAAE,KAAK;AAAA,QACZ,SAAS;AAAA,QACT,UAAU,SAAS,YAAY;AAAA,QAC/B,WAAW,gBAAgB;AAAA,QAC3B,SAAS,eAAe,QAAQ;AAAA,MAClC,CAAC;AAAA,IACH,SAAS,OAAY;AACnB,aAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,IAC7C;AAAA,EACF;AACF;AAGA,OAAO,OAAO,uBAAuB,OAAO,MAAM;AAChD,QAAM,WAAW,EAAE,IAAI,MAAM,UAAU;AAEvC,MAAI;AACF,iBAAa,QAAQ;AAErB,WAAO,EAAE,KAAK;AAAA,MACZ,SAAS;AAAA,MACT,UAAU,SAAS,YAAY;AAAA,MAC/B,SAAS,eAAe,QAAQ;AAAA,IAClC,CAAC;AAAA,EACH,SAAS,OAAY;AACnB,WAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,EAC7C;AACF,CAAC;;;AC/LD,SAAS,QAAAM,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAElB;AAEO,IAAM,YAAY,IAAIC,MAAK;AAGlC,IAAM,cAAcC,IAAE,OAAO;AAAA,EAC3B,SAASA,IAAE,OAAO;AAAA,EAClB,KAAKA,IAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAMA,IAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,QAAQ,WAAW;AAAA,EAC9B,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,UAAM,UAAU,MAAW,gBAAgB;AAC3C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,4DAA4D,GAAG,GAAG;AAAA,IAC3F;AAEA,UAAM,mBAAmB,KAAK,OAAO,QAAQ;AAC7C,UAAM,SAAS,MAAW,cAAc,KAAK,SAAS,kBAAkB
;AAAA,MACtE;AAAA,MACA,MAAM,KAAK;AAAA,IACb,CAAC;AAED,WAAO,EAAE,KAAK;AAAA,MACZ,IAAI,OAAO;AAAA,MACX,MAAM,KAAK,QAAQ;AAAA,MACnB,SAAS,KAAK;AAAA,MACd,KAAK;AAAA,MACL,QAAQ,OAAO;AAAA,MACf,KAAK;AAAA;AAAA,IACP,GAAG,GAAG;AAAA,EACR;AACF;AAGA,UAAU,IAAI,yBAAyB,OAAO,MAAM;AAClD,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AAEzC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,mBAAmB,MAAW,qBAAqB,WAAW,QAAQ,gBAAgB;AAG5F,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,iBAAiB,IAAI,OAAO,SAAS;AACnC,YAAM,UAAU,MAAW,UAAU,KAAK,EAAE;AAC5C,aAAO;AAAA,QACL,IAAI,KAAK;AAAA,QACT,MAAM,KAAK,QAAQ;AAAA,QACnB,SAAS,KAAK;AAAA,QACd,KAAK,KAAK;AAAA,QACV,QAAQ,UAAU,YAAY;AAAA,QAC9B,WAAW,KAAK;AAAA,MAClB;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,WAAW;AAAA,IACX,OAAO,aAAa;AAAA,IACpB,SAAS,aAAa,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAAA,EAC5D,CAAC;AACH,CAAC;AAGD,UAAU,IAAI,qCAAqC,OAAO,MAAM;AAC9D,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,SAAS;AAC/E,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,EACpD;AAEA,QAAM,UAAU,MAAW,UAAU,UAAU;AAE/C,SAAO,EAAE,KAAK;AAAA,IACZ,IAAI;AAAA,IACJ,SAAS,KAAK;AAAA,IACd,KAAK,KAAK;AAAA,IACV,QAAQ,UAAU,YAAY;AAAA,IAC9B,WAAW,KAAK;AAAA,IAChB,UAAU,UAAU,OAAO;AAAA;AAAA,EAC7B,CAAC;AACH,CAAC;AAGD,IAAM,kBAAkBD,IAAE,OAAO;AAAA,EAC/B,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,UAAU,OAAK,IAAI,SAAS,GAAG,EAAE,IAAI,MAAS;AAC5E,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,SAAS,eAAe;AAAA,EACnC,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAC3C,UAAM,QAAQ,EAAE,IAAI,MAAM,OAAO;AAEjC,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,SAAS,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,EAAE,MAAM,MAAM,MAAM,UAAU,CAAC;AAEvG,QAAI,OAAO,WAAW,WAAW;AAC/B,aAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,IACpD;AAEA,WAAO,E
AAE,KAAK;AAAA,MACZ;AAAA,MACA,MAAM,OAAO;AAAA,MACb,WAAW,OAAO,OAAO,MAAM,IAAI,EAAE;AAAA,IACvC,CAAC;AAAA,EACH;AACF;AAGA,IAAM,aAAaD,IAAE,OAAO;AAAA,EAC1B,QAAQA,IAAE,KAAK,CAAC,WAAW,SAAS,CAAC,EAAE,SAAS;AAClD,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,QAAQ,WAAW,SAAS,CAAC;AAAA,EACxC,OAAO,MAAM;AACX,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,UAAM,UAAU,MAAW,aAAa,UAAU;AAElD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,mDAAmD,GAAG,GAAG;AAAA,IAClF;AAEA,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,SAAS,kBAAkB,CAAC;AAAA,EAC7D;AACF;AAGA,IAAM,cAAcD,IAAE,OAAO;AAAA,EAC3B,OAAOA,IAAE,OAAO;AAClB,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,QAAQ,WAAW;AAAA,EAC9B,OAAO,MAAM;AACX,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAC3C,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAMC,aAAY,MAAW,UAAU,UAAU;AACjD,QAAI,CAACA,YAAW;AACd,aAAO,EAAE,KAAK,EAAE,OAAO,0BAA0B,GAAG,GAAG;AAAA,IACzD;AAIA,UAAM,UAAU,MAAW,UAAU,YAAY,KAAK,OAAO,EAAE,YAAY,MAAM,CAAC;AAElF,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,8BAA8B,GAAG,GAAG;AAAA,IAC7D;AAEA,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,SAAS,KAAK,MAAM,OAAO,CAAC;AAAA,EAC7D;AACF;AAGA,UAAU,KAAK,kCAAkC,OAAO,MAAM;AAC5D,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AAEzC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,mBAAmB,MAAW,qBAAqB,WAAW,QAAQ,gBAAgB;AAC5F,MAAI,SAAS;AAEb,aAAW,YAAY,kBAAkB;AACvC,UAAMA,aAAY,MAAW,UAAU,SAAS,EAAE;AAClD,QAAIA,YAAW;AACb,YAAM,UAAU,MAAW,aAAa,SAAS,EAAE;AACnD,UAAI,QAAS;AAAA,IACf;AAAA,EACF;AAEA,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,OAAO,CAAC;AACzC,CAAC;AAID,UAAU,IAAI,uBAAuB,OAAO,MAAM;AAChD,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAG3C,QAAMC,YAAW,MAAM,eAAe,KAAK;AAC3C,MAAI,eAAyD;AAC7D,MAAI,mBAAmB,QAAQ,IAAI;AACnC,MAAI;AAGJ,aAAW,WAAWA,WAAU;AAC9B,mBAAe,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,QAAQ,EAAE;AAClF,QAAI,cAAc;AAChB,yBAAmB,QAAQ;AAC3B,uBAAiB,QAAQ;AACzB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,CAAC,cAAc;AACjB,eAAW,WAAWA,WAAU;AAC9B,qBAAe,MAAW,QAAQ,YAAY,QAAQ,gBAAgB;AACtE,UAAI,cAAc;AAChB,2BAAmB,QAAQ;AAC3B,yBAAiB,aAAa;AAC9B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,WAAW,MAAW,UAAU,UAAU;AAChD,MAAI,CAAC,gBAAgB,CAA
C,UAAU;AAC9B,WAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,EACpD;AAGA,SAAO,IAAI;AAAA,IACT,IAAI,eAAe;AAAA,MACjB,MAAM,MAAM,YAAY;AACtB,cAAM,UAAU,IAAI,YAAY;AAChC,YAAI,aAAa;AACjB,YAAID,aAAY;AAChB,YAAI,YAAY;AAChB,cAAM,WAAW;AAGjB,mBAAW;AAAA,UACT,QAAQ,OAAO;AAAA,QAAwB,KAAK,UAAU,EAAE,YAAY,QAAQ,YAAY,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,QAClG;AAEA,eAAOA,cAAa,YAAY,UAAU;AACxC,cAAI;AACF,kBAAM,SAAS,MAAW,QAAQ,YAAY,kBAAkB,EAAE,WAAW,eAAe,CAAC;AAG7F,gBAAI,OAAO,WAAW,YAAY;AAChC,oBAAM,aAAa,OAAO,OAAO,MAAM,WAAW,MAAM;AACxD,kBAAI,YAAY;AACd,2BAAW;AAAA,kBACT,QAAQ,OAAO;AAAA,QAAwB,KAAK,UAAU,EAAE,MAAM,WAAW,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,gBACnF;AAAA,cACF;AACA,2BAAa,OAAO;AAAA,YACtB;AAEA,YAAAA,aAAY,OAAO,WAAW;AAE9B,gBAAI,CAACA,YAAW;AACd,yBAAW;AAAA,gBACT,QAAQ,OAAO;AAAA,QAAsB,KAAK,UAAU,EAAE,QAAQ,UAAU,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,cAClF;AACA;AAAA,YACF;AAGA,kBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AACzC;AAAA,UACF,QAAQ;AACN;AAAA,UACF;AAAA,QACF;AAEA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,IACD;AAAA,MACE,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF,CAAC;AAGD,UAAU,IAAI,4CAA4C,OAAO,MAAM;AACrE,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,SAAS;AAC/E,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,EACpD;AAGA,SAAO,IAAI;AAAA,IACT,IAAI,eAAe;AAAA,MACjB,MAAM,MAAM,YAAY;AACtB,cAAM,UAAU,IAAI,YAAY;AAChC,YAAI,aAAa;AACjB,YAAIA,aAAY;AAEhB,eAAOA,YAAW;AAChB,cAAI;AACF,kBAAM,SAAS,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,EAAE,UAAU,CAAC;AAGrF,gBAAI,OAAO,WAAW,YAAY;AAChC,oBAAM,aAAa,OAAO,OAAO,MAAM,WAAW,MAAM;AACxD,kBAAI,YAAY;AACd,2BAAW;AAAA,kBACT,QAAQ,OAAO;AAAA,QAAwB,KAAK,UAAU,EAAE,MAAM,WAAW,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,gBACnF;AAAA,cACF;AACA,2BAAa,OAAO;AAAA,YACtB;AAEA,YAAAA,aAAY,OAAO,WAAW;AAE9B,gBAAI,CAACA,YAAW;AACd,yBAAW;AAAA,gBACT,QAAQ,OAAO;AAAA,QAAsB,KAAK,UAAU,EAAE,QAAQ,UAAU,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,cAClF
;AACA;AAAA,YACF;AAGA,kBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AAAA,UAC3C,QAAQ;AACN;AAAA,UACF;AAAA,QACF;AAEA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,IACD;AAAA,MACE,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF,CAAC;;;AC5XD;AAJA,SAAS,QAAAE,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,UAAAC,eAAc;AAGvB;AAGA,IAAM,QAAQ,IAAIC,MAAK;AAGvB,IAAM,uBAAuB,oBAAI,IAA6B;AAE9D,IAAM,mBAAmBC,IAAE,OAAO;AAAA,EAChC,QAAQA,IAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACxB,cAAcA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC;AAAA,EAC9C,YAAYA,IAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,EACtC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AAAA,EACtC,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,eAAeA,IAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG,EAAE,SAAS;AAC3D,CAAC;AAGD,MAAM;AAAA,EACJ;AAAA,EACAC,YAAW,QAAQ,gBAAgB;AAAA,EACnC,OAAO,MAAM;AACX,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,SAAS,UAAU;AAEzB,UAAM,aAAyB;AAAA,MAC7B,SAAS;AAAA,MACT,cAAc,KAAK;AAAA,MACnB,YAAY,KAAK;AAAA,MACjB,eAAe,KAAK,iBAAiB;AAAA,MACrC,QAAQ;AAAA,IACV;AAGA,UAAM,QAAQ,MAAM,MAAM,OAAO;AAAA,MAC/B,MAAM,KAAK,QAAQ;AAAA,MACnB,kBAAkB,KAAK,oBAAoB,OAAO;AAAA,MAClD,OAAO,KAAK,SAAS,OAAO;AAAA,MAC5B,eAAe;AAAA,QACb,eAAe,EAAE,MAAM,OAAO,YAAY,OAAO,WAAW,MAAM;AAAA,QAClE,MAAM;AAAA,MACR;AAAA,IACF,CAAC;AAED,UAAM,SAAS,MAAM;AACrB,UAAM,kBAAkB,IAAI,gBAAgB;AAC5C,yBAAqB,IAAI,QAAQ,eAAe;AAIhD,UAAM,WAAW,UAAU,MAAM,IAAIC,QAAO,EAAE,CAAC;AAC/C,UAAM,oBAAoB,OAAO,QAAQ,QAAQ;AAEjD,UAAM,qBAAqB,MAAM;AAC/B,YAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAAgC;AACnE,YAAM,SAAS,SAAS,UAAU;AAClC,UAAI,eAAe;AAEnB,YAAM,WAAW,OAAO,SAAiB;AACvC,YAAI,aAAc;AAClB,YAAI;AACF,gBAAM,OAAO,MAAM,SAAS,IAAI;AAAA;AAAA,CAAM;AAAA,QACxC,QAAQ;AACN,yBAAe;AAAA,QACjB;AAAA,MACF;AAEA,OAAC,YAAY;AACX,cAAM,SAAS,KAAK,UAAU,EAAE,MAAM,kBAAkB,SAAS,CAAC,CAAC;AACnE,YAAI;AACF,gBAAM,MAAM,QAAQ;AAAA,YAClB,QAAQ,KAAK;AAAA,YACb;AAAA,YACA,aAAa,gBAAgB;AAAA,YAC7B;AAAA,UACF,CAAC;AACD,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,QACnD,SAAS,KAAU;AACjB,cAAI,IAAI,SAAS,gBAAgB,gBAAgB,OAAO,SAAS;AAC/D
,oBAAQ,IAAI,eAAe,MAAM,gBAAgB;AACjD,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,UAClD,OAAO;AACL,oBAAQ,MAAM,wBAAwB,MAAM,KAAK,IAAI,OAAO;AAC5D,kBAAM,WAAW,IAAI,WAAW;AAChC,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,SAAS,CAAC,CAAC;AACrE,kBAAM,aAAyB;AAAA,cAC7B,GAAG;AAAA,cACH,QAAQ;AAAA,cACR,OAAO;AAAA,YACT;AACA,kBAAM,eAAe,OAAO,QAAQ;AAAA,cAClC,QAAQ;AAAA,gBACN,eAAe,EAAE,MAAM,OAAO,YAAY,OAAO,WAAW,MAAM;AAAA,gBAClE,MAAM;AAAA,cACR;AAAA,YACF,CAAC;AACD,gBAAI,WAAW,YAAY;AACzB,oBAAM,EAAE,aAAAC,aAAY,IAAI,MAAM;AAC9B,cAAAA,aAAY,WAAW,YAAY;AAAA,gBACjC,MAAM;AAAA,gBACN;AAAA,gBACA,WAAW;AAAA,gBACX,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,gBAClC,MAAM,EAAE,QAAQ,UAAU,OAAO,SAAS;AAAA,cAC5C,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,UAAE;AACA,gBAAM,SAAS,QAAQ;AACvB,iBAAO,MAAM,EAAE,MAAM,MAAM;AAAA,UAAC,CAAC;AAC7B,gBAAM,oBAAoB,OAAO,QAAQ,EAAE,MAAM,MAAM;AAAA,UAAC,CAAC;AACzD,+BAAqB,OAAO,MAAM;AAAA,QACpC;AAAA,MACF,GAAG;AAEH,aAAO;AAAA,IACT;AAEA,UAAM,cAAc,gBAAgB,UAAU,kBAAkB;AAEhE,WAAO,EAAE,KAAK,EAAE,QAAQ,QAAQ,UAAU,GAAG,GAAG;AAAA,EAClD;AACF;AAGA,MAAM,IAAI,QAAQ,OAAO,MAAM;AAC7B,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAE/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,iBAAiB,GAAG,GAAG;AAAA,EAChD;AAEA,QAAM,OAAO,QAAQ,QAAQ;AAC7B,MAAI,CAAC,MAAM,SAAS;AAClB,WAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,EACvD;AAGA,MAAI,oBAAuG,CAAC;AAC5G,MAAI;AACF,UAAM,EAAE,oBAAAC,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,QAAID,oBAAmB,GAAG;AACxB,YAAM,QAAQ,MAAMC,gBAAe,gBAAgB,EAAE;AACrD,0BAAoB,MACjB,OAAO,CAAC,MAAM,EAAE,aAAa,mBAAmB,EAChD,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,UAAU,aAAa,EAAE,aAAa,WAAW,EAAE,UAAU,EAAE;AAAA,IAC9F;AAAA,EACF,QAAQ;AAAA,EAAC;AAET,SAAO,EAAE,KAAK;AAAA,IACZ,QAAQ;AAAA,IACR,QAAQ,KAAK;AAAA,IACb,QAAQ,KAAK;AAAA,IACb,OAAO,KAAK;AAAA,IACZ,YAAY,KAAK;AAAA,IACjB,OAAO,QAAQ;AAAA,IACf,MAAM,QAAQ;AAAA,IACd,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,mBAAmB,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,EACxE,CAAC;AACH,CAAC;AAGD,MAAM,KAAK,eAAe,OAAO,MAAM;AACrC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAE/C,MAAI,CAAC
,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,iBAAiB,GAAG,GAAG;AAAA,EAChD;AAEA,QAAM,OAAO,QAAQ,QAAQ;AAC7B,MAAI,CAAC,MAAM,SAAS;AAClB,WAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,EACvD;AAEA,MAAI,KAAK,WAAW,WAAW;AAC7B,WAAO,EAAE,KAAK,EAAE,OAAO,mBAAmB,KAAK,MAAM,GAAG,GAAG,GAAG;AAAA,EAChE;AAEA,QAAM,kBAAkB,qBAAqB,IAAI,EAAE;AACnD,MAAI,iBAAiB;AACnB,oBAAgB,MAAM;AACtB,yBAAqB,OAAO,EAAE;AAAA,EAChC;AAEA,QAAM,gBAA4B;AAAA,IAChC,GAAG;AAAA,IACH,QAAQ;AAAA,IACR,OAAO;AAAA,EACT;AACA,QAAM,eAAe,OAAO,IAAI;AAAA,IAC9B,QAAQ,EAAE,GAAG,QAAQ,QAAQ,MAAM,cAAc;AAAA,EACnD,CAAC;AAED,MAAI,KAAK,YAAY;AACnB,UAAM,EAAE,aAAAF,aAAY,IAAI,MAAM;AAC9B,IAAAA,aAAY,KAAK,YAAY;AAAA,MAC3B,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,MAAM,EAAE,QAAQ,UAAU,OAAO,yBAAyB;AAAA,IAC5D,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,KAAK,EAAE,QAAQ,IAAI,QAAQ,UAAU,OAAO,yBAAyB,CAAC;AACjF,CAAC;AAED,IAAO,gBAAQ;;;ApC1Mf;AACA;;;AqCVA,SAAS,QAAAG,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,YAAAC,iBAAgB;AAEzB,IAAMC,aAAYF,WAAUD,KAAI;AAYhC,SAAS,yBAAiC;AACxC,QAAMI,MAAKF,UAAS;AAEpB,MAAIE,QAAO,UAAU;AACnB,WAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMT,KAAK;AAAA,EACL;AAEA,MAAIA,QAAO,SAAS;AAClB,WAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUT,KAAK;AAAA,EACL;AAGA,SAAO;AAAA;AAAA;AAAA;AAAA,EAIP,KAAK;AACP;AAKA,eAAsB,YAA4C;AAChE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAMD,WAAU,WAAW,EAAE,SAAS,IAAK,CAAC;AAC/D,UAAM,UAAU,OAAO,KAAK;AAE5B,WAAO;AAAA,MACL,WAAW;AAAA,MACX;AAAA,IACF;AAAA,EACF,SAAS,OAAY;AACnB,WAAO;AAAA,MACL,WAAW;AAAA,MACX,OAAO;AAAA,MACP,qBAAqB,uBAAuB;AAAA,IAC9C;AAAA,EACF;AACF;AAMA,eAAsB,kBAAkB,UAGpC,CAAC,GAAqB;AACxB,QAAM,EAAE,QAAQ,OAAO,gBAAgB,KAAK,IAAI;AAEhD,QAAM,YAAY,MAAM,UAAU;AAElC,MAAI,CAAC,UAAU,WAAW;AACxB,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,4CAAuC;AACrD,cAAQ,MAAM,EAAE;AAChB,cAAQ,MAAM,4DAA4D;AAC1E,cAAQ,MAAM,EAAE;AAChB,UAAI,UAAU,qBAAqB;AACjC,gBAAQ,MAAM,UAAU,mBAAmB;AAAA,MAC7C;AACA,cAAQ,MAAM,EAAE;AAChB,cAAQ,MAAM,+CAA+C;AAC7D,cAAQ,MAAM,EAAE;AAAA,IAClB;AAEA,QAAI,eAAe;AACjB,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,OAAO;AAAA,EAEZ;AAEA,SAA
O;AACT;;;ArCjGA,IAAI,iBAAoC;AACxC,IAAI,eAAoC;AAGxC,IAAM,mBAAmB;AACzB,IAAM,oBAAoB,CAAC,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,IAAI;AAcrF,SAAS,kBAAiC;AACxC,MAAI;AACF,UAAM,aAAaE,SAAQC,eAAc,YAAY,GAAG,CAAC;AACzD,UAAM,SAASC,UAAQ,YAAY,MAAM,KAAK;AAE9C,QAAIC,aAAW,MAAM,KAAKA,aAAWC,OAAK,QAAQ,cAAc,CAAC,GAAG;AAClE,aAAO;AAAA,IACT;AAEA,UAAM,YAAYF,UAAQ,YAAY,MAAM,MAAM,KAAK;AACvD,QAAIC,aAAW,SAAS,KAAKA,aAAWC,OAAK,WAAW,cAAc,CAAC,GAAG;AACxE,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGA,eAAe,uBAAuB,MAAgC;AACpE,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,oBAAoB,IAAI,eAAe;AAAA,MAClE,QAAQ,YAAY,QAAQ,GAAI;AAAA,IAClC,CAAC;AACD,QAAI,SAAS,IAAI;AACf,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,aAAO,KAAK,SAAS;AAAA,IACvB;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGA,SAAS,YAAY,MAAgC;AACnD,SAAO,IAAI,QAAQ,CAACF,cAAY;AAC9B,UAAM,SAAS,gBAAgB;AAE/B,WAAO,KAAK,SAAS,CAAC,QAA+B;AACnD,UAAI,IAAI,SAAS,cAAc;AAC7B,QAAAA,UAAQ,IAAI;AAAA,MACd,OAAO;AACL,QAAAA,UAAQ,KAAK;AAAA,MACf;AAAA,IACF,CAAC;AAED,WAAO,KAAK,aAAa,MAAM;AAC7B,aAAO,MAAM;AACb,MAAAA,UAAQ,KAAK;AAAA,IACf,CAAC;AAED,WAAO,OAAO,MAAM,SAAS;AAAA,EAC/B,CAAC;AACH;AAGA,eAAe,YAAY,eAA2E;AACpG,MAAI,MAAM,uBAAuB,aAAa,GAAG;AAC/C,WAAO,EAAE,MAAM,eAAe,gBAAgB,KAAK;AAAA,EACrD;AAEA,MAAI,CAAE,MAAM,YAAY,aAAa,GAAI;AACvC,WAAO,EAAE,MAAM,eAAe,gBAAgB,MAAM;AAAA,EACtD;AAEA,aAAW,QAAQ,mBAAmB;AACpC,QAAI,SAAS,cAAe;AAE5B,QAAI,MAAM,uBAAuB,IAAI,GAAG;AACtC,aAAO,EAAE,MAAM,gBAAgB,KAAK;AAAA,IACtC;AAEA,QAAI,CAAE,MAAM,YAAY,IAAI,GAAI;AAC9B,aAAO,EAAE,MAAM,gBAAgB,MAAM;AAAA,IACvC;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,eAAe,gBAAgB,MAAM;AACtD;AAGA,SAAS,mBAAmB,QAAyB;AACnD,QAAM,cAAcE,OAAK,QAAQ,SAAS,UAAU;AACpD,SAAOD,aAAW,WAAW;AAC/B;AAGA,SAAS,eAAe,QAAyB;AAE/C,QAAM,SAASC,OAAK,QAAQ,OAAO,KAAK;AACxC,QAAM,WAAWA,OAAK,QAAQ,OAAO,OAAO;AAC5C,QAAM,aAAaA,OAAK,QAAQ,KAAK;AACrC,QAAM,eAAeA,OAAK,QAAQ,OAAO;AAEzC,SAAOD,aAAW,MAAM,KAAKA,aAAW,QAAQ,KAAKA,aAAW,UAAU,KAAKA,aAAW,YAAY;AACxG;AAGA,SAAS,wBAAwB,QAA+B;AAE9D,QAAME,iBAAgB;AAAA,IACpBD,OAAK,QAAQ,SAAS,cAAc,WAAW;AAAA,IAC/CA,OAAK,QAAQ,SAAS,cAAc,OAAO,WAAW;AAAA,EACxD;AAEA,aAAW,cAAcC,
gBAAe;AACtC,QAAIF,aAAW,UAAU,GAAG;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,WAAW,SAAiB,MAAgB,KAAa,KAAuE;AACvI,SAAO,IAAI,QAAQ,CAACD,cAAY;AAC9B,UAAM,QAAQI,OAAM,SAAS,MAAM;AAAA,MACjC;AAAA,MACA,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,MAChC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,SAAS;AACb,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AAAE,gBAAU,KAAK,SAAS;AAAA,IAAG,CAAC;AACzE,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AAAE,gBAAU,KAAK,SAAS;AAAA,IAAG,CAAC;AAEzE,UAAM,GAAG,SAAS,CAAC,SAAS;AAC1B,MAAAJ,UAAQ,EAAE,SAAS,SAAS,GAAG,OAAO,CAAC;AAAA,IACzC,CAAC;AAED,UAAM,GAAG,SAAS,CAAC,QAAQ;AACzB,MAAAA,UAAQ,EAAE,SAAS,OAAO,QAAQ,IAAI,QAAQ,CAAC;AAAA,IACjD,CAAC;AAAA,EACH,CAAC;AACH;AAGA,eAAe,WACb,SACA,UAAkB,kBAClB,QAAiB,OACjB,WAC4E;AAC5E,QAAM,SAAS,gBAAgB;AAE/B,MAAI,CAAC,QAAQ;AACX,QAAI,CAAC,MAAO,SAAQ,IAAI,wCAAmC;AAC3D,WAAO,EAAE,SAAS,MAAM,MAAM,QAAQ;AAAA,EACxC;AAEA,QAAM,EAAE,MAAM,YAAY,eAAe,IAAI,MAAM,YAAY,OAAO;AAEtE,MAAI,gBAAgB;AAClB,QAAI,CAAC,MAAO,SAAQ,IAAI,uDAAkD,UAAU,EAAE;AACtF,WAAO,EAAE,SAAS,MAAM,MAAM,WAAW;AAAA,EAC3C;AAGA,QAAM,UAAUC,aAAWC,OAAK,QAAQ,gBAAgB,CAAC;AACzD,QAAM,SAAS,CAAC,WAAWD,aAAWC,OAAK,QAAQ,mBAAmB,CAAC;AAEvE,QAAM,aAAa,UAAU,SAAS,SAAS,QAAQ;AAIvD,QAAM,EAAE,cAAc,mBAAmB,GAAG,SAAS,IAAI,QAAQ;AAGjE,QAAM,SAAS,aAAa,oBAAoB,OAAO;AAIvD,QAAM,gBAAgB,EAAE,YAAY,OAAO;AAC3C,QAAM,oBAAoBA,OAAK,QAAQ,qBAAqB;AAC5D,MAAI;AACF,IAAAG,eAAc,mBAAmB,KAAK,UAAU,eAAe,MAAM,CAAC,CAAC;AACvE,QAAI,CAAC,MAAO,SAAQ,IAAI,yCAAkC,iBAAiB,EAAE;AAAA,EAC/E,SAAS,KAAK;AACZ,QAAI,CAAC,MAAO,SAAQ,KAAK,4CAAuC,GAAG,EAAE;AAAA,EACvE;AAEA,QAAM,SAAkE;AAAA,IACtE,GAAG;AAAA,IACH,MAAM,OAAO,UAAU;AAAA;AAAA,EACzB;AAOA,QAAM,YAAY,eAAe,MAAM;AACvC,QAAM,uBAAuB,wBAAwB,MAAM;AAC3D,QAAM,WAAW,mBAAmB,MAAM;AAC1C,QAAM,eAAe,QAAQ,IAAI,aAAa;AAE9C,MAAI;AACJ,MAAI;AACJ,MAAI,MAAM;AAEV,MAAI,sBAAsB;AAGxB,cAAU;AACV,WAAO,CAAC,WAAW;AAGnB,UAAMP,SAAQ,oBAAoB;AAGlC,WAAO,OAAO,OAAO,UAAU;AAC/B,WAAO,WAAW;AAElB,QAAI,CAAC,MAAO,SAAQ,IAAI,sDAA+C;AAAA,EACzE,WAAW,aAAa,gBAAgB,CAAC,YAAY;AAInD,cAAU;AAEV,WAAO,eAAe,QAClB,CAAC,QAAQ,SAAS,MAAM,OAAO,UAAU,CAAC,IAC1C,CAAC,OAAO,OAAO;AAAA,EACrB,WAAW,WAAW;AAEpB,QAAI,gBAAgB,
CAAC,UAAU;AAE7B,UAAI,CAAC,MAAO,SAAQ,IAAI,+CAAwC;AAEhE,YAAM,YAAY,eAAe,QAC7B,CAAC,QAAQ,OAAO,IAChB,CAAC,OAAO,OAAO;AAEnB,YAAM,cAAc,MAAM,WAAW,YAAY,WAAW,QAAQ,MAAM;AAE1E,UAAI,CAAC,YAAY,SAAS;AACxB,YAAI,CAAC,MAAO,SAAQ,MAAM,8BAAyB;AACnD,eAAO,EAAE,SAAS,MAAM,MAAM,WAAW;AAAA,MAC3C;AAEA,UAAI,CAAC,MAAO,SAAQ,IAAI,gCAA2B;AAEnD,gBAAU;AAEV,aAAO,eAAe,QAClB,CAAC,QAAQ,SAAS,MAAM,OAAO,UAAU,CAAC,IAC1C,CAAC,OAAO,OAAO;AAAA,IACrB,OAAO;AAEL,gBAAU;AAEV,aAAO,eAAe,QAClB,CAAC,QAAQ,OAAO,MAAM,OAAO,UAAU,CAAC,IACxC,CAAC,OAAO,KAAK;AAAA,IACnB;AAAA,EACF,OAAO;AAEL,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,8DAAyD;AACvE,cAAQ,MAAM,mEAAmE;AAAA,IACnF;AACA,WAAO,EAAE,SAAS,MAAM,MAAM,WAAW;AAAA,EAC3C;AAEA,QAAM,QAAQM,OAAM,SAAS,MAAM;AAAA,IACjC;AAAA,IACA,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,IAChC,KAAK;AAAA,IACL,UAAU;AAAA,IACV,OAAO;AAAA,EACT,CAAC;AAGD,QAAM,iBAAiB;AACvB,MAAI,UAAU;AACd,MAAI,SAAS;AACb,MAAI,WAA0B;AAE9B,QAAM,iBAAiB,IAAI,QAAiB,CAACJ,cAAY;AACvD,UAAM,UAAU,WAAW,MAAM;AAC/B,UAAI,CAAC,WAAW,CAAC,QAAQ;AACvB,QAAAA,UAAQ,KAAK;AAAA,MACf;AAAA,IACF,GAAG,cAAc;AAEjB,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AACzC,YAAM,SAAS,KAAK,SAAS;AAC7B,UAAI,CAAC,OAAO;AAEV,cAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAK,EAAE,KAAK,CAAC;AAC5D,mBAAW,QAAQ,OAAO;AACxB,kBAAQ,IAAI,aAAa,IAAI,EAAE;AAAA,QACjC;AAAA,MACF;AACA,UAAI,CAAC,YAAY,OAAO,SAAS,OAAO,KAAK,OAAO,SAAS,SAAS,KAAK,OAAO,SAAS,WAAW,IAAI;AACxG,kBAAU;AACV,qBAAa,OAAO;AACpB,QAAAA,UAAQ,IAAI;AAAA,MACd;AAAA,IACF,CAAC;AAED,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AACzC,YAAM,SAAS,KAAK,SAAS,EAAE,KAAK;AACpC,UAAI,CAAC,SAAS,QAAQ;AACpB,gBAAQ,MAAM,aAAa,OAAO,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,MACnD;AAAA,IACF,CAAC;AAED,UAAM,GAAG,SAAS,CAAC,QAAQ;AACzB,UAAI,CAAC,MAAO,SAAQ,MAAM,gCAA2B,IAAI,OAAO,EAAE;AAClE,mBAAa,OAAO;AACpB,MAAAA,UAAQ,KAAK;AAAA,IACf,CAAC;AAED,UAAM,GAAG,QAAQ,CAAC,SAAS;AACzB,eAAS;AACT,iBAAW;AACX,UAAI,CAAC,SAAS;AACZ,qBAAa,OAAO;AACpB,QAAAA,UAAQ,KAAK;AAAA,MACf;AACA,qBAAe;AAAA,IACjB,CAAC;AAAA,EACH,CAAC;AAED,iBAAe;AAGf,QAAM,WAAW,MAAM;AAEvB,MAAI,CAAC,UAAU;AACb,QAAI,UAAU,aAAa,GAAG;AAC5B,UAAI,CAAC,MAAO,SAAQ,MAAM,+CAA0C,QAAQ,GAAG;AAAA,IACjF,WAAW,CAA
C,QAAQ;AAClB,UAAI,CAAC,MAAO,SAAQ,IAAI,yDAAoD;AAAA,IAC9E;AAAA,EAEF;AAEA,SAAO,EAAE,SAAS,OAAO,MAAM,YAAY,SAAS,SAAS;AAC/D;AAGO,SAAS,YAAkB;AAChC,MAAI,cAAc;AAChB,iBAAa,KAAK,SAAS;AAC3B,mBAAe;AAAA,EACjB;AACF;AAEA,eAAsB,UAAU,UAA+B,CAAC,GAAG;AACjE,QAAM,MAAM,IAAIM,MAAK;AAGrB,MAAI,IAAI,KAAK,KAAK;AAAA,IAChB,QAAQ;AAAA;AAAA,IACR,cAAc,CAAC,OAAO,QAAQ,OAAO,SAAS,UAAU,SAAS;AAAA,IACjE,cAAc,CAAC,gBAAgB,iBAAiB,kBAAkB;AAAA,IAClE,eAAe,CAAC,eAAe,aAAa;AAAA,IAC5C,QAAQ;AAAA;AAAA,EACV,CAAC,CAAC;AAGF,MAAI,CAAC,QAAQ,OAAO;AAClB,QAAI,IAAI,KAAK,OAAO,CAAC;AAAA,EACvB;AAGA,MAAI,MAAM,WAAW,MAAM;AAG3B,MAAI,MAAM,aAAa,QAAQ;AAC/B,MAAI,MAAM,WAAW,MAAM;AAC3B,MAAI,MAAM,aAAa,SAAS;AAChC,MAAI,MAAM,cAAc,SAAS;AACjC,MAAI,MAAM,UAAU,aAAK;AAGzB,MAAI,IAAI,iBAAiB,OAAO,MAAM;AACpC,WAAO,EAAE,KAAK,oBAAoB,CAAC;AAAA,EACrC,CAAC;AAGD,MAAI,IAAI,YAAY,CAAC,MAAM;AACzB,UAAM,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBb,WAAO,EAAE,KAAK,IAAI;AAAA,EACpB,CAAC;AAGD,MAAI,IAAI,KAAK,CAAC,MAAM;AAClB,WAAO,EAAE,KAAK;AAAA,MACZ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,MAAM;AAAA,MACN,WAAW;AAAA,QACT,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,WAAW;AAAA,MACb;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,SAAO;AACT;AAEA,eAAsB,YAAY,UAAyB,CAAC,GAAG;AAE7D,QAAM,SAAS,MAAM,kBAAkB,EAAE,OAAO,QAAQ,OAAO,eAAe,MAAM,CAAC;AACrF,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,6EAA6E;AAAA,EAC/F;AAGA,QAAM,SAAS,MAAM,WAAW,QAAQ,YAAY,QAAQ,gBAAgB;AAG5E,qBAAmB;AAGnB,MAAI,QAAQ,kBAAkB;AAC5B,WAAO,2BAA2B,QAAQ;AAAA,EAC5C;AAGA,MAAI,CAACL,aAAW,OAAO,wBAAwB,GAAG;AAChD,IAAAM,WAAU,OAAO,0BAA0B,EAAE,WAAW,KAAK,CAAC;AAC9D,QAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,sCAA+B,OAAO,wBAAwB,EAAE;AAAA,EAClG;AAGA,MAAI,CAAC,OAAO,qBAAqB,KAAK;AACpC,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AAEA,MAAIC,WAAU,OAAO,qBAAqB;AAC1C,MAAI,CAACA,UAAS;AACZ,QAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,6CAAsC;AACtE,IAAAA,WAAU,MAAM,oBAAoB,OAAO,qBAAqB,GAAG;AACnE,QAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,sCAAiC;AAAA,EACnE;AACA,eAAa,EAAE,KAAK,OAAO,qBAAqB,KAAK,SAAAA,SAAQ,CAAC;AAC9D,MAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,oCAA6B,OAAO,qBAAqB,GAA
G,EAAE;AAE9F,QAAM,OAAO,QAAQ,QAAQ,OAAO,OAAO;AAC3C,QAAM,OAAO,QAAQ,QAAQ,OAAO,OAAO,QAAQ;AAEnD,QAAM,YAAY,QAAQ,aAAa,OAAO,OAAO;AAErD,QAAM,MAAM,MAAM,UAAU,EAAE,OAAO,QAAQ,MAAM,CAAC;AAEpD,MAAI,CAAC,QAAQ,OAAO;AAClB,YAAQ,IAAI;AAAA,iCAA6B;AACzC,YAAQ,IAAI,+BAA0B,IAAI,IAAI,IAAI,EAAE;AACpD,QAAI,WAAW;AACb,cAAQ,IAAI,yBAAoB,SAAS,EAAE;AAAA,IAC7C;AACA,YAAQ,IAAI,gCAA2B,OAAO,wBAAwB,EAAE;AACxE,YAAQ,IAAI,4BAAuB,OAAO,YAAY,EAAE;AACxD,YAAQ,IAAI,kCAA6B,IAAI,IAAI,IAAI;AAAA,CAAiB;AAAA,EACxE;AAEA,mBAAiB,MAAM;AAAA,IACrB,OAAO,IAAI;AAAA,IACX;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAGD,MAAI;AACJ,MAAI;AACJ,MAAI,QAAQ,UAAU,OAAO;AAC3B,UAAM,SAAS,MAAM,WAAW,MAAM,QAAQ,WAAW,kBAAkB,QAAQ,OAAO,SAAS;AACnG,cAAU,OAAO;AACjB,iBAAa,OAAO;AAAA,EACtB;AAEA,SAAO,EAAE,KAAK,MAAM,MAAM,SAAS,WAAW;AAChD;AAEO,SAAS,aAAa;AAE3B,YAAU;AAGV,EAAK,aAAa,EAAE,KAAK,OAAOC,cAAa;AAC3C,eAAW,MAAMA,WAAU;AACzB,YAAW,aAAa,EAAE;AAAA,IAC5B;AAAA,EACF,CAAC,EAAE,MAAM,MAAM;AAAA,EAEf,CAAC;AAED,MAAI,gBAAgB;AAClB,mBAAe,MAAM;AACrB,qBAAiB;AAAA,EACnB;AACA,gBAAc;AAChB;AAEA,SAAS,sBAAsB;AAC7B,SAAO;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,OAAO;AAAA,MACP,SAAS;AAAA,MACT,aACE;AAAA,IACJ;AAAA,IACA,SAAS,CAAC,EAAE,KAAK,yBAAyB,aAAa,oBAAoB,CAAC;AAAA,IAC5E,OAAO;AAAA,MACL,KAAK;AAAA,QACH,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,oBAAoB,CAAC,EAAE;AAAA,YACpC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,WAAW;AAAA,QACT,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,oBAAoB,CAAC,EAAE;AAAA,YACpC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,QACf,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,eAAe;AAAA,YACnC,KAAK,EAAE,aAAa,mBAAmB;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,MACA,aAAa;AAAA,QACX,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,SAAS,IAAI,SAAS,QAAQ,EAAE,MAAM,WAAW,SAAS,GAAG,EAAE;AAAA,YACvE,EAAE,MAAM,UAAU,IAAI,SAAS,QAAQ,EAAE,MAAM,WAAW,SAAS,EAAE,EAAE;AAAA,UACzE;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,mBAAmB;AAAA,UACzC;AAAA,QACF;AAAA,QACA
,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,YAAY;AAAA,oBACV,MAAM,EAAE,MAAM,SAAS;AAAA,oBACvB,kBAAkB,EAAE,MAAM,SAAS;AAAA,oBACnC,OAAO,EAAE,MAAM,SAAS;AAAA,oBACxB,eAAe,EAAE,MAAM,SAAS;AAAA,kBAClC;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAAA,MACA,kBAAkB;AAAA,QAChB,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,QACA,QAAQ;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,2BAA2B;AAAA,QACzB,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YACrE,EAAE,MAAM,SAAS,IAAI,SAAS,QAAQ,EAAE,MAAM,WAAW,SAAS,IAAI,EAAE;AAAA,UAC1E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,wBAAwB;AAAA,QACtB,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,oBAAoB;AAAA,QAClB,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aACE;AAAA,UACF,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,QAAQ;AAAA,kBACnB,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,kBAC3B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,qBAAqB,CAAC,EAAE;AAAA,YACrC;AAAA,YACA,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAA
A,MACA,yBAAyB;AAAA,QACvB,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,QAAQ;AAAA,kBACnB,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,kBAC3B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,iBAAiB;AAAA,YACrC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,qCAAqC;AAAA,QACnC,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YACrE,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,6BAA6B;AAAA,YACjD,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,oCAAoC;AAAA,QAClC,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YACrE,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,kBAC3B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,gBAAgB;AAAA,YACpC,KAAK,EAAE,aAAa,mBAAmB;AAAA,YACvC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,0BAA0B;AAAA,QACxB,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,oBAAoB;AAAA,YACxC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,QACf,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,QAAQ;AAAA,kBACnB,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,oBACzB,MAAM,EAAE,MAAM,SAAS;AAAA,oBACvB,kBAAkB,EAAE,MAAM,SAAS;AAAA,oBACnC,OAAO,EAAE,MAAM,SAAS;AAAA,oBACxB,eAAe,EAAE,MAAM,SAAS;AA
AA,kBAClC;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,qBAAqB,CAAC,EAAE;AAAA,YACrC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,mCAAmC;AAAA,QACjC,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UAC1F,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,oBAAoB;AAAA,YACxC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,QACA,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UAC1F,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,SAAS;AAAA,kBACpB,YAAY;AAAA,oBACV,SAAS,EAAE,MAAM,SAAS;AAAA,oBAC1B,KAAK,EAAE,MAAM,SAAS;AAAA,oBACtB,MAAM,EAAE,MAAM,SAAS;AAAA,kBACzB;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,mBAAmB;AAAA,YACvC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,gDAAgD;AAAA,QAC9C,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,qBAAqB;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,MACA,qDAAqD;AAAA,QACnD,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC7E,EAAE,MAAM,QAAQ,IAAI,SAAS,QAAQ,EAAE,MAAM,UAAU,EAAE;AAAA,UAC3D;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,gBAAgB;AAAA,YACpC,KAAK,EAAE,aAAa,qBAAqB;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,MACA,qDAAqD;AAAA,QACnD,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN
,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,WAAW,SAAS,EAAE;AAAA,kBACzD;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,0BAA0B;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,MACA,sDAAsD;AAAA,QACpD,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,OAAO;AAAA,kBAClB,YAAY;AAAA,oBACV,OAAO,EAAE,MAAM,SAAS;AAAA,kBAC1B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,aAAa;AAAA,YACjC,KAAK,EAAE,aAAa,kBAAkB;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAAA,MACA,uDAAuD;AAAA,QACrD,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,cAAc,SAAS,EAAE,qBAAqB,CAAC,EAAE,EAAE;AAAA,YACvE,KAAK,EAAE,aAAa,qBAAqB;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,MACA,4CAA4C;AAAA,QAC1C,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UAC1F,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,mBAAmB;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,YAAY;AAAA,MACV,SAAS;AAAA,QACP,SAAS;AAAA,UACP,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,MAAM,EAAE,MAAM,SAAS;AAAA,YACvB,kBAAkB,EAAE,MAAM,SAAS;AAAA,YACnC,OAAO,EAAE,MAAM,SAAS;AAAA,YACxB,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,UAAU,WAAW,aAAa,OAAO,EAAE;AAAA,YAC5E,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,YACjD,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,UACnD;AAAA,QACF;AAAA,QACA,SAAS;AAAA,UACP,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,QAAQ,aAAa,UAAU,MAAM,EAAE;AAAA,YACtE,SAAS,EAAE,MAAM,SAAS;AAAA,YAC1B,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,UACnD;AAAA,QACF;AAAA,QACA,eAAe;AA
AA,UACb,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,YAAY,EAAE,MAAM,SAAS;AAAA,YAC7B,UAAU,EAAE,MAAM,SAAS;AAAA,YAC3B,OAAO,EAAE,MAAM,SAAS;AAAA,YACxB,QAAQ,EAAE,MAAM,SAAS;AAAA,YACzB,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,WAAW,YAAY,YAAY,aAAa,OAAO,EAAE;AAAA,YAC1F,kBAAkB,EAAE,MAAM,UAAU;AAAA,UACtC;AAAA,QACF;AAAA,QACA,UAAU;AAAA,UACR,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,MAAM,EAAE,MAAM,SAAS;AAAA,YACvB,SAAS,EAAE,MAAM,SAAS;AAAA,YAC1B,KAAK,EAAE,MAAM,SAAS;AAAA,YACtB,KAAK,EAAE,MAAM,UAAU;AAAA,YACvB,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,WAAW,WAAW,OAAO,EAAE;AAAA,YAChE,UAAU,EAAE,MAAM,UAAU;AAAA,YAC5B,OAAO,EAAE,MAAM,SAAS;AAAA,YACxB,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,YACjD,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["authKey","readFile","resolve","extname","relative","existsSync","init_types","createHash","extname","basename","remoteServerUrl","authKey","readFileSync","relative","minimatch","MAX_FILE_SIZE","init_types","tool","z","existsSync","readFileSync","join","minimatch","exec","promisify","writeFile","mkdir","readFile","unlink","readdir","join","nanoid","execAsync","Hono","existsSync","mkdirSync","writeFileSync","resolve","dirname","join","spawn","fileURLToPath","z","existsSync","mkdirSync","writeFileSync","statSync","readdir","join","basename","extname","relative","nanoid","streamText","generateText","tool","stepCountIs","z","nanoid","z","exec","promisify","existsSync","mkdirSync","join","output","isRunning","terminals","readdir","execAsync","promisify","exec","MAX_OUTPUT_CHARS","z","execAsync","MAX_OUTPUT_CHARS","output","status","truncatedOutput","tool","z","readFile","resolve","existsSync","existsSync","mkdirSync","readFileSync","writeFileSync","join","MAX_OUTPUT_CHARS","z","tool","resolve","existsSync","readFile","tool","z","readFile","writeFile","mkdir","resolve","relative","isAbsolute","dirname","existsSync","readFile","writeFile","mkdir","existsSync","resolve","relative","dirname","exec","promisify","
execAsync","resolve","relative","existsSync","readFile","dirname","mkdir","writeFile","extname","dirname","existsSync","resolve","dirname","exec","promisify","execAsync","readFile","existsSync","extname","resolve","cleanup","extname","dirname","extname","z","tool","isAbsolute","resolve","relative","existsSync","dirname","mkdir","writeFile","readFile","tool","z","tool","z","tool","z","resolve","relative","isAbsolute","extname","existsSync","readdir","stat","z","readdir","resolve","extname","tool","isAbsolute","existsSync","stat","relative","tool","z","nanoid","nanoid","resolve","tool","z","exec","promisify","readFile","stat","readdir","resolve","relative","isAbsolute","existsSync","tool","z","resolve","relative","isAbsolute","basename","readFile","readdir","existsSync","fileURLToPath","z","relative","resolve","readdir","readFile","tool","isAbsolute","existsSync","lines","fileURLToPath","basename","execAsync","promisify","exec","MAX_OUTPUT_CHARS","MAX_FILE_SIZE","tool","z","resolve","isAbsolute","existsSync","stat","readFile","relative","readdir","createSemanticSearchTool","tool","z","tool","z","tool","z","readFile","stat","join","basename","extname","isRemoteConfigured","storageQueries","generateText","platform","loadAllSkills","generateText","streamText","stepCountIs","generateText","getOrCreateProxy","FrameRecorder","updatedTask","destroyProxy","isRemoteConfigured","storageQueries","readFile","unlink","join","basename","tool","z","nanoid","resolve","exec","z","join","existsSync","mkdirSync","statSync","nanoid","extname","basename","writeFileSync","readdir","relative","isRemoteConfigured","storageQueries","Hono","zValidator","z","existsSync","mkdirSync","writeFileSync","join","nanoid","resolve","Hono","z","join","existsSync","mkdirSync","writeFileSync","resolve","zValidator","nanoid","Hono","zValidator","z","readFileSync","fileURLToPath","dirname","join","Hono","zValidator","z","Hono","z","zValidator","isRunning","sessions","Hono","zValidator","z","nanoid","Hono","z
","zValidator","nanoid","sendWebhook","isRemoteConfigured","storageQueries","exec","promisify","platform","execAsync","os","dirname","fileURLToPath","resolve","existsSync","join","possiblePaths","spawn","writeFileSync","Hono","mkdirSync","authKey","sessions"]}
|
|
1
|
+
{"version":3,"sources":["../../src/db/remote.ts","../../src/db/index.ts","../../src/config/types.ts","../../src/config/index.ts","../../src/skills/index.ts","../../src/semantic/types.ts","../../src/semantic/namespace.ts","../../src/semantic/hasher.ts","../../src/semantic/chunker.ts","../../src/semantic/client.ts","../../src/semantic/indexer.ts","../../src/semantic/index.ts","../../src/tools/semantic-search.ts","../../src/utils/webhook.ts","../../src/browser/stream-proxy.ts","../../src/browser/recorder.ts","../../src/server/index.ts","../../src/server/routes/sessions.ts","../../src/agent/index.ts","../../src/agent/model.ts","../../src/agent/remote-model.ts","../../src/tools/bash.ts","../../src/utils/tokens.ts","../../src/utils/truncate.ts","../../src/terminal/tmux.ts","../../src/tools/read-file.ts","../../src/utils/resize-image.ts","../../src/tools/write-file.ts","../../src/checkpoints/index.ts","../../src/lsp/index.ts","../../src/lsp/servers.ts","../../src/lsp/client.ts","../../src/lsp/types.ts","../../src/tools/todo.ts","../../src/tools/load-skill.ts","../../src/tools/linter.ts","../../src/tools/search.ts","../../src/agent/subagent.ts","../../src/agent/subagents/search.ts","../../src/tools/code-graph.ts","../../src/tools/index.ts","../../src/tools/task.ts","../../src/tools/upload-file.ts","../../src/agent/context.ts","../../src/agent/prompts.ts","../../src/utils/sanitize-messages.ts","../../src/agent/model-limits.ts","../../src/server/devtools-store.ts","../../src/server/routes/agents.ts","../../src/server/resumable-stream.ts","../../src/server/routes/health.ts","../../src/server/routes/terminals.ts","../../src/server/routes/tasks.ts","../../src/utils/dependencies.ts"],"sourcesContent":["/**\n * Remote database client\n * \n * Implements the same interface as the local SQLite database\n * but calls the remote server via HTTP.\n */\n\nimport type {\n Session,\n Message,\n ToolExecution,\n TodoItem,\n ModelMessage,\n Terminal,\n ActiveStream,\n Checkpoint,\n 
FileBackup,\n SubagentExecution,\n SubagentStep,\n IndexedChunk,\n IndexStatusRecord,\n LoadedSkill,\n} from './schema.js';\n\nlet remoteServerUrl: string | null = null;\nlet authKey: string | null = null;\n\n/**\n * Initialize the remote database client\n */\nexport function initRemoteDatabase(serverUrl: string, key: string) {\n remoteServerUrl = serverUrl.replace(/\\/$/, ''); // Remove trailing slash\n authKey = key;\n}\n\n/**\n * Close the remote client (no-op, just for API compatibility)\n */\nexport function closeRemoteDatabase() {\n remoteServerUrl = null;\n authKey = null;\n}\n\n/**\n * Check if remote database is configured\n */\nexport function isRemoteConfigured(): boolean {\n return !!remoteServerUrl && !!authKey;\n}\n\n/**\n * Date fields that should be parsed from ISO strings to Date objects.\n * These are top-level metadata fields on database records (Session, Message, etc.),\n * NOT fields inside modelMessage content (tool outputs, etc.).\n */\nconst DATE_FIELDS = ['createdAt', 'updatedAt', 'startedAt', 'completedAt', 'stoppedAt', 'finishedAt', 'loadedAt', 'indexedAt', 'lastFullIndex', 'lastIncrementalIndex'];\n\n/**\n * Fields that contain AI SDK ModelMessage data and should NOT be recursively\n * processed by parseDates. The AI SDK's Zod schema requires tool output values\n * to be valid JSON primitives (string, number, boolean, null, object, array).\n * Converting date strings to Date objects inside these fields corrupts them and\n * causes AI_InvalidPromptError when the messages are passed back to streamText().\n */\nconst MODEL_MESSAGE_FIELDS = ['modelMessage', 'modelMessages'];\n\n/**\n * Parse date strings to Date objects on top-level record fields only.\n * \n * IMPORTANT: Does NOT recurse into `modelMessage` / `modelMessages` fields.\n * Those contain AI SDK ModelMessage data that must remain JSON-serializable.\n * Recursing into them converts date strings (e.g. 
`createdAt` inside tool\n * result outputs) to Date objects, which violates the AI SDK's jsonValueSchema\n * and triggers AI_InvalidPromptError on subsequent streamText() calls.\n */\nfunction parseDates(obj: any): any {\n if (obj === null || obj === undefined) return obj;\n if (Array.isArray(obj)) return obj.map(parseDates);\n if (typeof obj !== 'object' || obj instanceof Date) return obj;\n \n const result = { ...obj };\n for (const key of Object.keys(result)) {\n // Skip modelMessage fields entirely - these must stay JSON-serializable\n if (MODEL_MESSAGE_FIELDS.includes(key)) {\n continue;\n }\n if (DATE_FIELDS.includes(key) && typeof result[key] === 'string') {\n result[key] = new Date(result[key]);\n } else if (typeof result[key] === 'object') {\n result[key] = parseDates(result[key]);\n }\n }\n return result;\n}\n\n/**\n * HTTP helper for remote API calls\n * @param options.skipParseDates - If true, skip the parseDates post-processing.\n * Use for endpoints that return ModelMessage[] directly, since those must\n * remain JSON-serializable for the AI SDK.\n */\nasync function api<T>(\n path: string,\n options: { method?: string; body?: unknown; skipParseDates?: boolean } = {}\n): Promise<T> {\n if (!remoteServerUrl || !authKey) {\n throw new Error('Remote database not initialized');\n }\n \n const url = `${remoteServerUrl}/db${path}`;\n const init: RequestInit = {\n method: options.method || 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${authKey}`,\n },\n };\n \n if (options.body) {\n init.body = JSON.stringify(options.body);\n }\n \n const response = await fetch(url, init);\n \n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new Error(error.error || `HTTP ${response.status}`);\n }\n \n const text = await response.text();\n if (!text || text === 'null') {\n return null as T;\n }\n \n const parsed = JSON.parse(text);\n\n // Skip date 
parsing for raw ModelMessage data - it must stay JSON-serializable\n if (options.skipParseDates) {\n return parsed as T;\n }\n\n // Parse JSON and convert date strings to Date objects\n return parseDates(parsed) as T;\n}\n\n// ============================================\n// Session Queries\n// ============================================\n\nexport const remoteSessionQueries = {\n create(data: { workingDirectory: string; model: string; name?: string; config?: any }): Promise<Session> {\n return api<Session>('/sessions', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`).catch(() => undefined);\n },\n\n list(limit = 50, offset = 0): Promise<Session[]> {\n return api<Session[]>(`/sessions?limit=${limit}&offset=${offset}`);\n },\n\n updateStatus(id: string, status: Session['status']): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`, { method: 'PATCH', body: { status } });\n },\n\n updateModel(id: string, model: string): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`, { method: 'PATCH', body: { model } });\n },\n\n update(id: string, updates: { model?: string; name?: string; config?: any }): Promise<Session | undefined> {\n return api<Session | undefined>(`/sessions/${id}`, { method: 'PATCH', body: updates });\n },\n\n delete(id: string): Promise<boolean> {\n return api<{ success: boolean }>(`/sessions/${id}`, { method: 'DELETE' }).then(r => r?.success ?? 
false);\n },\n};\n\n// ============================================\n// Message Queries\n// ============================================\n\nexport const remoteMessageQueries = {\n async getNextSequence(sessionId: string): Promise<number> {\n const result = await api<{ nextSequence: number }>(`/messages/session/${sessionId}/next-sequence`);\n return result.nextSequence;\n },\n\n create(sessionId: string, modelMessage: ModelMessage): Promise<Message> {\n return api<Message>('/messages', { method: 'POST', body: { sessionId, modelMessage } });\n },\n\n addMany(sessionId: string, modelMessages: ModelMessage[]): Promise<Message[]> {\n return api<Message[]>('/messages/batch', { method: 'POST', body: { sessionId, modelMessages } });\n },\n\n getBySession(sessionId: string): Promise<Message[]> {\n return api<Message[]>(`/messages/session/${sessionId}`);\n },\n\n getModelMessages(sessionId: string): Promise<ModelMessage[]> {\n // IMPORTANT: skipParseDates=true because ModelMessage data must remain\n // JSON-serializable. The parseDates function would convert date strings\n // inside tool result outputs (e.g. 
todo items with createdAt) to Date\n // objects, which violates the AI SDK's jsonValueSchema and causes\n // AI_InvalidPromptError on subsequent streamText() calls.\n return api<ModelMessage[]>(`/messages/session/${sessionId}/model-messages`, { skipParseDates: true });\n },\n\n async getRecentBySession(sessionId: string, limit = 50): Promise<Message[]> {\n const messages = await api<Message[]>(`/messages/session/${sessionId}`);\n return messages.slice(-limit);\n },\n\n async countBySession(sessionId: string): Promise<number> {\n const result = await api<{ count: number }>(`/messages/session/${sessionId}/count`);\n return result.count;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/messages/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n\n async deleteFromSequence(sessionId: string, fromSequence: number): Promise<number> {\n const result = await api<{ deleted: number }>(\n `/messages/session/${sessionId}/from-sequence/${fromSequence}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n};\n\n// ============================================\n// Tool Execution Queries\n// ============================================\n\nexport const remoteToolExecutionQueries = {\n create(data: {\n sessionId: string;\n messageId?: string;\n toolName: string;\n toolCallId: string;\n input?: any;\n requiresApproval?: boolean;\n status?: 'pending' | 'approved' | 'rejected' | 'completed' | 'error';\n }): Promise<ToolExecution> {\n return api<ToolExecution>('/tool-executions', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`).catch(() => undefined);\n },\n\n getByToolCallId(toolCallId: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/by-tool-call-id/${toolCallId}`).catch(() => undefined);\n },\n\n 
getPendingApprovals(sessionId: string): Promise<ToolExecution[]> {\n return api<ToolExecution[]>(`/tool-executions/session/${sessionId}/pending`);\n },\n\n approve(id: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`, { method: 'PATCH', body: { status: 'approved' } });\n },\n\n reject(id: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`, { method: 'PATCH', body: { status: 'rejected' } });\n },\n\n complete(id: string, output: unknown, error?: string): Promise<ToolExecution | undefined> {\n return api<ToolExecution | undefined>(`/tool-executions/${id}`, {\n method: 'PATCH',\n body: { status: error ? 'error' : 'completed', output, error },\n });\n },\n\n getBySession(sessionId: string): Promise<ToolExecution[]> {\n return api<ToolExecution[]>(`/tool-executions/session/${sessionId}`);\n },\n\n async deleteAfterTime(sessionId: string, afterTime: Date | string): Promise<number> {\n // Handle both Date objects and ISO strings\n const timestamp = afterTime instanceof Date ? 
afterTime.getTime() : new Date(afterTime).getTime();\n const result = await api<{ deleted: number }>(\n `/tool-executions/session/${sessionId}/after/${timestamp}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n};\n\n// ============================================\n// Todo Queries\n// ============================================\n\nexport const remoteTodoQueries = {\n create(data: { sessionId: string; content: string; order?: number }): Promise<TodoItem> {\n return api<TodoItem>('/todos', { method: 'POST', body: data });\n },\n\n createMany(sessionId: string, items: Array<{ content: string; order?: number }>): Promise<TodoItem[]> {\n return api<TodoItem[]>('/todos/batch', { method: 'POST', body: { sessionId, items } });\n },\n\n getBySession(sessionId: string): Promise<TodoItem[]> {\n return api<TodoItem[]>(`/todos/session/${sessionId}`);\n },\n\n updateStatus(id: string, status: TodoItem['status']): Promise<TodoItem | undefined> {\n return api<TodoItem | undefined>(`/todos/${id}`, { method: 'PATCH', body: { status } });\n },\n\n async delete(id: string): Promise<boolean> {\n const result = await api<{ success: boolean }>(`/todos/${id}`, { method: 'DELETE' });\n return result?.success ?? 
false;\n },\n\n async clearSession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/todos/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Skill Queries\n// ============================================\n\nexport const remoteSkillQueries = {\n load(sessionId: string, skillName: string): Promise<LoadedSkill> {\n return api<LoadedSkill>('/skills', { method: 'POST', body: { sessionId, skillName } });\n },\n\n getBySession(sessionId: string): Promise<LoadedSkill[]> {\n return api<LoadedSkill[]>(`/skills/session/${sessionId}`);\n },\n\n async isLoaded(sessionId: string, skillName: string): Promise<boolean> {\n const result = await api<{ isLoaded: boolean }>(`/skills/session/${sessionId}/is-loaded/${skillName}`);\n return result.isLoaded;\n },\n};\n\n// ============================================\n// Terminal Queries\n// ============================================\n\nexport const remoteTerminalQueries = {\n create(data: { sessionId: string; command: string; cwd: string; name?: string }): Promise<Terminal> {\n return api<Terminal>('/terminals', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<Terminal | undefined> {\n return api<Terminal | undefined>(`/terminals/${id}`).catch(() => undefined);\n },\n\n getBySession(sessionId: string): Promise<Terminal[]> {\n return api<Terminal[]>(`/terminals/session/${sessionId}`);\n },\n\n getRunning(sessionId: string): Promise<Terminal[]> {\n return api<Terminal[]>(`/terminals/session/${sessionId}/running`);\n },\n\n updateStatus(id: string, status: Terminal['status'], exitCode?: number, error?: string): Promise<Terminal | undefined> {\n return api<Terminal | undefined>(`/terminals/${id}`, { method: 'PATCH', body: { status, exitCode, error } });\n },\n\n updatePid(id: string, pid: number): Promise<Terminal | undefined> {\n return api<Terminal | undefined>(`/terminals/${id}`, { method: 
'PATCH', body: { pid } });\n },\n\n async delete(id: string): Promise<boolean> {\n const result = await api<{ success: boolean }>(`/terminals/${id}`, { method: 'DELETE' });\n return result?.success ?? false;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/terminals/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Active Stream Queries\n// ============================================\n\nexport const remoteActiveStreamQueries = {\n create(sessionId: string, streamId: string): Promise<ActiveStream> {\n return api<ActiveStream>('/streams', { method: 'POST', body: { sessionId, streamId } });\n },\n\n getBySessionId(sessionId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | null>(`/streams/session/${sessionId}`).then(r => r ?? undefined);\n },\n\n getByStreamId(streamId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | undefined>(`/streams/by-stream-id/${streamId}`).catch(() => undefined);\n },\n\n finish(streamId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | undefined>(`/streams/by-stream-id/${streamId}`, { method: 'PATCH', body: { status: 'finished' } });\n },\n\n markError(streamId: string): Promise<ActiveStream | undefined> {\n return api<ActiveStream | undefined>(`/streams/by-stream-id/${streamId}`, { method: 'PATCH', body: { status: 'error' } });\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/streams/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Checkpoint Queries\n// ============================================\n\nexport const remoteCheckpointQueries = {\n create(data: { sessionId: string; messageSequence: number; gitHead?: string }): Promise<Checkpoint> {\n return 
api<Checkpoint>('/checkpoints', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<Checkpoint | undefined> {\n return api<Checkpoint | undefined>(`/checkpoints/${id}`).catch(() => undefined);\n },\n\n getBySession(sessionId: string): Promise<Checkpoint[]> {\n return api<Checkpoint[]>(`/checkpoints/session/${sessionId}`);\n },\n\n getByMessageSequence(sessionId: string, messageSequence: number): Promise<Checkpoint | undefined> {\n return api<Checkpoint | null>(`/checkpoints/session/${sessionId}/by-sequence/${messageSequence}`).then(r => r ?? undefined);\n },\n\n getLatest(sessionId: string): Promise<Checkpoint | undefined> {\n return api<Checkpoint | null>(`/checkpoints/session/${sessionId}/latest`).then(r => r ?? undefined);\n },\n\n async deleteAfterSequence(sessionId: string, messageSequence: number): Promise<number> {\n const result = await api<{ deleted: number }>(\n `/checkpoints/session/${sessionId}/after-sequence/${messageSequence}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/checkpoints/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// File Backup Queries\n// ============================================\n\nexport const remoteFileBackupQueries = {\n create(data: {\n checkpointId: string;\n sessionId: string;\n filePath: string;\n originalContent: string | null;\n existed: boolean;\n }): Promise<FileBackup> {\n return api<FileBackup>('/file-backups', { method: 'POST', body: data });\n },\n\n getByCheckpoint(checkpointId: string): Promise<FileBackup[]> {\n return api<FileBackup[]>(`/file-backups/checkpoint/${checkpointId}`);\n },\n\n getBySession(sessionId: string): Promise<FileBackup[]> {\n return api<FileBackup[]>(`/file-backups/session/${sessionId}`);\n },\n\n getFromSequence(sessionId: string, messageSequence: number): 
Promise<FileBackup[]> {\n return api<FileBackup[]>(`/file-backups/session/${sessionId}/from-sequence/${messageSequence}`);\n },\n\n async hasBackup(checkpointId: string, filePath: string): Promise<boolean> {\n const result = await api<{ hasBackup: boolean }>(\n `/file-backups/checkpoint/${checkpointId}/has-backup/${encodeURIComponent(filePath)}`\n );\n return result.hasBackup;\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/file-backups/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Subagent Queries\n// ============================================\n\nexport const remoteSubagentQueries = {\n create(data: {\n sessionId: string;\n toolCallId: string;\n subagentType: string;\n task: string;\n model: string;\n }): Promise<SubagentExecution> {\n return api<SubagentExecution>('/subagents', { method: 'POST', body: data });\n },\n\n getById(id: string): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`).catch(() => undefined);\n },\n\n getByToolCallId(toolCallId: string): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/by-tool-call-id/${toolCallId}`).catch(() => undefined);\n },\n\n getBySession(sessionId: string): Promise<SubagentExecution[]> {\n return api<SubagentExecution[]>(`/subagents/session/${sessionId}`);\n },\n\n addStep(id: string, step: SubagentStep): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}/add-step`, { method: 'POST', body: { step } }).catch(() => undefined);\n },\n\n complete(id: string, result: unknown): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`, { method: 'PATCH', body: { status: 'completed', result } }).catch(() => undefined);\n },\n\n markError(id: string, error: string): 
Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`, { method: 'PATCH', body: { status: 'error', error } }).catch(() => undefined);\n },\n\n cancel(id: string): Promise<SubagentExecution | undefined> {\n return api<SubagentExecution | undefined>(`/subagents/${id}`, { method: 'PATCH', body: { status: 'cancelled' } }).catch(() => undefined);\n },\n\n async deleteBySession(sessionId: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/subagents/session/${sessionId}`, { method: 'DELETE' });\n return result.deleted;\n },\n};\n\n// ============================================\n// Indexed Chunk Queries\n// ============================================\n\nexport const remoteIndexedChunkQueries = {\n upsert(\n _db: any, // Ignored - for API compatibility\n data: {\n id: string;\n contentHash: string;\n filePath: string;\n repoNamespace: string;\n startLine?: number;\n endLine?: number;\n language?: string;\n }\n ): Promise<IndexedChunk> {\n return api<IndexedChunk>('/indexed-chunks', { method: 'POST', body: data });\n },\n\n batchUpsert(\n _db: any,\n chunks: Array<{\n id: string;\n contentHash: string;\n filePath: string;\n repoNamespace: string;\n startLine?: number;\n endLine?: number;\n language?: string;\n }>\n ): Promise<{ created: number; updated: number }> {\n return api<{ created: number; updated: number }>('/indexed-chunks/batch', { \n method: 'POST', \n body: { chunks } \n });\n },\n\n getById(_db: any, id: string): Promise<IndexedChunk | undefined> {\n return api<IndexedChunk | undefined>(`/indexed-chunks/${id}`).catch(() => undefined);\n },\n\n getByNamespace(_db: any, namespace: string): Promise<IndexedChunk[]> {\n return api<IndexedChunk[]>(`/indexed-chunks/namespace/${namespace}`);\n },\n\n getByFilePath(_db: any, namespace: string, filePath: string): Promise<IndexedChunk[]> {\n return 
api<IndexedChunk[]>(`/indexed-chunks/namespace/${namespace}/file/${encodeURIComponent(filePath)}`);\n },\n\n async deleteByNamespace(_db: any, namespace: string): Promise<number> {\n const result = await api<{ deleted: number }>(`/indexed-chunks/namespace/${namespace}`, { method: 'DELETE' });\n return result.deleted;\n },\n\n async deleteByFilePath(_db: any, namespace: string, filePath: string): Promise<number> {\n const result = await api<{ deleted: number }>(\n `/indexed-chunks/namespace/${namespace}/file/${encodeURIComponent(filePath)}`,\n { method: 'DELETE' }\n );\n return result.deleted;\n },\n\n async countByNamespace(_db: any, namespace: string): Promise<number> {\n const result = await api<{ count: number }>(`/indexed-chunks/namespace/${namespace}/count`);\n return result.count;\n },\n};\n\n// ============================================\n// Index Status Queries\n// ============================================\n\nexport const remoteIndexStatusQueries = {\n upsert(\n _db: any, // Ignored\n data: {\n id: string;\n repoNamespace: string;\n totalChunks?: number;\n lastFullIndex?: Date;\n lastIncrementalIndex?: Date;\n }\n ): Promise<IndexStatusRecord> {\n return api<IndexStatusRecord>('/index-status', {\n method: 'POST',\n body: {\n ...data,\n lastFullIndex: data.lastFullIndex?.toISOString(),\n lastIncrementalIndex: data.lastIncrementalIndex?.toISOString(),\n },\n });\n },\n\n get(_db: any, namespace: string): Promise<IndexStatusRecord | undefined> {\n return api<IndexStatusRecord | null>(`/index-status/namespace/${namespace}`).then(r => r ?? undefined);\n },\n\n async delete(_db: any, namespace: string): Promise<boolean> {\n const result = await api<{ success: boolean }>(`/index-status/namespace/${namespace}`, { method: 'DELETE' });\n return result?.success ?? 
false;\n },\n\n list(_db: any): Promise<IndexStatusRecord[]> {\n return api<IndexStatusRecord[]>('/index-status');\n },\n};\n\n// ============================================\n// Storage (GCS) — calls /storage/* endpoints\n// ============================================\n\nexport interface SessionFile {\n id: string;\n fileName: string;\n contentType: string;\n sizeBytes: number | null;\n category: string;\n createdAt: string;\n downloadUrl: string | null;\n downloadUrlExpiresAt: string | null;\n}\n\nexport interface UploadUrlResponse {\n fileId: string;\n uploadUrl: string;\n gcsPath: string;\n expiresAt: string;\n}\n\nasync function storageApi<T>(\n path: string,\n options: { method?: string; body?: unknown } = {}\n): Promise<T> {\n if (!remoteServerUrl || !authKey) {\n throw new Error('Remote database not initialized');\n }\n\n const url = `${remoteServerUrl}/storage${path}`;\n const init: RequestInit = {\n method: options.method || 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${authKey}`,\n },\n };\n\n if (options.body) {\n init.body = JSON.stringify(options.body);\n }\n\n const response = await fetch(url, init);\n if (!response.ok) {\n const errorText = await response.text().catch(() => 'Unknown error');\n throw new Error(`Storage API error ${response.status}: ${errorText}`);\n }\n return response.json() as Promise<T>;\n}\n\nexport const storageQueries = {\n async getUploadUrl(\n sessionId: string,\n fileName: string,\n contentType: string,\n category?: string\n ): Promise<UploadUrlResponse> {\n return storageApi<UploadUrlResponse>('/upload-url', {\n method: 'POST',\n body: { sessionId, fileName, contentType, category },\n });\n },\n\n async getSessionFiles(sessionId: string): Promise<SessionFile[]> {\n const result = await storageApi<{ files: SessionFile[] }>(`/files/${sessionId}`);\n return result.files;\n },\n\n async getDownloadUrl(fileId: string): Promise<{ downloadUrl: string; expiresAt: string }> {\n return 
storageApi<{ downloadUrl: string; expiresAt: string }>(`/download/${fileId}`);\n },\n\n async deleteFile(fileId: string): Promise<void> {\n await storageApi(`/files/${fileId}`, { method: 'DELETE' });\n },\n\n async updateFile(fileId: string, data: { sizeBytes?: number }): Promise<void> {\n await storageApi(`/files/${fileId}`, { method: 'PATCH', body: data });\n },\n};\n","/**\n * Database layer - Remote MongoDB only\n * \n * All data is stored on the remote server at agent.sparkecode.com\n */\n\nimport {\n initRemoteDatabase,\n closeRemoteDatabase,\n remoteSessionQueries,\n remoteMessageQueries,\n remoteToolExecutionQueries,\n remoteTodoQueries,\n remoteSkillQueries,\n remoteTerminalQueries,\n remoteActiveStreamQueries,\n remoteCheckpointQueries,\n remoteFileBackupQueries,\n remoteSubagentQueries,\n remoteIndexedChunkQueries,\n remoteIndexStatusQueries,\n} from './remote.js';\n\n// Re-export types from schema\nexport type {\n Session,\n NewSession,\n Message,\n NewMessage,\n ToolExecution,\n NewToolExecution,\n TodoItem,\n NewTodoItem,\n SessionConfig,\n ModelMessage,\n UserModelMessage,\n UserContentPart,\n UserTextPart,\n UserImagePart,\n UserFilePart,\n Terminal,\n NewTerminal,\n ActiveStream,\n NewActiveStream,\n Checkpoint,\n NewCheckpoint,\n FileBackup,\n NewFileBackup,\n SubagentExecution,\n NewSubagentExecution,\n SubagentStep,\n IndexedChunk,\n NewIndexedChunk,\n IndexStatusRecord,\n NewIndexStatusRecord,\n LoadedSkill,\n TaskConfig,\n} from './schema.js';\n\nlet initialized = false;\n\n/**\n * Initialize the database with remote server config\n * @param config - Remote server configuration { url, authKey }\n */\nexport function initDatabase(config: { url: string; authKey: string }) {\n initRemoteDatabase(config.url, config.authKey);\n initialized = true;\n}\n\n/**\n * Get a stub database object for API compatibility\n * Functions that take a db parameter will ignore it for remote operations\n */\nexport function getDb() {\n if (!initialized) {\n throw new 
Error('Database not initialized. Call initDatabase first.');\n }\n // Return a stub - the actual queries use remote API calls\n return {} as any;\n}\n\n/**\n * Check if using remote database (always true now)\n */\nexport function isUsingRemote(): boolean {\n return true;\n}\n\n/**\n * Close the database connection\n */\nexport function closeDatabase() {\n closeRemoteDatabase();\n initialized = false;\n}\n\n// Re-export query objects with cleaner names\nexport const sessionQueries = remoteSessionQueries;\nexport const messageQueries = remoteMessageQueries;\nexport const toolExecutionQueries = remoteToolExecutionQueries;\nexport const todoQueries = remoteTodoQueries;\nexport const skillQueries = remoteSkillQueries;\nexport const terminalQueries = remoteTerminalQueries;\nexport const activeStreamQueries = remoteActiveStreamQueries;\nexport const checkpointQueries = remoteCheckpointQueries;\nexport const fileBackupQueries = remoteFileBackupQueries;\nexport const subagentQueries = remoteSubagentQueries;\nexport const indexedChunkQueries = remoteIndexedChunkQueries;\nexport const indexStatusQueries = remoteIndexStatusQueries;\n","import { z } from 'zod';\n\n// Tool approval configuration\nexport const ToolApprovalConfigSchema = z.object({\n bash: z.boolean().optional().default(true),\n write_file: z.boolean().optional().default(false),\n read_file: z.boolean().optional().default(false),\n load_skill: z.boolean().optional().default(false),\n todo: z.boolean().optional().default(false),\n});\n\n// Skill definition (from frontmatter)\nexport const SkillMetadataSchema = z.object({\n name: z.string(),\n description: z.string(),\n // Whether to always inject this skill into context (vs on-demand loading)\n alwaysApply: z.boolean().optional().default(false),\n // Glob patterns - auto-inject when working with matching files\n globs: z.array(z.string()).optional().default([]),\n});\n\n// Skill loading type\nexport type SkillLoadType = 'always' | 'on_demand' | 
'glob_matched';\n\n// Task mode configuration (stored inside SessionConfig)\nexport const TaskConfigSchema = z.object({\n enabled: z.boolean(),\n outputSchema: z.record(z.string(), z.unknown()),\n webhookUrl: z.string().url().optional(),\n maxIterations: z.number().optional(),\n status: z.enum(['running', 'completed', 'failed']),\n result: z.unknown().optional(),\n error: z.string().optional(),\n iterations: z.number().optional(),\n});\n\n// Session-specific config (stored in DB)\nexport const SessionConfigSchema = z.object({\n toolApprovals: z.record(z.string(), z.boolean()).optional(),\n approvalWebhook: z.string().url().optional(),\n skillsDirectory: z.string().optional(),\n maxContextChars: z.number().optional().default(200_000),\n task: TaskConfigSchema.optional(),\n});\n\n// Vector Gateway configuration for semantic search\nexport const VectorGatewayConfigSchema = z\n .object({\n // Redis cluster nodes URL for Vector Gateway (or use REDIS_CLUSTER_NODES env var)\n redisUrl: z.string().optional(),\n // HTTP URL for database operations (or use VECTOR_HTTP_URL env var)\n httpUrl: z.string().optional(),\n // Embedding model to use (default: text-embedding-3-small)\n embeddingModel: z.string().default('gemini-embedding-001'),\n // Custom namespace override (auto-generated from git remote if not set)\n namespace: z.string().optional(),\n // File patterns to include in indexing\n include: z\n .array(z.string())\n .optional()\n .default([\n '**/*.ts',\n '**/*.tsx',\n '**/*.js',\n '**/*.jsx',\n '**/*.py',\n '**/*.go',\n '**/*.rs',\n '**/*.java',\n '**/*.md',\n '**/*.mdx',\n '**/*.txt',\n ]),\n // File patterns to exclude from indexing\n exclude: z\n .array(z.string())\n .optional()\n .default([\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/.next/**',\n '**/*.min.js',\n '**/*.bundle.js',\n '**/pnpm-lock.yaml',\n '**/package-lock.json',\n '**/yarn.lock',\n '**/.test-workspace/**',\n '**/.semantic-test-workspace/**',\n 
'**/.semantic-integration-test/**',\n ]),\n })\n .optional();\n\n// Remote server configuration\nexport const RemoteServerConfigSchema = z\n .object({\n // URL of the remote server (e.g., https://agent.sparkecode.com)\n url: z.string().url().optional(),\n // Auth key for the remote server (auto-generated on first use if not set)\n // Can also be set via SPARKECODER_AUTH_KEY env var\n authKey: z.string().optional(),\n })\n .optional();\n\n// Main sparkecoder config file schema\nexport const SparkcoderConfigSchema = z.object({\n // Default model to use (Vercel AI Gateway format)\n defaultModel: z.string().default('anthropic/claude-opus-4-6'),\n\n // Working directory for file operations\n workingDirectory: z.string().optional(),\n\n // Tool approval settings\n toolApprovals: ToolApprovalConfigSchema.optional().default({}),\n\n // Approval webhook URL (called when approval is needed)\n approvalWebhook: z.string().url().optional(),\n\n // Skills configuration\n skills: z\n .object({\n // Directory containing skill files\n directory: z.string().optional().default('./skills'),\n // Additional skill directories to include\n additionalDirectories: z.array(z.string()).optional().default([]),\n })\n .optional()\n .default({}),\n\n // Context management\n context: z\n .object({\n // Maximum context size before summarization (in characters)\n maxChars: z.number().optional().default(200_000),\n // Enable automatic summarization\n autoSummarize: z.boolean().optional().default(true),\n // Number of recent messages to keep after summarization\n keepRecentMessages: z.number().optional().default(10),\n })\n .optional()\n .default({}),\n\n // Server configuration\n server: z\n .object({\n port: z.number().default(3141),\n host: z.string().default('127.0.0.1'),\n // Public URL for web UI to connect to API (for Docker/remote access)\n // If not set, defaults to http://{host}:{port}\n publicUrl: z.string().url().optional(),\n })\n .default({ port: 3141, host: '127.0.0.1' }),\n\n // 
Database path (used for local SQLite - ignored if remoteServer is configured)\n databasePath: z.string().optional().default('./sparkecoder.db'),\n\n // Remote server configuration (for centralized storage)\n // If configured, uses remote MongoDB instead of local SQLite\n remoteServer: RemoteServerConfigSchema,\n\n // Vector Gateway configuration for semantic search\n vectorGateway: VectorGatewayConfigSchema,\n});\n\nexport type ToolApprovalConfig = z.infer<typeof ToolApprovalConfigSchema>;\nexport type SkillMetadata = z.infer<typeof SkillMetadataSchema>;\nexport type SessionConfig = z.infer<typeof SessionConfigSchema>;\nexport type VectorGatewayConfig = z.infer<typeof VectorGatewayConfigSchema>;\nexport type RemoteServerConfig = z.infer<typeof RemoteServerConfigSchema>;\nexport type SparkcoderConfig = z.infer<typeof SparkcoderConfigSchema>;\n\n// Discovered skill sources\nexport interface DiscoveredSkills {\n // Directories where all skills are always loaded\n alwaysLoadedDirs: Array<{ path: string; priority: number }>;\n // Directories where skills are on-demand (frontmatter can override)\n onDemandDirs: Array<{ path: string; priority: number }>;\n // Path to AGENTS.md if it exists (always loaded)\n agentsMdPath: string | null;\n // All directories in priority order (for deduplication)\n allDirectories: string[];\n}\n\n// Resolved vector gateway config with env var overrides applied\nexport interface ResolvedVectorGatewayConfig {\n redisUrl: string | null;\n httpUrl: string | null;\n embeddingModel: string;\n namespace: string | null;\n include: string[];\n exclude: string[];\n}\n\n// Resolved remote server config\nexport interface ResolvedRemoteServerConfig {\n url: string | null;\n authKey: string | null;\n isConfigured: boolean;\n}\n\n// Runtime config with resolved paths\nexport interface ResolvedConfig extends Omit<SparkcoderConfig, 'server'> {\n server: {\n port: number;\n host: string;\n publicUrl?: string;\n };\n resolvedWorkingDirectory: string;\n 
resolvedSkillsDirectories: string[];\n resolvedDatabasePath: string;\n // Enhanced skill discovery\n discoveredSkills: DiscoveredSkills;\n // Resolved vector gateway config (with env var overrides)\n resolvedVectorGateway: ResolvedVectorGatewayConfig;\n // Resolved remote server config (with env var overrides)\n resolvedRemoteServer: ResolvedRemoteServerConfig;\n}\n","import { existsSync, readFileSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { resolve, dirname, join } from 'node:path';\nimport { homedir, platform } from 'node:os';\nimport {\n SparkcoderConfig,\n SparkcoderConfigSchema,\n ResolvedConfig,\n DiscoveredSkills,\n ResolvedVectorGatewayConfig,\n ResolvedRemoteServerConfig,\n} from './types.js';\n\nconst CONFIG_FILE_NAMES = [\n 'sparkecoder.config.json',\n 'sparkecoder.json',\n '.sparkecoder.json',\n];\n\n/**\n * Discover all skill directories in the working directory\n * Searches for:\n * - .sparkecoder/rules/ (always loaded, priority 1)\n * - .sparkecoder/skills/ (on-demand, priority 2)\n * - .cursor/rules/ (parse frontmatter, priority 3)\n * - .claude/skills/ (on-demand, priority 4)\n * - skills/ (legacy, on-demand, priority 5)\n * - AGENTS.md (always loaded)\n */\nexport function discoverSkillDirectories(workingDir: string): DiscoveredSkills {\n const alwaysLoadedDirs: Array<{ path: string; priority: number }> = [];\n const onDemandDirs: Array<{ path: string; priority: number }> = [];\n const allDirectories: string[] = [];\n let agentsMdPath: string | null = null;\n\n // Priority 1: .sparkecoder/rules/ (always loaded)\n const sparkRulesDir = join(workingDir, '.sparkecoder', 'rules');\n if (existsSync(sparkRulesDir)) {\n alwaysLoadedDirs.push({ path: sparkRulesDir, priority: 1 });\n allDirectories.push(sparkRulesDir);\n }\n\n // Priority 2: .sparkecoder/skills/ (on-demand)\n const sparkSkillsDir = join(workingDir, '.sparkecoder', 'skills');\n if (existsSync(sparkSkillsDir)) {\n onDemandDirs.push({ path: sparkSkillsDir, priority: 2 });\n 
allDirectories.push(sparkSkillsDir);\n }\n\n // Priority 3: .cursor/rules/ (parse frontmatter for alwaysApply)\n const cursorRulesDir = join(workingDir, '.cursor', 'rules');\n if (existsSync(cursorRulesDir)) {\n // Cursor rules can be either - will be determined by frontmatter\n onDemandDirs.push({ path: cursorRulesDir, priority: 3 });\n allDirectories.push(cursorRulesDir);\n }\n\n // Priority 4: .claude/skills/ (on-demand)\n const claudeSkillsDir = join(workingDir, '.claude', 'skills');\n if (existsSync(claudeSkillsDir)) {\n onDemandDirs.push({ path: claudeSkillsDir, priority: 4 });\n allDirectories.push(claudeSkillsDir);\n }\n\n // Priority 5: skills/ (legacy, on-demand)\n const legacySkillsDir = join(workingDir, 'skills');\n if (existsSync(legacySkillsDir)) {\n onDemandDirs.push({ path: legacySkillsDir, priority: 5 });\n allDirectories.push(legacySkillsDir);\n }\n\n // Check for AGENTS.md (always loaded)\n const agentsMd = join(workingDir, 'AGENTS.md');\n if (existsSync(agentsMd)) {\n agentsMdPath = agentsMd;\n }\n\n // Also add built-in skills directory\n // Try multiple paths: dev mode (tsx) resolves from src/config/, production resolves from dist/\n const baseDir = dirname(import.meta.url.replace('file://', ''));\n const builtInCandidates = [\n resolve(baseDir, '../skills/default'), // dev: src/config → src/skills/default\n resolve(baseDir, './skills/default'), // prod: dist/ → dist/skills/default\n ];\n const builtInSkillsDir = builtInCandidates.find(p => existsSync(p));\n if (builtInSkillsDir) {\n onDemandDirs.push({ path: builtInSkillsDir, priority: 100 }); // Lowest priority\n allDirectories.push(builtInSkillsDir);\n }\n\n return {\n alwaysLoadedDirs,\n onDemandDirs,\n agentsMdPath,\n allDirectories,\n };\n}\n\n/**\n * Get the standard application data directory for the current OS\n * - macOS: ~/Library/Application Support/sparkecoder\n * - Windows: %APPDATA%/sparkecoder\n * - Linux: ~/.local/share/sparkecoder\n */\nexport function getAppDataDirectory(): 
string {\n const appName = 'sparkecoder';\n \n switch (platform()) {\n case 'darwin':\n return join(homedir(), 'Library', 'Application Support', appName);\n case 'win32':\n return join(process.env.APPDATA || join(homedir(), 'AppData', 'Roaming'), appName);\n default:\n // Linux and other Unix-like systems\n return join(process.env.XDG_DATA_HOME || join(homedir(), '.local', 'share'), appName);\n }\n}\n\n/**\n * Ensure the app data directory exists\n */\nexport function ensureAppDataDirectory(): string {\n const dir = getAppDataDirectory();\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\nlet cachedConfig: ResolvedConfig | null = null;\n\n/**\n * Find the config file by searching:\n * 1. Up the directory tree from startDir (project-specific config)\n * 2. In the app data directory (global config)\n */\nfunction findConfigFile(startDir: string): string | null {\n // First, search up the directory tree\n let currentDir = startDir;\n\n while (currentDir !== dirname(currentDir)) {\n for (const fileName of CONFIG_FILE_NAMES) {\n const configPath = resolve(currentDir, fileName);\n if (existsSync(configPath)) {\n return configPath;\n }\n }\n currentDir = dirname(currentDir);\n }\n\n // If not found, check the app data directory for a global config\n const appDataDir = getAppDataDirectory();\n for (const fileName of CONFIG_FILE_NAMES) {\n const configPath = join(appDataDir, fileName);\n if (existsSync(configPath)) {\n return configPath;\n }\n }\n\n return null;\n}\n\n/**\n * Load and parse the config file\n */\nexport function loadConfig(\n configPath?: string,\n workingDirectory?: string\n): ResolvedConfig {\n const cwd = workingDirectory || process.cwd();\n\n // Try to find config file\n let rawConfig: Partial<SparkcoderConfig> = {};\n let configDir = cwd;\n\n if (configPath) {\n if (!existsSync(configPath)) {\n throw new Error(`Config file not found: ${configPath}`);\n }\n const content = readFileSync(configPath, 'utf-8');\n 
rawConfig = JSON.parse(content);\n configDir = dirname(resolve(configPath));\n } else {\n const foundPath = findConfigFile(cwd);\n if (foundPath) {\n const content = readFileSync(foundPath, 'utf-8');\n rawConfig = JSON.parse(content);\n configDir = dirname(foundPath);\n }\n }\n\n // Override with environment variables\n if (process.env.SPARKECODER_MODEL) {\n rawConfig.defaultModel = process.env.SPARKECODER_MODEL;\n }\n if (process.env.SPARKECODER_PORT) {\n rawConfig.server = {\n port: parseInt(process.env.SPARKECODER_PORT, 10),\n host: rawConfig.server?.host ?? '127.0.0.1',\n };\n }\n if (process.env.DATABASE_PATH) {\n rawConfig.databasePath = process.env.DATABASE_PATH;\n }\n\n // Parse and validate\n const config = SparkcoderConfigSchema.parse(rawConfig);\n\n // Resolve working directory\n // Priority: CLI argument > absolute path in config > current working directory\n // Note: workingDirectory in config is only used if it's an absolute path,\n // otherwise we default to where the CLI was run from\n let resolvedWorkingDirectory: string;\n if (workingDirectory) {\n // Explicitly passed via CLI\n resolvedWorkingDirectory = workingDirectory;\n } else if (config.workingDirectory && config.workingDirectory !== '.' 
&& config.workingDirectory.startsWith('/')) {\n // Absolute path in config\n resolvedWorkingDirectory = config.workingDirectory;\n } else {\n // Default to current working directory (where CLI was run)\n resolvedWorkingDirectory = process.cwd();\n }\n\n // Discover skill directories from standard locations\n const discovered = discoverSkillDirectories(resolvedWorkingDirectory);\n\n // Combine discovered directories with any additional configured directories\n const additionalDirs = (config.skills?.additionalDirectories || [])\n .map((dir) => resolve(configDir, dir))\n .filter((dir) => existsSync(dir));\n\n const resolvedSkillsDirectories = [\n ...discovered.allDirectories,\n ...additionalDirs,\n ];\n\n // Use app data directory for database by default, unless explicitly configured\n let resolvedDatabasePath: string;\n if (config.databasePath && config.databasePath !== './sparkecoder.db') {\n // User explicitly set a custom path\n resolvedDatabasePath = resolve(configDir, config.databasePath);\n } else {\n // Use standard OS app data directory\n const appDataDir = ensureAppDataDirectory();\n resolvedDatabasePath = join(appDataDir, 'sparkecoder.db');\n }\n\n // Resolve vector gateway config with env var overrides\n const resolvedVectorGateway: ResolvedVectorGatewayConfig = {\n redisUrl: process.env.REDIS_CLUSTER_NODES || config.vectorGateway?.redisUrl || null,\n httpUrl: process.env.VECTOR_HTTP_URL || config.vectorGateway?.httpUrl || null,\n embeddingModel:\n process.env.VECTOR_EMBEDDING_MODEL ||\n config.vectorGateway?.embeddingModel ||\n 'gemini-embedding-001',\n namespace: config.vectorGateway?.namespace || null,\n include: config.vectorGateway?.include || [\n '**/*.ts',\n '**/*.tsx',\n '**/*.js',\n '**/*.jsx',\n '**/*.py',\n '**/*.go',\n '**/*.rs',\n '**/*.java',\n '**/*.md',\n '**/*.mdx',\n '**/*.txt',\n ],\n exclude: config.vectorGateway?.exclude || [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.git/**',\n '**/.next/**',\n '**/*.min.js',\n 
'**/*.bundle.js',\n '**/pnpm-lock.yaml',\n '**/package-lock.json',\n '**/yarn.lock',\n '**/.test-workspace/**',\n '**/.semantic-test-workspace/**',\n '**/.semantic-integration-test/**',\n ],\n };\n\n // Resolve remote server config with env var overrides\n // Default to production server when not explicitly configured\n const DEFAULT_REMOTE_URL = 'https://agent-remote-server.sparkecode.com';\n const remoteUrl = process.env.SPARKECODER_REMOTE_URL || config.remoteServer?.url || DEFAULT_REMOTE_URL;\n const remoteAuthKey = process.env.SPARKECODER_AUTH_KEY || config.remoteServer?.authKey || loadStoredAuthKey();\n \n const resolvedRemoteServer: ResolvedRemoteServerConfig = {\n url: remoteUrl,\n authKey: remoteAuthKey,\n isConfigured: !!remoteUrl && !!remoteAuthKey,\n };\n\n const resolved: ResolvedConfig = {\n ...config,\n server: {\n port: config.server.port,\n host: config.server.host ?? '127.0.0.1',\n publicUrl: config.server.publicUrl,\n },\n resolvedWorkingDirectory,\n resolvedSkillsDirectories,\n resolvedDatabasePath,\n discoveredSkills: discovered,\n resolvedVectorGateway,\n resolvedRemoteServer,\n };\n\n cachedConfig = resolved;\n return resolved;\n}\n\n/**\n * Get the cached config (must call loadConfig first)\n */\nexport function getConfig(): ResolvedConfig {\n if (!cachedConfig) {\n throw new Error('Config not loaded. 
Call loadConfig first.');\n }\n return cachedConfig;\n}\n\n/**\n * Check if a tool requires approval\n */\nexport function requiresApproval(\n toolName: string,\n sessionConfig?: { toolApprovals?: Record<string, boolean> }\n): boolean {\n const config = getConfig();\n\n // Session-level wildcard \"*\" overrides everything (used by --dangerously-skip-approvals)\n if (sessionConfig?.toolApprovals?.['*'] !== undefined) {\n return sessionConfig.toolApprovals['*'];\n }\n\n // Session-level per-tool override takes precedence\n if (sessionConfig?.toolApprovals?.[toolName] !== undefined) {\n return sessionConfig.toolApprovals[toolName];\n }\n\n // Check global config\n const globalApprovals = config.toolApprovals as Record<string, boolean>;\n if (globalApprovals[toolName] !== undefined) {\n return globalApprovals[toolName];\n }\n\n // Default: bash requires approval, others don't\n if (toolName === 'bash') {\n return true;\n }\n\n return false;\n}\n\n/**\n * Create a default config file\n */\nexport function createDefaultConfig(): SparkcoderConfig {\n return {\n defaultModel: 'anthropic/claude-opus-4-6',\n // workingDirectory is intentionally not set - defaults to where CLI is run\n toolApprovals: {\n bash: true,\n write_file: false,\n read_file: false,\n load_skill: false,\n todo: false,\n },\n skills: {\n directory: './skills',\n additionalDirectories: [],\n },\n context: {\n maxChars: 200_000,\n autoSummarize: true,\n keepRecentMessages: 10,\n },\n server: {\n port: 3141,\n host: '127.0.0.1',\n },\n databasePath: './sparkecoder.db',\n };\n}\n\n// ============================================\n// Auth Key Management (for remote server)\n// ============================================\n\nconst AUTH_KEY_FILE = 'auth-key.json';\n\ninterface StoredAuthKey {\n authKey: string;\n createdAt: string;\n userId?: string;\n}\n\n/**\n * Load stored auth key from app data directory\n */\nfunction loadStoredAuthKey(): string | null {\n const keysPath = join(getAppDataDirectory(), 
AUTH_KEY_FILE);\n if (!existsSync(keysPath)) {\n return null;\n }\n try {\n const content = readFileSync(keysPath, 'utf-8');\n const data = JSON.parse(content) as StoredAuthKey;\n return data.authKey || null;\n } catch {\n return null;\n }\n}\n\n/**\n * Save auth key to app data directory\n */\nexport function saveAuthKey(authKey: string, userId?: string): void {\n const appDir = ensureAppDataDirectory();\n const keysPath = join(appDir, AUTH_KEY_FILE);\n const data: StoredAuthKey = {\n authKey,\n createdAt: new Date().toISOString(),\n userId,\n };\n writeFileSync(keysPath, JSON.stringify(data, null, 2), { mode: 0o600 });\n}\n\n/**\n * Get stored auth key info\n */\nexport function getStoredAuthKeyInfo(): StoredAuthKey | null {\n const keysPath = join(getAppDataDirectory(), AUTH_KEY_FILE);\n if (!existsSync(keysPath)) {\n return null;\n }\n try {\n const content = readFileSync(keysPath, 'utf-8');\n return JSON.parse(content) as StoredAuthKey;\n } catch {\n return null;\n }\n}\n\n/**\n * Register with remote server and get new auth key\n */\nexport async function registerWithRemoteServer(\n serverUrl: string,\n name?: string\n): Promise<{ authKey: string; userId: string }> {\n const response = await fetch(`${serverUrl}/auth/register`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ name: name || `CLI ${new Date().toISOString()}` }),\n });\n \n if (!response.ok) {\n const error = await response.json().catch(() => ({})) as { error?: string };\n throw new Error(error.error || `Failed to register: HTTP ${response.status}`);\n }\n \n const data = await response.json() as { authKey: string; userId: string };\n \n // Save the auth key\n saveAuthKey(data.authKey, data.userId);\n \n return data;\n}\n\n/**\n * Ensure we have a valid auth key for the remote server\n * If not configured, registers with the remote server to get one\n */\nexport async function ensureRemoteAuthKey(serverUrl: string): Promise<string> {\n // Check env 
var first\n if (process.env.SPARKECODER_AUTH_KEY) {\n return process.env.SPARKECODER_AUTH_KEY;\n }\n \n // Check stored key\n const storedKey = loadStoredAuthKey();\n if (storedKey) {\n return storedKey;\n }\n \n // Register with remote server\n const { authKey } = await registerWithRemoteServer(serverUrl);\n return authKey;\n}\n\n// ============================================\n// API Key Management\n// ============================================\n\nconst API_KEYS_FILE = 'api-keys.json';\n\n// Provider to environment variable mapping\nconst PROVIDER_ENV_MAP: Record<string, string> = {\n anthropic: 'ANTHROPIC_API_KEY',\n openai: 'OPENAI_API_KEY',\n google: 'GOOGLE_GENERATIVE_AI_API_KEY',\n xai: 'XAI_API_KEY',\n 'ai-gateway': 'AI_GATEWAY_API_KEY',\n};\n\n// All supported providers\nexport const SUPPORTED_PROVIDERS = Object.keys(PROVIDER_ENV_MAP);\n\ninterface StoredApiKeys {\n [provider: string]: string;\n}\n\n/**\n * Get the path to the API keys file\n */\nfunction getApiKeysPath(): string {\n const appDir = ensureAppDataDirectory();\n return join(appDir, API_KEYS_FILE);\n}\n\n/**\n * Load stored API keys from file\n */\nfunction loadStoredApiKeys(): StoredApiKeys {\n const keysPath = getApiKeysPath();\n if (!existsSync(keysPath)) {\n return {};\n }\n try {\n const content = readFileSync(keysPath, 'utf-8');\n return JSON.parse(content);\n } catch {\n return {};\n }\n}\n\n/**\n * Save API keys to file\n */\nfunction saveStoredApiKeys(keys: StoredApiKeys): void {\n const keysPath = getApiKeysPath();\n writeFileSync(keysPath, JSON.stringify(keys, null, 2), { mode: 0o600 }); // Secure permissions\n}\n\n/**\n * Load API keys from storage into environment variables.\n * Called on startup.\n *\n * When a remote server is configured for inference, local API keys are not\n * required -- all LLM calls are proxied through the remote server which\n * holds the keys. 
This function still runs so that local-only fallback\n * mode keeps working.\n */\nexport function loadApiKeysIntoEnv(): void {\n const storedKeys = loadStoredApiKeys();\n \n for (const [provider, envVar] of Object.entries(PROVIDER_ENV_MAP)) {\n if (!process.env[envVar] && storedKeys[provider]) {\n process.env[envVar] = storedKeys[provider];\n }\n }\n}\n\n/**\n * Check whether AI inference is handled by the remote server.\n * When true, local API keys are not needed.\n */\nexport function isRemoteInferenceConfigured(): boolean {\n try {\n const config = getConfig();\n return config.resolvedRemoteServer.isConfigured;\n } catch {\n return false;\n }\n}\n\n/**\n * Set an API key for a provider\n * Saves to storage and sets in current process env\n */\nexport function setApiKey(provider: string, apiKey: string): void {\n const normalizedProvider = provider.toLowerCase();\n const envVar = PROVIDER_ENV_MAP[normalizedProvider];\n \n if (!envVar) {\n throw new Error(`Unknown provider: ${provider}. Supported: ${SUPPORTED_PROVIDERS.join(', ')}`);\n }\n \n // Save to storage\n const storedKeys = loadStoredApiKeys();\n storedKeys[normalizedProvider] = apiKey;\n saveStoredApiKeys(storedKeys);\n \n // Set in current process\n process.env[envVar] = apiKey;\n}\n\n/**\n * Remove an API key for a provider\n */\nexport function removeApiKey(provider: string): void {\n const normalizedProvider = provider.toLowerCase();\n const envVar = PROVIDER_ENV_MAP[normalizedProvider];\n \n if (!envVar) {\n throw new Error(`Unknown provider: ${provider}. 
Supported: ${SUPPORTED_PROVIDERS.join(', ')}`);\n }\n \n // Remove from storage\n const storedKeys = loadStoredApiKeys();\n delete storedKeys[normalizedProvider];\n saveStoredApiKeys(storedKeys);\n \n // Remove from current process (if it was from storage)\n // Note: We can't know if it was from env or storage, so we don't remove from env\n}\n\n/**\n * Get API key status for all providers\n * Returns masked keys (first 4 and last 4 chars) and source (env/storage/none)\n */\nexport function getApiKeyStatus(): Array<{\n provider: string;\n envVar: string;\n configured: boolean;\n source: 'env' | 'storage' | 'none';\n maskedKey: string | null;\n}> {\n const storedKeys = loadStoredApiKeys();\n \n return SUPPORTED_PROVIDERS.map((provider) => {\n const envVar = PROVIDER_ENV_MAP[provider];\n const envValue = process.env[envVar];\n const storedValue = storedKeys[provider];\n \n let source: 'env' | 'storage' | 'none' = 'none';\n let value: string | undefined;\n \n if (envValue) {\n // Check if it came from storage (by comparing)\n if (storedValue && envValue === storedValue) {\n source = 'storage';\n } else {\n source = 'env';\n }\n value = envValue;\n } else if (storedValue) {\n source = 'storage';\n value = storedValue;\n }\n \n return {\n provider,\n envVar,\n configured: !!value,\n source,\n maskedKey: value ? maskApiKey(value) : null,\n };\n });\n}\n\n/**\n * Mask an API key for display (show first 4 and last 4 chars)\n */\nfunction maskApiKey(key: string): string {\n if (key.length <= 12) {\n return '****' + key.slice(-4);\n }\n return key.slice(0, 4) + '...' 
+ key.slice(-4);\n}\n\nexport * from './types.js';\n","import { readFile, readdir } from 'node:fs/promises';\nimport { resolve, basename, extname, relative } from 'node:path';\nimport { existsSync, readFileSync } from 'node:fs';\nimport { minimatch } from 'minimatch';\nimport { SkillMetadata, SkillMetadataSchema, SkillLoadType, DiscoveredSkills } from '../config/types.js';\n\nexport interface Skill {\n name: string;\n description: string;\n filePath: string;\n content?: string; // Only loaded when explicitly requested\n // Enhanced properties\n alwaysApply: boolean;\n globs: string[];\n loadType: SkillLoadType;\n priority: number; // Lower = higher priority for deduplication\n sourceDir: string; // Which directory this skill came from\n}\n\nexport interface SkillWithContent extends Skill {\n content: string;\n}\n\n/**\n * Parse skill metadata from frontmatter\n * Handles YAML-like format including arrays for globs\n */\nfunction parseSkillFrontmatter(content: string): { metadata: SkillMetadata; body: string } | null {\n const frontmatterMatch = content.match(/^---\\n([\\s\\S]*?)\\n---\\n([\\s\\S]*)$/);\n \n if (!frontmatterMatch) {\n return null;\n }\n\n const [, frontmatter, body] = frontmatterMatch;\n \n try {\n // Parse YAML-like frontmatter\n const lines = frontmatter.split('\\n');\n const data: Record<string, unknown> = {};\n let currentArray: string[] | null = null;\n let currentArrayKey: string | null = null;\n \n for (const line of lines) {\n // Check if this is an array item (starts with -)\n if (currentArrayKey && line.trim().startsWith('-')) {\n let value = line.trim().slice(1).trim();\n // Remove quotes if present\n if ((value.startsWith('\"') && value.endsWith('\"')) ||\n (value.startsWith(\"'\") && value.endsWith(\"'\"))) {\n value = value.slice(1, -1);\n }\n currentArray?.push(value);\n continue;\n }\n \n // Close any open array when we hit a non-array line\n if (currentArrayKey && currentArray) {\n data[currentArrayKey] = currentArray;\n 
currentArray = null;\n currentArrayKey = null;\n }\n \n const colonIndex = line.indexOf(':');\n if (colonIndex > 0) {\n const key = line.slice(0, colonIndex).trim();\n let value = line.slice(colonIndex + 1).trim();\n \n // Check if this starts an array (empty value followed by - items)\n if (value === '' || value === '[]') {\n currentArrayKey = key;\n currentArray = [];\n continue;\n }\n \n // Handle inline arrays like globs: [\"*.tsx\", \"*.jsx\"]\n if (value.startsWith('[') && value.endsWith(']')) {\n const arrayContent = value.slice(1, -1);\n const items = arrayContent.split(',').map(item => {\n let trimmed = item.trim();\n if ((trimmed.startsWith('\"') && trimmed.endsWith('\"')) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))) {\n trimmed = trimmed.slice(1, -1);\n }\n return trimmed;\n }).filter(item => item.length > 0);\n data[key] = items;\n continue;\n }\n \n // Remove quotes if present\n if ((value.startsWith('\"') && value.endsWith('\"')) ||\n (value.startsWith(\"'\") && value.endsWith(\"'\"))) {\n value = value.slice(1, -1);\n }\n \n // Handle boolean values\n if (value === 'true') {\n data[key] = true;\n } else if (value === 'false') {\n data[key] = false;\n } else {\n data[key] = value;\n }\n }\n }\n \n // Close any remaining open array\n if (currentArrayKey && currentArray) {\n data[currentArrayKey] = currentArray;\n }\n\n const metadata = SkillMetadataSchema.parse(data);\n return { metadata, body: body.trim() };\n } catch {\n return null;\n }\n}\n\n/**\n * Get skill name from filename if no frontmatter\n */\nfunction getSkillNameFromPath(filePath: string): string {\n return basename(filePath, extname(filePath))\n .replace(/[-_]/g, ' ')\n .replace(/\\b\\w/g, (c) => c.toUpperCase());\n}\n\n/**\n * Options for loading skills from a directory\n */\ninterface LoadSkillsOptions {\n // Priority for deduplication (lower = higher priority)\n priority?: number;\n // Default load type if not specified in frontmatter\n defaultLoadType?: 
SkillLoadType;\n // Force alwaysApply for all skills in this directory\n forceAlwaysApply?: boolean;\n}\n\n/**\n * Load all skills from a directory (metadata only)\n */\nexport async function loadSkillsFromDirectory(\n directory: string,\n options: LoadSkillsOptions = {}\n): Promise<Skill[]> {\n const {\n priority = 50,\n defaultLoadType = 'on_demand',\n forceAlwaysApply = false,\n } = options;\n\n if (!existsSync(directory)) {\n return [];\n }\n\n const skills: Skill[] = [];\n const entries = await readdir(directory, { withFileTypes: true });\n\n for (const entry of entries) {\n // Handle both files and directories (for Claude-style SKILL.md in subdirs)\n let filePath: string;\n let fileName: string;\n\n if (entry.isDirectory()) {\n // Check for SKILL.md inside the directory (Claude format)\n const skillMdPath = resolve(directory, entry.name, 'SKILL.md');\n if (existsSync(skillMdPath)) {\n filePath = skillMdPath;\n fileName = entry.name;\n } else {\n continue;\n }\n } else if (entry.name.endsWith('.md') || entry.name.endsWith('.mdc')) {\n filePath = resolve(directory, entry.name);\n fileName = entry.name;\n } else {\n continue;\n }\n\n const content = await readFile(filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n\n if (parsed) {\n const alwaysApply = forceAlwaysApply || parsed.metadata.alwaysApply;\n const loadType: SkillLoadType = alwaysApply ? 
'always' : defaultLoadType;\n\n skills.push({\n name: parsed.metadata.name,\n description: parsed.metadata.description,\n filePath,\n alwaysApply,\n globs: parsed.metadata.globs,\n loadType,\n priority,\n sourceDir: directory,\n });\n } else {\n // Use filename as name, first paragraph as description\n const name = getSkillNameFromPath(filePath);\n const firstParagraph = content.split('\\n\\n')[0]?.slice(0, 200) || 'No description';\n \n skills.push({\n name,\n description: firstParagraph.replace(/^#\\s*/, '').trim(),\n filePath,\n alwaysApply: forceAlwaysApply,\n globs: [],\n loadType: forceAlwaysApply ? 'always' : defaultLoadType,\n priority,\n sourceDir: directory,\n });\n }\n }\n\n return skills;\n}\n\n/**\n * Load all skills from multiple directories (legacy function for backwards compatibility)\n */\nexport async function loadAllSkills(directories: string[]): Promise<Skill[]> {\n const allSkills: Skill[] = [];\n const seenNames = new Set<string>();\n\n for (const dir of directories) {\n const skills = await loadSkillsFromDirectory(dir);\n for (const skill of skills) {\n // Avoid duplicates (first one wins)\n if (!seenNames.has(skill.name.toLowerCase())) {\n seenNames.add(skill.name.toLowerCase());\n allSkills.push(skill);\n }\n }\n }\n\n return allSkills;\n}\n\n/**\n * Load all skills from discovered directories with proper priority and typing\n */\nexport async function loadAllSkillsFromDiscovered(\n discovered: DiscoveredSkills\n): Promise<{ always: SkillWithContent[]; onDemand: Skill[]; all: Skill[] }> {\n const allSkills: Skill[] = [];\n const seenNames = new Set<string>();\n\n // Load from always-loaded directories (force alwaysApply = true)\n for (const { path, priority } of discovered.alwaysLoadedDirs) {\n const skills = await loadSkillsFromDirectory(path, {\n priority,\n defaultLoadType: 'always',\n forceAlwaysApply: true,\n });\n for (const skill of skills) {\n if (!seenNames.has(skill.name.toLowerCase())) {\n 
seenNames.add(skill.name.toLowerCase());\n allSkills.push(skill);\n }\n }\n }\n\n // Load from on-demand directories (respect frontmatter)\n for (const { path, priority } of discovered.onDemandDirs) {\n const skills = await loadSkillsFromDirectory(path, {\n priority,\n defaultLoadType: 'on_demand',\n forceAlwaysApply: false,\n });\n for (const skill of skills) {\n if (!seenNames.has(skill.name.toLowerCase())) {\n seenNames.add(skill.name.toLowerCase());\n allSkills.push(skill);\n }\n }\n }\n\n // Separate into always-loaded (with content) and on-demand\n const alwaysSkills = allSkills.filter(s => s.alwaysApply || s.loadType === 'always');\n const onDemandSkills = allSkills.filter(s => !s.alwaysApply && s.loadType !== 'always');\n\n // Load content for always-applied skills\n const alwaysWithContent: SkillWithContent[] = await Promise.all(\n alwaysSkills.map(async (skill) => {\n const content = await readFile(skill.filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n return {\n ...skill,\n content: parsed ? 
parsed.body : content,\n };\n })\n );\n\n return {\n always: alwaysWithContent,\n onDemand: onDemandSkills,\n all: allSkills,\n };\n}\n\n/**\n * Get skills that should be auto-injected based on glob patterns matching active files\n */\nexport async function getGlobMatchedSkills(\n skills: Skill[],\n activeFiles: string[],\n workingDirectory: string\n): Promise<SkillWithContent[]> {\n if (activeFiles.length === 0) {\n return [];\n }\n\n // Normalize active files to relative paths\n const relativeFiles = activeFiles.map(f => {\n if (f.startsWith(workingDirectory)) {\n return relative(workingDirectory, f);\n }\n return f;\n });\n\n // Find skills with matching globs that aren't already always-applied\n const matchedSkills = skills.filter(skill => {\n // Skip if already always applied (those are loaded separately)\n if (skill.alwaysApply || skill.loadType === 'always') {\n return false;\n }\n\n // Skip if no globs defined\n if (!skill.globs || skill.globs.length === 0) {\n return false;\n }\n\n // Check if any active file matches any glob\n return relativeFiles.some(file =>\n skill.globs.some(pattern => minimatch(file, pattern, { matchBase: true }))\n );\n });\n\n // Load content for matched skills\n const matchedWithContent: SkillWithContent[] = await Promise.all(\n matchedSkills.map(async (skill) => {\n const content = await readFile(skill.filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n return {\n ...skill,\n content: parsed ? 
parsed.body : content,\n loadType: 'glob_matched' as SkillLoadType,\n };\n })\n );\n\n return matchedWithContent;\n}\n\n/**\n * Load AGENTS.md content if it exists\n */\nexport async function loadAgentsMd(agentsMdPath: string | null): Promise<string | null> {\n if (!agentsMdPath || !existsSync(agentsMdPath)) {\n return null;\n }\n\n const content = await readFile(agentsMdPath, 'utf-8');\n return content;\n}\n\n/**\n * Load a skill's full content by name\n */\nexport async function loadSkillContent(\n skillName: string,\n directories: string[]\n): Promise<SkillWithContent | null> {\n const allSkills = await loadAllSkills(directories);\n const skill = allSkills.find(\n (s) => s.name.toLowerCase() === skillName.toLowerCase()\n );\n\n if (!skill) {\n return null;\n }\n\n const content = await readFile(skill.filePath, 'utf-8');\n const parsed = parseSkillFrontmatter(content);\n\n return {\n ...skill,\n content: parsed ? parsed.body : content,\n };\n}\n\n/**\n * Format on-demand skills list for context (shows as available to load)\n */\nexport function formatSkillsForContext(skills: Skill[]): string {\n // Filter to only on-demand skills\n const onDemandSkills = skills.filter(s => !s.alwaysApply && s.loadType !== 'always');\n\n if (onDemandSkills.length === 0) {\n return 'No on-demand skills available.';\n }\n\n const lines = ['Available skills (use load_skill tool to load into context):'];\n for (const skill of onDemandSkills) {\n const globInfo = skill.globs?.length ? 
` [auto-loads for: ${skill.globs.join(', ')}]` : '';\n lines.push(`- ${skill.name}: ${skill.description}${globInfo}`);\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Format always-loaded skills content for injection into system prompt\n */\nexport function formatAlwaysLoadedSkills(skills: SkillWithContent[]): string {\n if (skills.length === 0) {\n return '';\n }\n\n const sections: string[] = [];\n \n for (const skill of skills) {\n sections.push(`### ${skill.name}\\n\\n${skill.content}`);\n }\n\n return `## Active Rules & Skills (Always Loaded)\\n\\n${sections.join('\\n\\n---\\n\\n')}`;\n}\n\n/**\n * Format glob-matched skills content for injection into system prompt\n */\nexport function formatGlobMatchedSkills(skills: SkillWithContent[]): string {\n if (skills.length === 0) {\n return '';\n }\n\n const sections: string[] = [];\n \n for (const skill of skills) {\n sections.push(`### ${skill.name}\\n\\n${skill.content}`);\n }\n\n return `## Context-Relevant Skills (Auto-loaded based on active files)\\n\\n${sections.join('\\n\\n---\\n\\n')}`;\n}\n\n/**\n * Format AGENTS.md content for injection\n */\nexport function formatAgentsMdContent(content: string | null): string {\n if (!content) {\n return '';\n }\n\n return `## Project Instructions (AGENTS.md)\\n\\n${content}`;\n}\n","/**\n * Types for semantic search and indexing\n */\n\n// Chunk types for different code structures\nexport type ChunkType = 'function' | 'class' | 'block' | 'sliding';\n\n// A chunk of code/text ready for embedding\nexport interface Chunk {\n // Unique ID: {contentHash}_{chunkIndex}\n id: string;\n // The text content to embed\n text: string;\n // SHA-256 hash of the chunk content\n contentHash: string;\n // Chunk index within the file\n chunkIndex: number;\n // Metadata for filtering and display\n metadata: ChunkMetadata;\n}\n\nexport interface ChunkMetadata {\n // Relative file path from repo root\n filePath: string;\n // Line range in the source file\n startLine: number;\n endLine: 
number;\n // Detected language\n language: string;\n // Type of chunk (function, class, sliding window, etc.)\n chunkType: ChunkType;\n // Optional: function/class name if semantic chunk\n symbolName?: string;\n}\n\n// Options for the indexing operation\nexport interface IndexOptions {\n // Working directory (repo root)\n workingDirectory: string;\n // Force full re-index (ignore existing hashes)\n force?: boolean;\n // Verbose logging\n verbose?: boolean;\n // Progress callback\n onProgress?: (progress: IndexProgress) => void;\n}\n\n// Progress during indexing\nexport interface IndexProgress {\n phase: 'scanning' | 'chunking' | 'checking' | 'embedding' | 'done';\n totalFiles: number;\n processedFiles: number;\n totalChunks: number;\n newChunks: number;\n skippedChunks: number;\n currentFile?: string;\n}\n\n// Result of indexing operation\nexport interface IndexResult {\n success: boolean;\n namespace: string;\n totalFiles: number;\n totalChunks: number;\n newChunks: number;\n skippedChunks: number;\n failedChunks: number;\n duration: number; // milliseconds\n errors: Array<{ file: string; error: string }>;\n}\n\n// Index status for a repository\nexport interface IndexStatus {\n namespace: string;\n totalChunks: number;\n lastFullIndex: Date | null;\n lastIncrementalIndex: Date | null;\n isConfigured: boolean;\n}\n\n// Semantic search match result\nexport interface SemanticMatch {\n // File path relative to repo root\n filePath: string;\n // Line range\n startLine: number;\n endLine: number;\n // Similarity score (0-1)\n score: number;\n // Snippet of matching text\n snippet: string;\n // Symbol name if available\n symbolName?: string;\n // Language\n language: string;\n}\n\n// Options for semantic search\nexport interface SemanticSearchOptions {\n // Number of results to return\n topK?: number;\n // Filter by file glob pattern\n filePattern?: string;\n // Filter by language\n language?: string;\n // Minimum score threshold (0-1)\n minScore?: number;\n}\n\n// 
Result of semantic search\nexport interface SemanticSearchResult {\n success: boolean;\n query: string;\n matches: SemanticMatch[];\n duration: number; // milliseconds\n error?: string;\n}\n","/**\n * Git remote to namespace resolution\n * Converts git remote URLs to TurboPuffer namespaces\n */\n\nimport { execSync } from 'node:child_process';\n\n/**\n * Get the git remote URL for a repository\n * Returns null if not a git repo or no remote configured\n */\nexport function getGitRemoteUrl(workingDirectory: string): string | null {\n try {\n const result = execSync('git remote get-url origin', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n return result.trim();\n } catch {\n return null;\n }\n}\n\n/**\n * Parse a git remote URL to extract org and repo name\n * Supports:\n * - https://github.com/org/repo.git\n * - git@github.com:org/repo.git\n * - https://gitlab.com/org/repo\n * - ssh://git@bitbucket.org/org/repo.git\n */\nexport function parseGitRemoteUrl(url: string): { org: string; repo: string } | null {\n // Remove .git suffix if present\n const cleanUrl = url.replace(/\\.git$/, '');\n\n // Try SSH format: git@github.com:org/repo\n const sshMatch = cleanUrl.match(/git@[^:]+:([^/]+)\\/(.+)$/);\n if (sshMatch) {\n return { org: sshMatch[1], repo: sshMatch[2] };\n }\n\n // Try HTTPS format: https://github.com/org/repo\n const httpsMatch = cleanUrl.match(/https?:\\/\\/[^/]+\\/([^/]+)\\/(.+)$/);\n if (httpsMatch) {\n return { org: httpsMatch[1], repo: httpsMatch[2] };\n }\n\n // Try SSH with protocol: ssh://git@github.com/org/repo\n const sshProtoMatch = cleanUrl.match(/ssh:\\/\\/[^/]+\\/([^/]+)\\/(.+)$/);\n if (sshProtoMatch) {\n return { org: sshProtoMatch[1], repo: sshProtoMatch[2] };\n }\n\n return null;\n}\n\n/**\n * Sanitize a string for use in a namespace\n * - Lowercase\n * - Replace non-alphanumeric with underscores\n * - Remove leading/trailing underscores\n * - Collapse multiple underscores\n */\nfunction 
sanitizeForNamespace(str: string): string {\n return str\n .toLowerCase()\n .replace(/[^a-z0-9]/g, '_')\n .replace(/^_+|_+$/g, '')\n .replace(/_+/g, '_');\n}\n\n/**\n * Get the namespace for a repository\n * Format: sparkecoder_{org}_{repo}\n * \n * @param workingDirectory - The repo working directory\n * @param configuredNamespace - Optional namespace override from config\n * @returns The namespace string, or null if not a git repo\n */\nexport async function getRepoNamespace(\n workingDirectory: string,\n configuredNamespace?: string | null\n): Promise<string | null> {\n // Use configured namespace if provided\n if (configuredNamespace) {\n return configuredNamespace;\n }\n\n // Get git remote URL\n const remoteUrl = getGitRemoteUrl(workingDirectory);\n if (!remoteUrl) {\n return null;\n }\n\n // Parse org and repo\n const parsed = parseGitRemoteUrl(remoteUrl);\n if (!parsed) {\n return null;\n }\n\n // Build namespace\n const org = sanitizeForNamespace(parsed.org);\n const repo = sanitizeForNamespace(parsed.repo);\n return `sparkecoder_${org}_${repo}`;\n}\n\n/**\n * Check if the working directory is a git repository\n */\nexport function isGitRepository(workingDirectory: string): boolean {\n try {\n execSync('git rev-parse --git-dir', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Get the current git branch name\n */\nexport function getCurrentBranch(workingDirectory: string): string | null {\n try {\n const result = execSync('git rev-parse --abbrev-ref HEAD', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n return result.trim();\n } catch {\n return null;\n }\n}\n\n/**\n * Get the current git commit hash (short form)\n */\nexport function getCurrentCommit(workingDirectory: string): string | null {\n try {\n const result = execSync('git rev-parse --short HEAD', {\n cwd: workingDirectory,\n encoding: 'utf-8',\n stdio: 
['pipe', 'pipe', 'pipe'],\n });\n return result.trim();\n } catch {\n return null;\n }\n}\n","/**\n * Content hashing utilities for deduplication\n * Uses SHA-256 for deterministic content hashing\n */\n\nimport { createHash } from 'node:crypto';\n\n/**\n * Compute SHA-256 hash of content\n * Returns first 16 characters of hex digest for a reasonable ID length\n */\nexport function computeContentHash(content: string): string {\n const hash = createHash('sha256');\n hash.update(content, 'utf-8');\n return hash.digest('hex').slice(0, 16);\n}\n\n/**\n * Compute a full SHA-256 hash (64 chars)\n */\nexport function computeFullHash(content: string): string {\n const hash = createHash('sha256');\n hash.update(content, 'utf-8');\n return hash.digest('hex');\n}\n\n/**\n * Generate a chunk ID from content hash and chunk index\n * Format: {contentHash}_{chunkIndex}\n */\nexport function generateChunkId(contentHash: string, chunkIndex: number): string {\n return `${contentHash}_${chunkIndex}`;\n}\n\n/**\n * Parse a chunk ID to extract content hash and chunk index\n */\nexport function parseChunkId(chunkId: string): { contentHash: string; chunkIndex: number } | null {\n const match = chunkId.match(/^([a-f0-9]+)_(\\d+)$/);\n if (!match) {\n return null;\n }\n return {\n contentHash: match[1],\n chunkIndex: parseInt(match[2], 10),\n };\n}\n\n/**\n * Compute hash for a file's content\n * Normalizes line endings for consistent hashing across platforms\n */\nexport function computeFileHash(content: string): string {\n // Normalize line endings to LF\n const normalized = content.replace(/\\r\\n/g, '\\n');\n return computeContentHash(normalized);\n}\n","/**\n * Hybrid code chunking\n * - Semantic chunking for code files (by function/class)\n * - Sliding window for documentation and text files\n */\n\nimport { extname, basename } from 'node:path';\nimport { Chunk, ChunkMetadata, ChunkType } from './types.js';\nimport { computeContentHash, generateChunkId } from './hasher.js';\n\n// 
Language detection by file extension\nconst LANGUAGE_MAP: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescript',\n '.js': 'javascript',\n '.jsx': 'javascript',\n '.mjs': 'javascript',\n '.cjs': 'javascript',\n '.py': 'python',\n '.go': 'go',\n '.rs': 'rust',\n '.java': 'java',\n '.kt': 'kotlin',\n '.swift': 'swift',\n '.rb': 'ruby',\n '.php': 'php',\n '.c': 'c',\n '.cpp': 'cpp',\n '.h': 'c',\n '.hpp': 'cpp',\n '.cs': 'csharp',\n '.md': 'markdown',\n '.mdx': 'markdown',\n '.txt': 'text',\n '.json': 'json',\n '.yaml': 'yaml',\n '.yml': 'yaml',\n '.toml': 'toml',\n '.xml': 'xml',\n '.html': 'html',\n '.css': 'css',\n '.scss': 'scss',\n '.less': 'less',\n '.sql': 'sql',\n '.sh': 'shell',\n '.bash': 'shell',\n '.zsh': 'shell',\n};\n\n// Languages that support semantic chunking\nconst SEMANTIC_LANGUAGES = new Set([\n 'typescript',\n 'javascript',\n 'python',\n 'go',\n 'rust',\n 'java',\n 'kotlin',\n 'swift',\n 'ruby',\n 'php',\n 'c',\n 'cpp',\n 'csharp',\n]);\n\n// Sliding window config\nconst SLIDING_WINDOW_SIZE = 1500; // ~500 tokens\nconst SLIDING_WINDOW_OVERLAP = 300; // ~100 tokens\n\n// Max chunk size (to avoid very long embeddings)\nconst MAX_CHUNK_SIZE = 4000; // ~1300 tokens\n\n/**\n * Detect language from file path\n */\nexport function detectLanguage(filePath: string): string {\n const ext = extname(filePath).toLowerCase();\n return LANGUAGE_MAP[ext] || 'unknown';\n}\n\n/**\n * Check if a language supports semantic chunking\n */\nexport function supportsSemanticChunking(language: string): boolean {\n return SEMANTIC_LANGUAGES.has(language);\n}\n\n/**\n * Chunk a file into embedding-ready chunks\n */\nexport function chunkFile(filePath: string, content: string): Chunk[] {\n const language = detectLanguage(filePath);\n \n // Skip empty files\n if (!content.trim()) {\n return [];\n }\n\n // Use semantic chunking for supported languages, sliding window otherwise\n if (supportsSemanticChunking(language)) {\n return chunkCodeSemantic(filePath, 
content, language);\n } else {\n return chunkSlidingWindow(filePath, content, language);\n }\n}\n\n/**\n * Semantic chunking for code files\n * Extracts functions, classes, and significant blocks\n */\nfunction chunkCodeSemantic(filePath: string, content: string, language: string): Chunk[] {\n const chunks: Chunk[] = [];\n const lines = content.split('\\n');\n \n // Simple regex-based extraction (tree-sitter would be more accurate)\n // This is a pragmatic approach that works for most cases\n \n const blocks = extractCodeBlocks(lines, language);\n \n if (blocks.length === 0) {\n // Fall back to sliding window if no blocks found\n return chunkSlidingWindow(filePath, content, language);\n }\n\n for (let i = 0; i < blocks.length; i++) {\n const block = blocks[i];\n const blockContent = lines.slice(block.startLine, block.endLine + 1).join('\\n');\n \n // Skip very small blocks\n if (blockContent.trim().length < 50) {\n continue;\n }\n\n // If block is too large, split it\n if (blockContent.length > MAX_CHUNK_SIZE) {\n const subChunks = splitLargeBlock(filePath, blockContent, block.startLine, language, block.type, block.name);\n chunks.push(...subChunks);\n } else {\n const contentHash = computeContentHash(blockContent);\n const chunkId = generateChunkId(contentHash, i);\n \n chunks.push({\n id: chunkId,\n text: buildChunkText(filePath, blockContent, block.name),\n contentHash,\n chunkIndex: i,\n metadata: {\n filePath,\n startLine: block.startLine + 1, // 1-indexed\n endLine: block.endLine + 1,\n language,\n chunkType: block.type as ChunkType,\n symbolName: block.name,\n },\n });\n }\n }\n\n // If no meaningful chunks, fall back to sliding window\n if (chunks.length === 0) {\n return chunkSlidingWindow(filePath, content, language);\n }\n\n return reindexChunks(chunks);\n}\n\n/**\n * Extract code blocks (functions, classes) from source code\n */\nfunction extractCodeBlocks(\n lines: string[],\n language: string\n): Array<{ startLine: number; endLine: number; type: 
string; name?: string }> {\n const blocks: Array<{ startLine: number; endLine: number; type: string; name?: string }> = [];\n \n // Language-specific patterns\n const patterns = getLanguagePatterns(language);\n \n let i = 0;\n while (i < lines.length) {\n const line = lines[i];\n \n // Check for function/class definitions\n for (const pattern of patterns) {\n const match = line.match(pattern.regex);\n if (match) {\n const name = match[1];\n const endLine = findBlockEnd(lines, i, language);\n \n blocks.push({\n startLine: i,\n endLine,\n type: pattern.type,\n name,\n });\n \n i = endLine + 1;\n break;\n }\n }\n \n i++;\n }\n\n // Merge adjacent small blocks\n return mergeSmallBlocks(blocks, lines);\n}\n\n/**\n * Get regex patterns for extracting code blocks\n */\nfunction getLanguagePatterns(language: string): Array<{ regex: RegExp; type: string }> {\n switch (language) {\n case 'typescript':\n case 'javascript':\n return [\n { regex: /^\\s*(?:export\\s+)?(?:async\\s+)?function\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*(?:export\\s+)?(?:const|let|var)\\s+(\\w+)\\s*=\\s*(?:async\\s+)?(?:\\([^)]*\\)|[^=])\\s*=>/, type: 'function' },\n { regex: /^\\s*(?:export\\s+)?class\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:export\\s+)?interface\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:export\\s+)?type\\s+(\\w+)/, type: 'class' },\n ];\n case 'python':\n return [\n { regex: /^\\s*(?:async\\s+)?def\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*class\\s+(\\w+)/, type: 'class' },\n ];\n case 'go':\n return [\n { regex: /^\\s*func\\s+(?:\\([^)]+\\)\\s+)?(\\w+)/, type: 'function' },\n { regex: /^\\s*type\\s+(\\w+)\\s+struct/, type: 'class' },\n { regex: /^\\s*type\\s+(\\w+)\\s+interface/, type: 'class' },\n ];\n case 'rust':\n return [\n { regex: /^\\s*(?:pub\\s+)?(?:async\\s+)?fn\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*(?:pub\\s+)?struct\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:pub\\s+)?impl\\s+(?:<[^>]+>\\s+)?(\\w+)/, type: 'class' },\n { 
regex: /^\\s*(?:pub\\s+)?trait\\s+(\\w+)/, type: 'class' },\n ];\n case 'java':\n case 'kotlin':\n return [\n { regex: /^\\s*(?:public|private|protected)?\\s*(?:static\\s+)?(?:\\w+\\s+)?(\\w+)\\s*\\(/, type: 'function' },\n { regex: /^\\s*(?:public|private|protected)?\\s*(?:abstract\\s+)?class\\s+(\\w+)/, type: 'class' },\n { regex: /^\\s*(?:public|private|protected)?\\s*interface\\s+(\\w+)/, type: 'class' },\n ];\n default:\n return [\n { regex: /^\\s*(?:function|def|fn|func)\\s+(\\w+)/, type: 'function' },\n { regex: /^\\s*class\\s+(\\w+)/, type: 'class' },\n ];\n }\n}\n\n/**\n * Find the end of a code block (matching braces/indentation)\n */\nfunction findBlockEnd(lines: string[], startLine: number, language: string): number {\n // Python uses indentation\n if (language === 'python') {\n return findPythonBlockEnd(lines, startLine);\n }\n \n // Most languages use braces\n return findBraceBlockEnd(lines, startLine);\n}\n\n/**\n * Find block end for brace-based languages\n */\nfunction findBraceBlockEnd(lines: string[], startLine: number): number {\n let braceCount = 0;\n let foundOpen = false;\n \n for (let i = startLine; i < lines.length; i++) {\n const line = lines[i];\n \n for (const char of line) {\n if (char === '{') {\n braceCount++;\n foundOpen = true;\n } else if (char === '}') {\n braceCount--;\n }\n }\n \n if (foundOpen && braceCount === 0) {\n return i;\n }\n }\n \n // If no matching brace found, return a reasonable chunk\n return Math.min(startLine + 50, lines.length - 1);\n}\n\n/**\n * Find block end for Python (indentation-based)\n */\nfunction findPythonBlockEnd(lines: string[], startLine: number): number {\n const startIndent = getIndentLevel(lines[startLine]);\n \n for (let i = startLine + 1; i < lines.length; i++) {\n const line = lines[i];\n \n // Skip empty lines\n if (!line.trim()) {\n continue;\n }\n \n const indent = getIndentLevel(line);\n \n // Block ends when we return to same or lower indentation\n if (indent <= startIndent && 
line.trim()) {\n return i - 1;\n }\n }\n \n return lines.length - 1;\n}\n\n/**\n * Get indentation level of a line\n */\nfunction getIndentLevel(line: string): number {\n const match = line.match(/^(\\s*)/);\n return match ? match[1].length : 0;\n}\n\n/**\n * Merge small adjacent blocks\n */\nfunction mergeSmallBlocks(\n blocks: Array<{ startLine: number; endLine: number; type: string; name?: string }>,\n lines: string[]\n): Array<{ startLine: number; endLine: number; type: string; name?: string }> {\n if (blocks.length === 0) {\n return blocks;\n }\n\n const merged: typeof blocks = [];\n let current = blocks[0];\n\n for (let i = 1; i < blocks.length; i++) {\n const next = blocks[i];\n const currentContent = lines.slice(current.startLine, current.endLine + 1).join('\\n');\n const gap = next.startLine - current.endLine;\n \n // Merge if current block is small and gap is small\n if (currentContent.length < 500 && gap <= 3) {\n current = {\n startLine: current.startLine,\n endLine: next.endLine,\n type: 'block',\n name: current.name,\n };\n } else {\n merged.push(current);\n current = next;\n }\n }\n \n merged.push(current);\n return merged;\n}\n\n/**\n * Split a large block into smaller chunks\n */\nfunction splitLargeBlock(\n filePath: string,\n content: string,\n startLine: number,\n language: string,\n type: string,\n name?: string\n): Chunk[] {\n const chunks: Chunk[] = [];\n const lines = content.split('\\n');\n \n let currentStart = 0;\n let currentChunk = '';\n \n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n const newChunk = currentChunk + (currentChunk ? 
'\\n' : '') + line;\n \n if (newChunk.length > MAX_CHUNK_SIZE && currentChunk) {\n const contentHash = computeContentHash(currentChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk, name),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: startLine + currentStart + 1,\n endLine: startLine + i,\n language,\n chunkType: type as ChunkType,\n symbolName: name,\n },\n });\n \n currentStart = i;\n currentChunk = line;\n } else {\n currentChunk = newChunk;\n }\n }\n \n // Add remaining content\n if (currentChunk.trim()) {\n const contentHash = computeContentHash(currentChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk, name),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: startLine + currentStart + 1,\n endLine: startLine + lines.length,\n language,\n chunkType: type as ChunkType,\n symbolName: name,\n },\n });\n }\n\n return chunks;\n}\n\n/**\n * Sliding window chunking for non-code files\n */\nfunction chunkSlidingWindow(filePath: string, content: string, language: string): Chunk[] {\n const chunks: Chunk[] = [];\n \n // If content is small enough, return as single chunk\n if (content.length <= MAX_CHUNK_SIZE) {\n const contentHash = computeContentHash(content);\n chunks.push({\n id: generateChunkId(contentHash, 0),\n text: buildChunkText(filePath, content),\n contentHash,\n chunkIndex: 0,\n metadata: {\n filePath,\n startLine: 1,\n endLine: content.split('\\n').length,\n language,\n chunkType: 'sliding',\n },\n });\n return chunks;\n }\n\n // Split by lines to preserve line boundaries\n const lines = content.split('\\n');\n let currentStart = 0;\n let currentChunk = '';\n let currentLineStart = 0;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n const newChunk = currentChunk + (currentChunk ? 
'\\n' : '') + line;\n \n if (newChunk.length >= SLIDING_WINDOW_SIZE) {\n const contentHash = computeContentHash(currentChunk || newChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk || newChunk),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: currentLineStart + 1,\n endLine: i + 1,\n language,\n chunkType: 'sliding',\n },\n });\n \n // Move back for overlap\n const overlapLines = Math.floor(SLIDING_WINDOW_OVERLAP / 50); // rough estimate\n currentLineStart = Math.max(currentStart, i - overlapLines);\n currentChunk = lines.slice(currentLineStart, i + 1).join('\\n');\n currentStart = currentLineStart;\n } else {\n currentChunk = newChunk;\n }\n }\n\n // Add remaining content\n if (currentChunk.trim() && currentChunk.length > 50) {\n const contentHash = computeContentHash(currentChunk);\n chunks.push({\n id: generateChunkId(contentHash, chunks.length),\n text: buildChunkText(filePath, currentChunk),\n contentHash,\n chunkIndex: chunks.length,\n metadata: {\n filePath,\n startLine: currentLineStart + 1,\n endLine: lines.length,\n language,\n chunkType: 'sliding',\n },\n });\n }\n\n return reindexChunks(chunks);\n}\n\n/**\n * Build the text to embed, including file context\n */\nfunction buildChunkText(filePath: string, content: string, symbolName?: string): string {\n const fileName = basename(filePath);\n let text = `File: ${filePath}\\n`;\n \n if (symbolName) {\n text += `Symbol: ${symbolName}\\n`;\n }\n \n text += `\\n${content}`;\n return text;\n}\n\n/**\n * Re-index chunks to ensure sequential chunk indices\n */\nfunction reindexChunks(chunks: Chunk[]): Chunk[] {\n return chunks.map((chunk, index) => ({\n ...chunk,\n chunkIndex: index,\n id: generateChunkId(chunk.contentHash, index),\n }));\n}\n","/**\n * Vector client - uses remote server API for vector operations\n * This removes the need for the private vector SDK in the client\n */\n\nimport { getConfig } 
from '../config/index.js';\n\n// Types for vector operations (matching remote server API)\nexport interface EmbeddingRequest {\n texts: Array<{ id: string; text: string; document: Record<string, unknown> }>;\n namespace: string;\n embeddingModel?: string;\n}\n\nexport interface EmbeddingError {\n id?: string;\n error: string;\n}\n\nexport interface EmbeddingResult {\n processedCount: number;\n failedCount: number;\n errors?: EmbeddingError[];\n}\n\nexport interface SearchRequest {\n query: string;\n namespace: string;\n topK?: number;\n embeddingModel?: string;\n}\n\nexport interface SearchMatch {\n id: string;\n score: number;\n metadata?: Record<string, unknown>;\n}\n\nexport interface SearchResult {\n matches: SearchMatch[];\n}\n\n// Remote vector client state\nlet remoteServerUrl: string | null = null;\nlet authKey: string | null = null;\n\n/**\n * Initialize the vector client with remote server config\n */\nexport function initVectorClient(serverUrl: string, key: string) {\n remoteServerUrl = serverUrl.replace(/\\/$/, '');\n authKey = key;\n}\n\n/**\n * Check if vector client is configured\n */\nexport function isVectorClientConfigured(): boolean {\n return !!remoteServerUrl && !!authKey;\n}\n\n/**\n * HTTP helper for remote vector API calls\n */\nasync function vectorApi<T>(\n path: string,\n options: { method?: string; body?: unknown } = {}\n): Promise<T> {\n if (!remoteServerUrl || !authKey) {\n throw new Error('Vector client not initialized - remote server not configured');\n }\n \n const url = `${remoteServerUrl}/vectors${path}`;\n const init: RequestInit = {\n method: options.method || 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${authKey}`,\n },\n };\n \n if (options.body) {\n init.body = JSON.stringify(options.body);\n }\n \n const response = await fetch(url, init);\n \n if (!response.ok) {\n const error = await response.json().catch(() => ({ error: 'Unknown error' })) as { error?: string };\n throw new 
Error(error.error || `HTTP ${response.status}`);\n }\n \n return response.json() as Promise<T>;\n}\n\n/**\n * Remote vector client that calls the remote server\n * Implements same interface as the old VectorClient SDK\n */\nexport const remoteVectorClient = {\n embeddings: {\n /**\n * Create embeddings and store in vector DB\n */\n async createAndWait(\n texts: Array<{ id: string; text: string; document: Record<string, unknown> }>,\n options: {\n namespace: string;\n embeddingModel?: string;\n }\n ): Promise<EmbeddingResult> {\n return vectorApi<EmbeddingResult>('/embed', {\n method: 'POST',\n body: {\n texts,\n namespace: options.namespace,\n embeddingModel: options.embeddingModel,\n },\n });\n },\n },\n \n search: {\n /**\n * Query vectors using semantic search\n */\n async queryAndWait(\n query: string,\n options: {\n namespace: string;\n topK?: number;\n includeMetadata?: boolean;\n embeddingModel?: string;\n }\n ): Promise<SearchResult> {\n return vectorApi<SearchResult>('/search', {\n method: 'POST',\n body: {\n query,\n namespace: options.namespace,\n topK: options.topK || 10,\n embeddingModel: options.embeddingModel,\n },\n });\n },\n },\n \n /**\n * Delete a namespace (if supported)\n */\n async deleteNamespace(namespace: string): Promise<void> {\n await vectorApi(`/namespace/${encodeURIComponent(namespace)}`, {\n method: 'DELETE',\n });\n },\n \n /**\n * Close client (no-op for HTTP client)\n */\n async close(): Promise<void> {\n // No-op - HTTP connections don't need cleanup\n },\n};\n\n// Type alias for the vector client\nexport type VectorClient = typeof remoteVectorClient;\n\n/**\n * Get the Vector client\n * Returns null if remote server is not configured\n */\nexport function getVectorClient(): VectorClient | null {\n if (!isVectorClientConfigured()) {\n // Try to initialize from config\n try {\n const config = getConfig();\n if (config.resolvedRemoteServer.url && config.resolvedRemoteServer.authKey) {\n 
initVectorClient(config.resolvedRemoteServer.url, config.resolvedRemoteServer.authKey);\n } else {\n return null;\n }\n } catch {\n return null;\n }\n }\n \n return remoteVectorClient;\n}\n\n/**\n * Close the vector client (no-op for HTTP client)\n */\nexport async function closeVectorClient(): Promise<void> {\n // No-op - HTTP connections don't need cleanup\n}\n\n/**\n * Check if Vector Gateway is configured (via remote server)\n */\nexport function isVectorGatewayConfigured(): boolean {\n try {\n const config = getConfig();\n return !!(config.resolvedRemoteServer.url && config.resolvedRemoteServer.authKey);\n } catch {\n return false;\n }\n}\n\n/**\n * Get the configured embedding model\n */\nexport function getEmbeddingModel(): string {\n try {\n const config = getConfig();\n return config.resolvedVectorGateway.embeddingModel;\n } catch {\n return 'gemini-embedding-001';\n }\n}\n","/**\n * Repository indexing pipeline\n * Walks the repo, chunks files, and sends to Vector Gateway for embedding\n */\n\nimport { readFileSync, statSync } from 'node:fs';\nimport { join, relative } from 'node:path';\nimport { minimatch } from 'minimatch';\nimport { getConfig } from '../config/index.js';\nimport { getDb, indexedChunkQueries, indexStatusQueries } from '../db/index.js';\nimport { Chunk, IndexOptions, IndexProgress, IndexResult, IndexStatus } from './types.js';\nimport { getRepoNamespace, isGitRepository } from './namespace.js';\nimport { chunkFile } from './chunker.js';\nimport { getVectorClient, closeVectorClient, getEmbeddingModel } from './client.js';\n\n// Max file size to index (1MB)\nconst MAX_FILE_SIZE = 1024 * 1024;\n\n// Batch size and concurrency for embedding requests\nconst EMBEDDING_BATCH_SIZE = 50;\nconst EMBEDDING_CONCURRENCY = 5;\nconst EMBEDDING_RETRIES = 2;\n\nfunction parsePositiveInt(value: string | undefined, fallback: number): number {\n const parsed = Number(value);\n if (!Number.isFinite(parsed) || parsed <= 0) {\n return fallback;\n }\n return 
Math.floor(parsed);\n}\n\nfunction formatError(error: unknown): string {\n if (error instanceof Error) {\n return error.message || 'Unknown error';\n }\n if (typeof error === 'string') {\n return error;\n }\n try {\n return JSON.stringify(error);\n } catch {\n return 'Unknown error';\n }\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\n/**\n * Check if a path matches any exclude pattern\n */\nfunction isPathExcluded(relativePath: string, exclude: string[]): boolean {\n return exclude.some((pattern) => {\n // Direct match\n if (minimatch(relativePath, pattern, { dot: true })) {\n return true;\n }\n // For directory patterns like \"node_modules/**\", also check if the path\n // starts with the directory name (for skipping entire directories)\n if (pattern.endsWith('/**')) {\n const dirPattern = pattern.slice(0, -3); // Remove \"/**\"\n if (relativePath === dirPattern || relativePath.startsWith(dirPattern + '/')) {\n return true;\n }\n }\n return false;\n });\n}\n\n/**\n * Walk directory and collect files matching patterns\n */\nasync function walkDirectory(\n dir: string,\n include: string[],\n exclude: string[],\n baseDir: string\n): Promise<string[]> {\n const { readdirSync } = await import('node:fs');\n const { join, relative } = await import('node:path');\n \n const files: string[] = [];\n \n function walk(currentDir: string) {\n let entries;\n try {\n entries = readdirSync(currentDir, { withFileTypes: true });\n } catch {\n // Skip directories we can't read\n return;\n }\n \n for (const entry of entries) {\n const fullPath = join(currentDir, entry.name);\n const relativePath = relative(baseDir, fullPath);\n \n // Check exclusions first\n if (isPathExcluded(relativePath, exclude)) {\n continue;\n }\n \n if (entry.isDirectory()) {\n walk(fullPath);\n } else if (entry.isFile()) {\n // Check inclusions\n const isIncluded = include.some((pattern) => {\n return minimatch(relativePath, pattern, { dot: 
true });\n });\n \n if (isIncluded) {\n files.push(fullPath);\n }\n }\n }\n }\n \n walk(dir);\n return files;\n}\n\n/**\n * Check if a file should be skipped (binary, too large, etc.)\n */\nfunction shouldSkipFile(filePath: string): { skip: boolean; reason?: string } {\n try {\n const stats = statSync(filePath);\n \n if (stats.size > MAX_FILE_SIZE) {\n return { skip: true, reason: 'File too large (>1MB)' };\n }\n \n if (stats.size === 0) {\n return { skip: true, reason: 'Empty file' };\n }\n \n // For text detection, just try to read the file as UTF-8\n // If it fails or has null bytes, it's likely binary\n try {\n const content = readFileSync(filePath, 'utf-8');\n // Check for null bytes in first 1000 chars (binary indicator)\n const sample = content.slice(0, 1000);\n if (sample.includes('\\0')) {\n return { skip: true, reason: 'Binary file' };\n }\n } catch {\n return { skip: true, reason: 'Cannot read as text' };\n }\n \n return { skip: false };\n } catch (error) {\n return { skip: true, reason: `Error reading file: ${error}` };\n }\n}\n\n/**\n * Index a repository for semantic search\n */\nexport async function indexRepository(options: IndexOptions): Promise<IndexResult> {\n const startTime = Date.now();\n const errors: Array<{ file: string; error: string }> = [];\n \n const progress: IndexProgress = {\n phase: 'scanning',\n totalFiles: 0,\n processedFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n };\n \n const reportProgress = () => {\n if (options.onProgress) {\n options.onProgress({ ...progress });\n }\n };\n\n // Check if git repository\n if (!isGitRepository(options.workingDirectory)) {\n return {\n success: false,\n namespace: '',\n totalFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n failedChunks: 0,\n duration: Date.now() - startTime,\n errors: [{ file: '', error: 'Not a git repository' }],\n };\n }\n\n // Get config\n const config = getConfig();\n const { include, exclude, namespace: configNamespace } = 
config.resolvedVectorGateway;\n\n // Get namespace\n const namespace = await getRepoNamespace(options.workingDirectory, configNamespace);\n if (!namespace) {\n return {\n success: false,\n namespace: '',\n totalFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n failedChunks: 0,\n duration: Date.now() - startTime,\n errors: [{ file: '', error: 'Could not determine repository namespace. Ensure git remote is configured.' }],\n };\n }\n\n // Get vector client\n const client = getVectorClient();\n if (!client) {\n return {\n success: false,\n namespace,\n totalFiles: 0,\n totalChunks: 0,\n newChunks: 0,\n skippedChunks: 0,\n failedChunks: 0,\n duration: Date.now() - startTime,\n errors: [{ file: '', error: 'Remote server not configured. Set SPARKECODER_REMOTE_URL/SPARKECODER_AUTH_KEY or remoteServer in sparkecoder.config.json' }],\n };\n }\n\n try {\n // Phase 1: Scan files\n progress.phase = 'scanning';\n reportProgress();\n \n const files = await walkDirectory(\n options.workingDirectory,\n include,\n exclude,\n options.workingDirectory\n );\n \n progress.totalFiles = files.length;\n reportProgress();\n\n // Phase 2: Chunk files\n progress.phase = 'chunking';\n reportProgress();\n \n const allChunks: Chunk[] = [];\n \n for (const filePath of files) {\n const relativePath = relative(options.workingDirectory, filePath);\n progress.currentFile = relativePath;\n \n const skipCheck = shouldSkipFile(filePath);\n if (skipCheck.skip) {\n if (options.verbose) {\n console.log(`Skipping ${relativePath}: ${skipCheck.reason}`);\n }\n progress.processedFiles++;\n reportProgress();\n continue;\n }\n \n try {\n const content = readFileSync(filePath, 'utf-8');\n const chunks = chunkFile(relativePath, content);\n allChunks.push(...chunks);\n progress.totalChunks += chunks.length;\n } catch (error) {\n errors.push({ file: relativePath, error: String(error) });\n }\n \n progress.processedFiles++;\n reportProgress();\n }\n\n // Phase 3: Check existing hashes\n 
progress.phase = 'checking';\n reportProgress();\n \n const db = getDb();\n const existingHashes = new Set<string>();\n \n if (!options.force) {\n // Get all existing chunk IDs for this namespace\n const existingChunks = await indexedChunkQueries.getByNamespace(db, namespace);\n for (const chunk of existingChunks) {\n existingHashes.add(chunk.id);\n }\n }\n \n // Filter to new chunks only\n const newChunks = allChunks.filter((chunk) => !existingHashes.has(chunk.id));\n progress.newChunks = newChunks.length;\n progress.skippedChunks = allChunks.length - newChunks.length;\n reportProgress();\n\n // Phase 4: Embed new chunks\n progress.phase = 'embedding';\n reportProgress();\n \n const embeddingModel = getEmbeddingModel();\n let failedChunks = 0;\n \n // Process in batches (parallelized with a worker pool)\n const batchSize = parsePositiveInt(process.env.SPARKECODER_INDEX_BATCH_SIZE, EMBEDDING_BATCH_SIZE);\n const concurrency = parsePositiveInt(process.env.SPARKECODER_INDEX_CONCURRENCY, EMBEDDING_CONCURRENCY);\n const maxRetries = parsePositiveInt(process.env.SPARKECODER_INDEX_RETRIES, EMBEDDING_RETRIES);\n const totalBatches = Math.ceil(newChunks.length / batchSize);\n console.log(\n `[indexer] Starting embedding: ${newChunks.length} chunks in ${totalBatches} batches (batchSize=${batchSize}, concurrency=${concurrency}, retries=${maxRetries})`\n );\n \n const batches = newChunks.reduce<Array<{ batchNum: number; batch: Chunk[] }>>((acc, chunk, index) => {\n if (index % batchSize === 0) {\n acc.push({\n batchNum: Math.floor(index / batchSize) + 1,\n batch: newChunks.slice(index, index + batchSize),\n });\n }\n return acc;\n }, []);\n\n const processBatch = async (batchNum: number, batch: Chunk[]) => {\n console.log(`[indexer] Batch ${batchNum}/${totalBatches}: embedding ${batch.length} chunks...`);\n const texts = batch.map((chunk) => ({\n id: chunk.id,\n text: chunk.text,\n document: {\n filePath: chunk.metadata.filePath,\n startLine: chunk.metadata.startLine,\n 
endLine: chunk.metadata.endLine,\n language: chunk.metadata.language,\n chunkType: chunk.metadata.chunkType,\n symbolName: chunk.metadata.symbolName,\n contentHash: chunk.contentHash,\n },\n }));\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n const batchStartTime = Date.now();\n try {\n const result = await client.embeddings.createAndWait(texts, {\n namespace,\n embeddingModel,\n });\n \n const embedTime = Date.now() - batchStartTime;\n console.log(`[indexer] Batch ${batchNum}: embed completed in ${embedTime}ms - processed: ${result.processedCount}, failed: ${result.failedCount}`);\n \n failedChunks += result.failedCount;\n \n // Record successful chunks in local DB (batch upsert)\n const successfulChunks = batch.filter(\n (chunk) => !result.errors?.find((e) => e.id === chunk.id)\n );\n \n if (successfulChunks.length > 0) {\n console.log(`[indexer] Batch ${batchNum}: recording ${successfulChunks.length} chunks to DB...`);\n const dbStartTime = Date.now();\n \n await indexedChunkQueries.batchUpsert(db, successfulChunks.map((chunk) => ({\n id: chunk.id,\n contentHash: chunk.contentHash,\n filePath: chunk.metadata.filePath,\n repoNamespace: namespace,\n startLine: chunk.metadata.startLine,\n endLine: chunk.metadata.endLine,\n language: chunk.metadata.language,\n })));\n \n const dbTime = Date.now() - dbStartTime;\n console.log(`[indexer] Batch ${batchNum}: DB batch upsert completed in ${dbTime}ms`);\n }\n \n if (result.errors?.length) {\n for (const err of result.errors) {\n const chunk = batch.find((c) => c.id === err.id);\n if (chunk) {\n errors.push({ file: chunk.metadata.filePath, error: err.error });\n }\n }\n } else if (result.failedCount > 0) {\n errors.push({ file: `batch ${batchNum}`, error: `Embedding failed for ${result.failedCount} chunks (no error details returned)` });\n }\n \n return;\n } catch (error) {\n const errorMsg = formatError(error);\n console.error(`[indexer] Batch ${batchNum}: ERROR (attempt ${attempt + 1}/${maxRetries + 1}) 
- ${errorMsg}`);\n if (attempt >= maxRetries) {\n failedChunks += batch.length;\n errors.push({ file: `batch ${batchNum}`, error: errorMsg });\n return;\n }\n await sleep(500 * (attempt + 1));\n } finally {\n reportProgress();\n }\n }\n };\n\n let nextBatchIndex = 0;\n const workerCount = Math.min(concurrency, batches.length);\n const workers = Array.from({ length: workerCount }, async () => {\n while (nextBatchIndex < batches.length) {\n const currentIndex = nextBatchIndex;\n nextBatchIndex += 1;\n const { batchNum, batch } = batches[currentIndex];\n await processBatch(batchNum, batch);\n }\n });\n\n await Promise.all(workers);\n \n console.log(`[indexer] Embedding complete. Updating index status...`);\n\n // Update index status\n try {\n console.log(`[indexer] Calling indexStatusQueries.upsert with totalChunks: ${allChunks.length}`);\n await indexStatusQueries.upsert(db, {\n id: namespace,\n repoNamespace: namespace,\n totalChunks: allChunks.length,\n lastFullIndex: options.force ? new Date() : undefined,\n lastIncrementalIndex: new Date(),\n });\n console.log(`[indexer] Index status updated successfully`);\n } catch (statusError) {\n console.error(`[indexer] Failed to update index status:`, statusError);\n throw statusError;\n }\n\n // Phase 5: Done\n progress.phase = 'done';\n reportProgress();\n\n return {\n success: true,\n namespace,\n totalFiles: files.length,\n totalChunks: allChunks.length,\n newChunks: newChunks.length - failedChunks,\n skippedChunks: progress.skippedChunks,\n failedChunks,\n duration: Date.now() - startTime,\n errors,\n };\n } finally {\n await closeVectorClient();\n }\n}\n\n/**\n * Get the index status for a repository\n */\nexport async function getIndexStatus(workingDirectory: string): Promise<IndexStatus> {\n const config = getConfig();\n const namespace = await getRepoNamespace(\n workingDirectory,\n config.resolvedVectorGateway.namespace\n );\n \n const isConfigured = config.resolvedRemoteServer.isConfigured;\n \n if (!namespace) 
{\n return {\n namespace: '',\n totalChunks: 0,\n lastFullIndex: null,\n lastIncrementalIndex: null,\n isConfigured,\n };\n }\n \n try {\n const db = getDb();\n const status = await indexStatusQueries.get(db, namespace);\n \n if (!status) {\n return {\n namespace,\n totalChunks: 0,\n lastFullIndex: null,\n lastIncrementalIndex: null,\n isConfigured,\n };\n }\n \n return {\n namespace,\n totalChunks: status.totalChunks ?? 0,\n lastFullIndex: status.lastFullIndex ?? null,\n lastIncrementalIndex: status.lastIncrementalIndex ?? null,\n isConfigured,\n };\n } catch {\n return {\n namespace,\n totalChunks: 0,\n lastFullIndex: null,\n lastIncrementalIndex: null,\n isConfigured,\n };\n }\n}\n\n/**\n * Check if an index exists for a repository\n */\nexport async function checkIndexExists(workingDirectory: string): Promise<boolean> {\n const status = await getIndexStatus(workingDirectory);\n return status.totalChunks > 0;\n}\n","/**\n * Semantic search module\n * Provides indexing and semantic search capabilities using Vector Gateway\n */\n\n// Types\nexport * from './types.js';\n\n// Namespace resolution\nexport {\n getRepoNamespace,\n isGitRepository,\n getCurrentBranch,\n getCurrentCommit,\n getGitRemoteUrl,\n parseGitRemoteUrl,\n} from './namespace.js';\n\n// Content hashing\nexport {\n computeContentHash,\n computeFullHash,\n computeFileHash,\n generateChunkId,\n parseChunkId,\n} from './hasher.js';\n\n// File chunking\nexport {\n chunkFile,\n detectLanguage,\n supportsSemanticChunking,\n} from './chunker.js';\n\n// Vector client\nexport {\n getVectorClient,\n closeVectorClient,\n isVectorGatewayConfigured,\n getEmbeddingModel,\n} from './client.js';\n\n// Indexing\nexport {\n indexRepository,\n getIndexStatus,\n checkIndexExists,\n} from './indexer.js';\n","/**\n * Semantic Search Tool\n * Uses Vector Gateway to perform semantic similarity search on indexed codebase\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { existsSync, readFileSync } from 
'node:fs';\nimport { join } from 'node:path';\nimport { minimatch } from 'minimatch';\nimport {\n getVectorClient,\n closeVectorClient,\n getRepoNamespace,\n getEmbeddingModel,\n} from '../semantic/index.js';\nimport { getConfig } from '../config/index.js';\nimport { SemanticMatch } from '../semantic/types.js';\n\nexport interface SemanticSearchToolOptions {\n workingDirectory: string;\n}\n\nexport interface SemanticSearchResult {\n success: boolean;\n query?: string;\n matches?: SemanticMatch[];\n totalMatches?: number;\n duration?: number;\n error?: string;\n}\n\nconst semanticSearchInputSchema = z.object({\n query: z\n .string()\n .describe('Natural language search query describing what you want to find'),\n topK: z\n .number()\n .optional()\n .default(10)\n .describe('Number of results to return (default: 10, max: 50)'),\n filePattern: z\n .string()\n .optional()\n .describe('Filter results by file glob pattern (e.g., \"*.ts\", \"src/**/*.py\")'),\n language: z\n .string()\n .optional()\n .describe('Filter by programming language (e.g., \"typescript\", \"python\")'),\n});\n\n/**\n * Create the semantic_search tool\n */\nexport function createSemanticSearchTool(options: SemanticSearchToolOptions) {\n return tool({\n description: `Search the codebase using semantic similarity. 
This tool finds code by understanding its meaning, not just matching text.\n\nUse this tool when:\n- You need to understand how something works in the codebase\n- You're looking for code related to a concept (e.g., \"authentication\", \"database queries\")\n- You want to find implementations of features\n- The user asks \"where is X?\" or \"how does Y work?\"\n\nThis tool requires the repository to be indexed first with 'sparkecoder index'.\n\nReturns matching code snippets with file paths, line numbers, and relevance scores.`,\n\n inputSchema: semanticSearchInputSchema,\n\n execute: async ({\n query,\n topK,\n filePattern,\n language,\n }: z.infer<typeof semanticSearchInputSchema>): Promise<SemanticSearchResult> => {\n const startTime = Date.now();\n\n try {\n const config = getConfig();\n\n const namespace = await getRepoNamespace(\n options.workingDirectory,\n config.resolvedVectorGateway.namespace\n );\n\n if (!namespace) {\n return {\n success: false,\n error: 'Repository namespace not found. Ensure this is a git repository with a remote configured.',\n };\n }\n\n const client = getVectorClient();\n if (!client) {\n return {\n success: false,\n error: 'Remote server not configured. 
Set SPARKECODER_REMOTE_URL/SPARKECODER_AUTH_KEY or run sparkecoder to register.',\n };\n }\n\n try {\n const limitedTopK = Math.min(Math.max(1, topK), 50);\n\n const embeddingModel = getEmbeddingModel();\n const result = await client.search.queryAndWait(query, {\n namespace,\n topK: limitedTopK * 2,\n includeMetadata: true,\n embeddingModel,\n });\n\n const matches: SemanticMatch[] = [];\n\n for (const match of result.matches) {\n const metadata = match.metadata as Record<string, unknown> | undefined;\n if (!metadata) continue;\n\n const filePath = metadata.filePath as string;\n const startLine = metadata.startLine as number;\n const endLine = metadata.endLine as number;\n const matchLanguage = metadata.language as string;\n const symbolName = metadata.symbolName as string | undefined;\n\n if (filePattern) {\n const matchesPattern = minimatch(filePath, filePattern, { dot: true });\n if (!matchesPattern) continue;\n }\n\n if (language && matchLanguage !== language.toLowerCase()) {\n continue;\n }\n\n const fullPath = join(options.workingDirectory, filePath);\n if (!existsSync(fullPath)) {\n continue;\n }\n\n let snippet = '';\n try {\n const content = readFileSync(fullPath, 'utf-8');\n const lines = content.split('\\n');\n const snippetLines = lines.slice(\n Math.max(0, startLine - 1),\n Math.min(lines.length, endLine)\n );\n snippet = snippetLines.join('\\n');\n\n if (snippet.length > 500) {\n snippet = snippet.slice(0, 500) + '\\n... (truncated)';\n }\n } catch {\n // Ignore read errors\n }\n\n matches.push({\n filePath,\n startLine,\n endLine,\n score: match.score,\n snippet,\n symbolName,\n language: matchLanguage,\n });\n\n if (matches.length >= limitedTopK) {\n break;\n }\n }\n\n return {\n success: true,\n query,\n matches,\n totalMatches: matches.length,\n duration: Date.now() - startTime,\n };\n } finally {\n await closeVectorClient();\n }\n } catch (error) {\n return {\n success: false,\n error: `Semantic search failed: ${error instanceof Error ? 
error.message : String(error)}`,\n };\n }\n },\n });\n}\n","export type WebhookEventType =\n | 'task.started'\n | 'task.message'\n | 'task.tool_call'\n | 'task.tool_result'\n | 'task.step_finished'\n | 'task.completed'\n | 'task.failed';\n\nexport interface WebhookEvent {\n type: WebhookEventType;\n taskId: string;\n sessionId: string;\n timestamp: string;\n data: unknown;\n}\n\n/**\n * Fire-and-forget POST to a webhook URL.\n * Silently ignores network/timeout errors so the agent loop is never blocked.\n */\nexport async function sendWebhook(url: string, event: WebhookEvent): Promise<void> {\n try {\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), 5000);\n\n await fetch(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-SparkECoder-Event': event.type,\n },\n body: JSON.stringify(event),\n signal: controller.signal,\n });\n\n clearTimeout(timeout);\n } catch {\n // Fire-and-forget: silently ignore errors\n }\n}\n","import WebSocket from 'ws';\nimport { EventEmitter } from 'node:events';\n\nexport interface BrowserFrameMetadata {\n deviceWidth: number;\n deviceHeight: number;\n pageScaleFactor: number;\n offsetTop: number;\n scrollOffsetX: number;\n scrollOffsetY: number;\n}\n\nexport interface BrowserFrame {\n data: string; // base64-encoded JPEG\n metadata: BrowserFrameMetadata;\n timestamp: number;\n}\n\nexport interface BrowserStatus {\n connected: boolean;\n screencasting: boolean;\n viewportWidth?: number;\n viewportHeight?: number;\n}\n\nexport interface BrowserMouseEvent {\n type: 'input_mouse';\n eventType: 'mousePressed' | 'mouseReleased' | 'mouseMoved' | 'mouseWheel';\n x: number;\n y: number;\n button?: 'left' | 'right' | 'middle';\n clickCount?: number;\n deltaX?: number;\n deltaY?: number;\n}\n\nexport interface BrowserKeyboardEvent {\n type: 'input_keyboard';\n eventType: 'keyDown' | 'keyUp' | 'char';\n key: string;\n code?: string;\n text?: string;\n modifiers?: 
number;\n}\n\nexport interface BrowserTouchEvent {\n type: 'input_touch';\n eventType: 'touchStart' | 'touchMove' | 'touchEnd';\n touchPoints: Array<{ x: number; y: number; id?: number }>;\n}\n\nexport type BrowserInputEvent = BrowserMouseEvent | BrowserKeyboardEvent | BrowserTouchEvent;\n\nexport interface BrowserStreamProxyEvents {\n frame: (frame: BrowserFrame) => void;\n status: (status: BrowserStatus) => void;\n close: () => void;\n error: (error: Error) => void;\n}\n\nconst RECONNECT_DELAY_MS = 1000;\nconst MAX_RECONNECT_ATTEMPTS = 20;\nconst FRAME_THROTTLE_MS = 100; // ~10 fps max\n\n/**\n * Proxy that connects to an agent-browser WebSocket stream server\n * and forwards frames/status to consumers via EventEmitter.\n */\nexport class BrowserStreamProxy extends EventEmitter {\n private ws: WebSocket | null = null;\n private port: number;\n private reconnectAttempts = 0;\n private reconnectTimer: ReturnType<typeof setTimeout> | null = null;\n private destroyed = false;\n private lastFrameTime = 0;\n private _latestFrame: BrowserFrame | null = null;\n private _connected = false;\n\n constructor(port: number) {\n super();\n this.port = port;\n }\n\n get connected(): boolean {\n return this._connected;\n }\n\n get latestFrame(): BrowserFrame | null {\n return this._latestFrame;\n }\n\n connect(): void {\n if (this.destroyed) return;\n console.log(`[BROWSER-WS] connect() called for port ${this.port}`);\n this.doConnect();\n }\n\n private doConnect(): void {\n if (this.destroyed) return;\n\n const url = `ws://localhost:${this.port}`;\n console.log(`[BROWSER-WS] Attempting WebSocket connection to ${url} (attempt ${this.reconnectAttempts + 1}/${MAX_RECONNECT_ATTEMPTS})`);\n try {\n this.ws = new WebSocket(url);\n } catch (err) {\n console.warn(`[BROWSER-WS] WebSocket constructor threw for ${url}:`, err);\n this.scheduleReconnect();\n return;\n }\n\n this.ws.on('open', () => {\n console.log(`[BROWSER-WS] Connected to ${url} (after ${this.reconnectAttempts} 
retries)`);\n this.reconnectAttempts = 0;\n this._connected = true;\n // Don't emit screencasting: true here — wait for the real status\n // from the StreamServer. It sends status immediately on connection\n // with the actual isScreencasting state.\n });\n\n this.ws.on('message', (raw: Buffer | string) => {\n try {\n const msg = JSON.parse(typeof raw === 'string' ? raw : raw.toString('utf8'));\n this.handleMessage(msg);\n } catch (err) {\n console.warn(`[BROWSER-WS] Malformed message from ${url}:`, err);\n }\n });\n\n this.ws.on('close', (code, reason) => {\n const wasConnected = this._connected;\n this._connected = false;\n console.log(`[BROWSER-WS] Connection closed: code=${code} reason=\"${reason?.toString() || ''}\" wasConnected=${wasConnected} destroyed=${this.destroyed}`);\n if (wasConnected) {\n this.emit('status', { connected: false, screencasting: false } satisfies BrowserStatus);\n }\n if (!this.destroyed) {\n this.scheduleReconnect();\n }\n });\n\n this.ws.on('error', (err) => {\n console.warn(`[BROWSER-WS] WebSocket error on port ${this.port}:`, err.message);\n });\n }\n\n private frameCount = 0;\n private throttledCount = 0;\n private lastFrameLogTime = 0;\n\n private handleMessage(msg: any): void {\n if (msg.type === 'frame') {\n const now = Date.now();\n if (now - this.lastFrameTime < FRAME_THROTTLE_MS) {\n this.throttledCount++;\n return;\n }\n this.lastFrameTime = now;\n this.frameCount++;\n\n // Log frame stats every 5 seconds\n if (now - this.lastFrameLogTime > 5000) {\n console.log(`[BROWSER-WS] Frame stats: emitted=${this.frameCount} throttled=${this.throttledCount} listeners=${this.listenerCount('frame')} dataSize=${msg.data?.length ?? 0}`);\n this.lastFrameLogTime = now;\n }\n\n const frame: BrowserFrame = {\n data: msg.data,\n metadata: msg.metadata ?? 
{\n deviceWidth: 1280,\n deviceHeight: 720,\n pageScaleFactor: 1,\n offsetTop: 0,\n scrollOffsetX: 0,\n scrollOffsetY: 0,\n },\n timestamp: now,\n };\n this._latestFrame = frame;\n this.emit('frame', frame);\n } else if (msg.type === 'status') {\n console.log(`[BROWSER-WS] Status message received:`, JSON.stringify(msg));\n this.emit('status', {\n connected: msg.connected ?? true,\n screencasting: msg.screencasting ?? true,\n viewportWidth: msg.viewportWidth,\n viewportHeight: msg.viewportHeight,\n } satisfies BrowserStatus);\n } else {\n console.log(`[BROWSER-WS] Unknown message type: ${msg.type}`);\n }\n }\n\n private scheduleReconnect(): void {\n if (this.destroyed || this.reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {\n console.log(`[BROWSER-WS] Giving up reconnection: destroyed=${this.destroyed} attempts=${this.reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS}`);\n this.emit('close');\n return;\n }\n this.reconnectAttempts++;\n // Fast retries for the first 5 attempts (browser is starting up),\n // then back off for later retries (browser may have closed)\n const delay = this.reconnectAttempts <= 5\n ? RECONNECT_DELAY_MS\n : RECONNECT_DELAY_MS * (this.reconnectAttempts - 4);\n console.log(`[BROWSER-WS] Scheduling reconnect in ${delay}ms (attempt ${this.reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS})`);\n this.reconnectTimer = setTimeout(() => this.doConnect(), delay);\n }\n\n /**\n * Send an input event to the browser for pair-browsing.\n */\n injectInput(event: BrowserInputEvent): void {\n if (this.ws?.readyState === WebSocket.OPEN) {\n this.ws.send(JSON.stringify(event));\n }\n }\n\n /**\n * Ask the StreamServer to send its current status (triggers sendStatus and\n * re-evaluates screencasting). 
Useful when listeners are replaced on a new\n * stream and we want a fresh status event.\n */\n requestStatus(): void {\n if (this.ws?.readyState === WebSocket.OPEN) {\n console.log(`[BROWSER-WS] Requesting fresh status from StreamServer`);\n this.ws.send(JSON.stringify({ type: 'status' }));\n }\n }\n\n destroy(): void {\n console.log(`[BROWSER-WS] Destroying proxy for port ${this.port} (emitted ${this.frameCount} frames, throttled ${this.throttledCount})`);\n this.destroyed = true;\n if (this.reconnectTimer) {\n clearTimeout(this.reconnectTimer);\n this.reconnectTimer = null;\n }\n if (this.ws) {\n this.ws.removeAllListeners();\n this.ws.close();\n this.ws = null;\n }\n this._connected = false;\n this.removeAllListeners();\n }\n}\n\n// Registry of active proxies per session\nconst activeProxies = new Map<string, BrowserStreamProxy>();\n\nexport function getOrCreateProxy(sessionId: string, port: number): BrowserStreamProxy {\n const existing = activeProxies.get(sessionId);\n if (existing) {\n console.log(`[BROWSER-WS] Reusing existing proxy for session ${sessionId} (connected=${existing.connected})`);\n return existing;\n }\n\n console.log(`[BROWSER-WS] Creating new proxy for session ${sessionId} on port ${port} (active proxies: ${activeProxies.size})`);\n const proxy = new BrowserStreamProxy(port);\n activeProxies.set(sessionId, proxy);\n proxy.on('close', () => {\n console.log(`[BROWSER-WS] Proxy closed for session ${sessionId}, removing from registry`);\n activeProxies.delete(sessionId);\n });\n proxy.connect();\n return proxy;\n}\n\nexport function getProxy(sessionId: string): BrowserStreamProxy | undefined {\n return activeProxies.get(sessionId);\n}\n\nexport function destroyProxy(sessionId: string): void {\n const proxy = activeProxies.get(sessionId);\n if (proxy) {\n console.log(`[BROWSER-WS] destroyProxy() called for session ${sessionId}`);\n proxy.destroy();\n activeProxies.delete(sessionId);\n } else {\n console.log(`[BROWSER-WS] destroyProxy() called but 
no proxy exists for session ${sessionId}`);\n }\n}\n","import { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { writeFile, mkdir, readFile, unlink, readdir, rm } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport { tmpdir } from 'node:os';\nimport { nanoid } from 'nanoid';\nimport type { BrowserFrame } from './stream-proxy.js';\n\nconst execAsync = promisify(exec);\n\ninterface RecordedFrame {\n data: Buffer;\n timestamp: number;\n}\n\nexport class FrameRecorder {\n private frames: RecordedFrame[] = [];\n private startTime: number | null = null;\n private recording = false;\n private sessionId: string;\n\n constructor(sessionId: string) {\n this.sessionId = sessionId;\n }\n\n get isRecording(): boolean {\n return this.recording;\n }\n\n get frameCount(): number {\n return this.frames.length;\n }\n\n start(): void {\n this.frames = [];\n this.startTime = Date.now();\n this.recording = true;\n }\n\n addFrame(frame: BrowserFrame): void {\n if (!this.recording) return;\n this.frames.push({\n data: Buffer.from(frame.data, 'base64'),\n timestamp: frame.timestamp,\n });\n }\n\n stop(): void {\n this.recording = false;\n }\n\n /**\n * Encode recorded frames into an MP4 using ffmpeg.\n * Returns the file path to the generated MP4, or null if encoding fails.\n */\n async encode(): Promise<{ path: string; sizeBytes: number } | null> {\n if (this.frames.length === 0) return null;\n\n const workDir = join(tmpdir(), `sparkecoder-recording-${nanoid(8)}`);\n await mkdir(workDir, { recursive: true });\n\n try {\n // Write frames as numbered JPEG files\n for (let i = 0; i < this.frames.length; i++) {\n const framePath = join(workDir, `frame_${String(i).padStart(6, '0')}.jpg`);\n await writeFile(framePath, this.frames[i].data);\n }\n\n // Calculate average FPS from timestamps\n const duration = (this.frames[this.frames.length - 1].timestamp - this.frames[0].timestamp) / 1000;\n const fps = duration > 0 ? 
Math.round(this.frames.length / duration) : 10;\n const clampedFps = Math.max(1, Math.min(fps, 30));\n\n const outputPath = join(workDir, `recording_${this.sessionId}.mp4`);\n\n // Try ffmpeg\n const hasFfmpeg = await checkFfmpeg();\n if (hasFfmpeg) {\n await execAsync(\n `ffmpeg -y -framerate ${clampedFps} -i \"${join(workDir, 'frame_%06d.jpg')}\" ` +\n `-c:v libx264 -pix_fmt yuv420p -preset fast -crf 23 ` +\n `\"${outputPath}\"`,\n { timeout: 120_000 }\n );\n } else {\n // Fallback: create MJPEG-in-MP4 using raw ffmpeg with mjpeg codec\n // If ffmpeg isn't available at all, return null\n console.warn('[RECORDER] ffmpeg not available, cannot encode recording');\n await cleanup(workDir);\n return null;\n }\n\n const outputBuf = await readFile(outputPath);\n\n // Clean up frame files but keep the output\n const files = await readdir(workDir);\n for (const f of files) {\n if (f.startsWith('frame_')) {\n await unlink(join(workDir, f)).catch(() => {});\n }\n }\n\n return { path: outputPath, sizeBytes: outputBuf.length };\n } catch (error) {\n console.error('[RECORDER] Failed to encode recording:', error);\n await cleanup(workDir);\n return null;\n }\n }\n\n /** Discard all frames and free memory */\n clear(): void {\n this.frames = [];\n this.startTime = null;\n this.recording = false;\n }\n}\n\nasync function checkFfmpeg(): Promise<boolean> {\n try {\n await execAsync('ffmpeg -version', { timeout: 5000 });\n return true;\n } catch {\n return false;\n }\n}\n\nasync function cleanup(dir: string): Promise<void> {\n try {\n await rm(dir, { recursive: true, force: true });\n } catch {\n // Best effort\n }\n}\n","import 'dotenv/config'; // Load .env file early\nimport { Hono } from 'hono';\nimport { serve, type ServerType } from '@hono/node-server';\nimport { cors } from 'hono/cors';\nimport { logger } from 'hono/logger';\nimport { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { resolve, dirname, join } from 'node:path';\nimport { spawn, type ChildProcess } 
from 'node:child_process';\nimport { createServer as createNetServer } from 'node:net';\nimport { fileURLToPath } from 'node:url';\nimport { sessions } from './routes/sessions.js';\nimport { agents } from './routes/agents.js';\nimport { health } from './routes/health.js';\nimport { terminals } from './routes/terminals.js';\nimport tasks from './routes/tasks.js';\nimport * as tmux from '../terminal/tmux.js';\nimport { loadConfig, getConfig, loadApiKeysIntoEnv, ensureRemoteAuthKey, saveAuthKey } from '../config/index.js';\nimport { initDatabase, closeDatabase, isUsingRemote } from '../db/index.js';\nimport { checkDependencies } from '../utils/dependencies.js';\n\nlet serverInstance: ServerType | null = null;\nlet webUIProcess: ChildProcess | null = null;\n\n// Default web UI port\nconst DEFAULT_WEB_PORT = 6969;\nconst WEB_PORT_SEQUENCE = [6969, 6970, 6971, 6972, 6973, 6974, 6975, 6976, 6977, 6978];\n\nexport interface ServerOptions {\n port?: number;\n host?: string;\n workingDirectory?: string;\n configPath?: string;\n quiet?: boolean; // Disable HTTP request logging\n webUI?: boolean; // Start web UI (default: true)\n webPort?: number; // Web UI port (default: 6969)\n publicUrl?: string; // Public URL for web UI to connect to API (for Docker/remote access)\n}\n\n// Get the web directory path\nfunction getWebDirectory(): string | null {\n try {\n const currentDir = dirname(fileURLToPath(import.meta.url));\n const webDir = resolve(currentDir, '..', 'web');\n \n if (existsSync(webDir) && existsSync(join(webDir, 'package.json'))) {\n return webDir;\n }\n \n const altWebDir = resolve(currentDir, '..', '..', 'web');\n if (existsSync(altWebDir) && existsSync(join(altWebDir, 'package.json'))) {\n return altWebDir;\n }\n \n return null;\n } catch {\n return null;\n }\n}\n\n// Check if a SparkECoder web UI is running on a port\nasync function isSparkcoderWebRunning(port: number): Promise<boolean> {\n try {\n const response = await fetch(`http://localhost:${port}/api/health`, 
{ \n signal: AbortSignal.timeout(1000) \n });\n if (response.ok) {\n const data = await response.json() as { name?: string };\n return data.name === 'sparkecoder-web';\n }\n return false;\n } catch {\n return false;\n }\n}\n\n// Check if a port is in use\nfunction isPortInUse(port: number): Promise<boolean> {\n return new Promise((resolve) => {\n const server = createNetServer();\n \n server.once('error', (err: NodeJS.ErrnoException) => {\n if (err.code === 'EADDRINUSE') {\n resolve(true);\n } else {\n resolve(false);\n }\n });\n \n server.once('listening', () => {\n server.close();\n resolve(false);\n });\n \n server.listen(port, '0.0.0.0');\n });\n}\n\n// Find an available port for the web UI\nasync function findWebPort(preferredPort: number): Promise<{ port: number; alreadyRunning: boolean }> {\n if (await isSparkcoderWebRunning(preferredPort)) {\n return { port: preferredPort, alreadyRunning: true };\n }\n \n if (!(await isPortInUse(preferredPort))) {\n return { port: preferredPort, alreadyRunning: false };\n }\n \n for (const port of WEB_PORT_SEQUENCE) {\n if (port === preferredPort) continue;\n \n if (await isSparkcoderWebRunning(port)) {\n return { port, alreadyRunning: true };\n }\n \n if (!(await isPortInUse(port))) {\n return { port, alreadyRunning: false };\n }\n }\n \n return { port: preferredPort, alreadyRunning: false };\n}\n\n// Check if production build exists\nfunction hasProductionBuild(webDir: string): boolean {\n const buildIdPath = join(webDir, '.next', 'BUILD_ID');\n return existsSync(buildIdPath);\n}\n\n// Check if source files exist (for dev mode)\nfunction hasSourceFiles(webDir: string): boolean {\n // Check for app directory (Next.js App Router)\n const appDir = join(webDir, 'src', 'app');\n const pagesDir = join(webDir, 'src', 'pages');\n const rootAppDir = join(webDir, 'app');\n const rootPagesDir = join(webDir, 'pages');\n \n return existsSync(appDir) || existsSync(pagesDir) || existsSync(rootAppDir) || 
existsSync(rootPagesDir);\n}\n\n// Check if standalone build exists and find the server.js path\nfunction getStandaloneServerPath(webDir: string): string | null {\n // Check for standalone server - may be at different paths depending on project structure\n const possiblePaths = [\n join(webDir, '.next', 'standalone', 'server.js'),\n join(webDir, '.next', 'standalone', 'web', 'server.js'),\n ];\n \n for (const serverPath of possiblePaths) {\n if (existsSync(serverPath)) {\n return serverPath;\n }\n }\n \n return null;\n}\n\n// Run a command and wait for it to complete\nfunction runCommand(command: string, args: string[], cwd: string, env: NodeJS.ProcessEnv): Promise<{ success: boolean; output: string }> {\n return new Promise((resolve) => {\n const child = spawn(command, args, {\n cwd,\n stdio: ['ignore', 'pipe', 'pipe'],\n env,\n shell: true,\n });\n \n let output = '';\n child.stdout?.on('data', (data: Buffer) => { output += data.toString(); });\n child.stderr?.on('data', (data: Buffer) => { output += data.toString(); });\n \n child.on('close', (code) => {\n resolve({ success: code === 0, output });\n });\n \n child.on('error', (err) => {\n resolve({ success: false, output: err.message });\n });\n });\n}\n\n// Start the web UI\nasync function startWebUI(\n apiPort: number,\n webPort: number = DEFAULT_WEB_PORT,\n quiet: boolean = false,\n publicUrl?: string\n): Promise<{ process: ChildProcess | null; port: number; started?: boolean }> {\n const webDir = getWebDirectory();\n \n if (!webDir) {\n if (!quiet) console.log(' ⚠ Web UI not found, skipping...');\n return { process: null, port: webPort };\n }\n \n const { port: actualPort, alreadyRunning } = await findWebPort(webPort);\n \n if (alreadyRunning) {\n if (!quiet) console.log(` ✓ Web UI already running at http://localhost:${actualPort}`);\n return { process: null, port: actualPort };\n }\n \n // Determine which package manager to use (prefer pnpm if available)\n const usePnpm = existsSync(join(webDir, 
'pnpm-lock.yaml'));\n const useNpm = !usePnpm && existsSync(join(webDir, 'package-lock.json'));\n \n const pkgManager = usePnpm ? 'pnpm' : useNpm ? 'npm' : 'npx';\n \n // Create a clean environment without tsx's module resolution pollution\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const { NODE_OPTIONS, TSX_TSCONFIG_PATH, ...cleanEnv } = process.env;\n \n // Use publicUrl if provided, otherwise default to localhost\n const apiUrl = publicUrl || `http://127.0.0.1:${apiPort}`;\n \n // Write runtime config to a JSON file that the web app can read\n // This avoids NEXT_PUBLIC_* env var build-time issues\n const runtimeConfig = { apiBaseUrl: apiUrl };\n const runtimeConfigPath = join(webDir, 'runtime-config.json');\n try {\n writeFileSync(runtimeConfigPath, JSON.stringify(runtimeConfig, null, 2));\n if (!quiet) console.log(` 📝 Runtime config written to ${runtimeConfigPath}`);\n } catch (err) {\n if (!quiet) console.warn(` ⚠ Could not write runtime config: ${err}`);\n }\n \n const webEnv: NodeJS.ProcessEnv & { PORT: string; HOSTNAME?: string } = {\n ...cleanEnv,\n PORT: String(actualPort), // Next.js respects PORT env var\n };\n \n // Determine which mode to run in based on available files\n // Priority:\n // 1. Standalone build (for npm package distribution)\n // 2. Production build with next start\n // 3. 
Development mode with next dev (only if source files exist)\n const hasSource = hasSourceFiles(webDir);\n const standaloneServerPath = getStandaloneServerPath(webDir);\n const hasBuild = hasProductionBuild(webDir);\n const isProduction = process.env.NODE_ENV === 'production';\n \n let command: string;\n let args: string[];\n let cwd = webDir; // Default cwd\n \n if (standaloneServerPath) {\n // Standalone build available - use Node to run the standalone server\n // This is the preferred mode for npm package distribution\n command = 'node';\n args = ['server.js'];\n \n // Standalone server must run from its own directory\n cwd = dirname(standaloneServerPath);\n \n // Standalone server uses PORT env var\n webEnv.PORT = String(actualPort);\n webEnv.HOSTNAME = '0.0.0.0';\n \n if (!quiet) console.log(' 📦 Starting Web UI from standalone build...');\n } else if (hasBuild && (isProduction || !hasSource)) {\n // Production build exists and either:\n // - Running in production mode, OR\n // - No source files (npm package without source)\n command = pkgManager;\n // Use PORT env var for npm/pnpm (already set in webEnv), only pass -p for npx\n args = pkgManager === 'npx' \n ? ['next', 'start', '-p', String(actualPort)]\n : ['run', 'start'];\n } else if (hasSource) {\n // Development mode: run next dev (source files required)\n if (isProduction && !hasBuild) {\n // Production mode but no build - need to build first\n if (!quiet) console.log(' 📦 Building Web UI for production...');\n \n const buildArgs = pkgManager === 'npx' \n ? 
['next', 'build'] \n : ['run', 'build'];\n \n const buildResult = await runCommand(pkgManager, buildArgs, webDir, webEnv);\n \n if (!buildResult.success) {\n if (!quiet) console.error(' ❌ Web UI build failed');\n return { process: null, port: actualPort };\n }\n \n if (!quiet) console.log(' ✓ Web UI build complete');\n \n command = pkgManager;\n // Use PORT env var for npm/pnpm (already set in webEnv), only pass -p for npx\n args = pkgManager === 'npx' \n ? ['next', 'start', '-p', String(actualPort)]\n : ['run', 'start'];\n } else {\n // Development mode with source files\n command = pkgManager;\n // Use PORT env var for npm/pnpm (already set in webEnv), only pass -p for npx\n args = pkgManager === 'npx'\n ? ['next', 'dev', '-p', String(actualPort)]\n : ['run', 'dev'];\n }\n } else {\n // No standalone, no build, no source files - can't start web UI\n if (!quiet) {\n console.error(' ❌ Web UI cannot start: no build or source files found');\n console.error(' This may be a packaging issue. 
Try reinstalling sparkecoder.');\n }\n return { process: null, port: actualPort };\n }\n \n const child = spawn(command, args, {\n cwd,\n stdio: ['ignore', 'pipe', 'pipe'],\n env: webEnv,\n detached: false,\n shell: true,\n });\n \n // Wait for the web UI to actually start (with timeout)\n const startupTimeout = 30000; // 30 seconds\n let started = false;\n let exited = false;\n let exitCode: number | null = null;\n \n const startedPromise = new Promise<boolean>((resolve) => {\n const timeout = setTimeout(() => {\n if (!started && !exited) {\n resolve(false);\n }\n }, startupTimeout);\n \n child.stdout?.on('data', (data: Buffer) => {\n const output = data.toString();\n if (!quiet) {\n // Show all stdout in verbose mode\n const lines = output.trim().split('\\n').filter(l => l.trim());\n for (const line of lines) {\n console.log(` Web UI: ${line}`);\n }\n }\n if (!started && (output.includes('Ready') || output.includes('started') || output.includes('localhost'))) {\n started = true;\n clearTimeout(timeout);\n resolve(true);\n }\n });\n \n child.stderr?.on('data', (data: Buffer) => {\n const output = data.toString().trim();\n if (!quiet && output) {\n console.error(` Web UI: ${output.slice(0, 500)}`);\n }\n });\n \n child.on('error', (err) => {\n if (!quiet) console.error(` ❌ Web UI spawn error: ${err.message}`);\n clearTimeout(timeout);\n resolve(false);\n });\n \n child.on('exit', (code) => {\n exited = true;\n exitCode = code;\n if (!started) {\n clearTimeout(timeout);\n resolve(false);\n }\n webUIProcess = null;\n });\n });\n \n webUIProcess = child;\n \n // Wait for startup (but don't block indefinitely)\n const didStart = await startedPromise;\n \n if (!didStart) {\n if (exited && exitCode !== 0) {\n if (!quiet) console.error(` ❌ Web UI failed to start (exit code: ${exitCode})`);\n } else if (!exited) {\n if (!quiet) console.log(` ⚠ Web UI startup timed out, continuing anyway...`);\n }\n // Don't kill the process, it might still be starting\n }\n \n return { 
process: child, port: actualPort, started: didStart };\n}\n\n// Stop the web UI\nexport function stopWebUI(): void {\n if (webUIProcess) {\n webUIProcess.kill('SIGTERM');\n webUIProcess = null;\n }\n}\n\nexport async function createApp(options: { quiet?: boolean } = {}) {\n const app = new Hono();\n\n // Middleware - CORS for cross-origin requests from web UI\n app.use('*', cors({\n origin: '*', // Allow all origins\n allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],\n allowHeaders: ['Content-Type', 'Authorization', 'X-Requested-With'],\n exposeHeaders: ['X-Stream-Id', 'x-stream-id'],\n maxAge: 86400, // 24 hours\n }));\n \n // Only enable HTTP logging if not in quiet mode\n if (!options.quiet) {\n app.use('*', logger());\n }\n\n // Health checks\n app.route('/health', health);\n\n // API routes\n app.route('/sessions', sessions);\n app.route('/agents', agents);\n app.route('/sessions', terminals); // Terminal routes are nested under /sessions/:sessionId/terminals\n app.route('/terminals', terminals); // Also mount at /terminals for simpler direct access (e.g., /terminals/stream/:id)\n app.route('/tasks', tasks);\n\n // OpenAPI spec (manual, simplified)\n app.get('/openapi.json', async (c) => {\n return c.json(generateOpenAPISpec());\n });\n\n // Swagger UI\n app.get('/swagger', (c) => {\n const html = `<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <title>SparkECoder API - Swagger UI</title>\n <link rel=\"stylesheet\" href=\"https://unpkg.com/swagger-ui-dist@5/swagger-ui.css\">\n</head>\n<body>\n <div id=\"swagger-ui\"></div>\n <script src=\"https://unpkg.com/swagger-ui-dist@5/swagger-ui-bundle.js\"></script>\n <script>\n SwaggerUIBundle({\n url: '/openapi.json',\n dom_id: '#swagger-ui',\n presets: [SwaggerUIBundle.presets.apis, SwaggerUIBundle.SwaggerUIStandalonePreset],\n layout: \"BaseLayout\"\n });\n </script>\n</body>\n</html>`;\n return c.html(html);\n });\n\n // Root endpoint\n app.get('/', (c) => {\n return 
c.json({\n name: 'SparkECoder API',\n version: '0.1.0',\n description: 'A powerful coding agent CLI with HTTP API',\n docs: '/openapi.json',\n endpoints: {\n health: '/health',\n sessions: '/sessions',\n agents: '/agents',\n terminals: '/sessions/:sessionId/terminals',\n },\n });\n });\n\n return app;\n}\n\nexport async function startServer(options: ServerOptions = {}) {\n // Check for required dependencies (tmux) before starting\n const depsOk = await checkDependencies({ quiet: options.quiet, exitOnFailure: false });\n if (!depsOk) {\n throw new Error('Missing required dependency: tmux. See above for installation instructions.');\n }\n\n // Load config\n const config = await loadConfig(options.configPath, options.workingDirectory);\n\n // Load API keys from storage into environment (before making any API calls)\n loadApiKeysIntoEnv();\n\n // Override working directory if provided\n if (options.workingDirectory) {\n config.resolvedWorkingDirectory = options.workingDirectory;\n }\n\n // Ensure working directory exists (create agent-workspace if needed)\n if (!existsSync(config.resolvedWorkingDirectory)) {\n mkdirSync(config.resolvedWorkingDirectory, { recursive: true });\n if (!options.quiet) console.log(`📁 Created agent workspace: ${config.resolvedWorkingDirectory}`);\n }\n\n // Initialize database (remote MongoDB)\n if (!config.resolvedRemoteServer.url) {\n throw new Error('Remote server not configured. 
Set REMOTE_SERVER_URL environment variable or remoteServer.url in config');\n }\n \n let authKey = config.resolvedRemoteServer.authKey;\n if (!authKey) {\n if (!options.quiet) console.log('📡 Registering with remote server...');\n authKey = await ensureRemoteAuthKey(config.resolvedRemoteServer.url);\n if (!options.quiet) console.log('✓ Registered with remote server');\n }\n initDatabase({ url: config.resolvedRemoteServer.url, authKey });\n if (!options.quiet) console.log(`📡 Using remote database: ${config.resolvedRemoteServer.url}`);\n\n const port = options.port || config.server.port;\n const host = options.host || config.server.host || '0.0.0.0';\n // Public URL for web UI: CLI option > config > auto-detect\n const publicUrl = options.publicUrl || config.server.publicUrl;\n\n const app = await createApp({ quiet: options.quiet });\n\n if (!options.quiet) {\n console.log(`\\n🚀 SparkECoder API Server`);\n console.log(` → Running at http://${host}:${port}`);\n if (publicUrl) {\n console.log(` → Public URL: ${publicUrl}`);\n }\n console.log(` → Working directory: ${config.resolvedWorkingDirectory}`);\n console.log(` → Default model: ${config.defaultModel}`);\n console.log(` → OpenAPI spec: http://${host}:${port}/openapi.json\\n`);\n }\n\n serverInstance = serve({\n fetch: app.fetch,\n port,\n hostname: host,\n });\n\n // Start Web UI if enabled (default: true)\n let webPort: number | undefined;\n let webStarted: boolean | undefined;\n if (options.webUI !== false) {\n const result = await startWebUI(port, options.webPort || DEFAULT_WEB_PORT, options.quiet, publicUrl);\n webPort = result.port;\n webStarted = result.started;\n }\n\n return { app, port, host, webPort, webStarted };\n}\n\nexport function stopServer() {\n // Stop web UI first\n stopWebUI();\n \n // Kill all sparkecoder tmux sessions (cleanup) - fire and forget\n tmux.listSessions().then(async (sessions) => {\n for (const id of sessions) {\n await tmux.killTerminal(id);\n }\n }).catch(() => {\n // Ignore 
cleanup errors\n });\n \n if (serverInstance) {\n serverInstance.close();\n serverInstance = null;\n }\n closeDatabase();\n}\n\nfunction generateOpenAPISpec() {\n return {\n openapi: '3.1.0',\n info: {\n title: 'SparkECoder API',\n version: '0.1.0',\n description:\n 'A powerful coding agent CLI with HTTP API for development environments. Supports streaming responses following the Vercel AI SDK data stream protocol.',\n },\n servers: [{ url: 'http://localhost:3141', description: 'Local development' }],\n paths: {\n '/': {\n get: {\n summary: 'API Info',\n description: 'Get basic API information and available endpoints',\n responses: {\n 200: {\n description: 'API information',\n content: { 'application/json': {} },\n },\n },\n },\n },\n '/health': {\n get: {\n summary: 'Health Check',\n description: 'Check API health status and configuration',\n responses: {\n 200: {\n description: 'API is healthy',\n content: { 'application/json': {} },\n },\n },\n },\n },\n '/health/ready': {\n get: {\n summary: 'Readiness Check',\n description: 'Check if the API is ready to accept requests',\n responses: {\n 200: { description: 'API is ready' },\n 503: { description: 'API is not ready' },\n },\n },\n },\n '/sessions': {\n get: {\n summary: 'List Sessions',\n description: 'Get a list of all agent sessions',\n parameters: [\n { name: 'limit', in: 'query', schema: { type: 'integer', default: 50 } },\n { name: 'offset', in: 'query', schema: { type: 'integer', default: 0 } },\n ],\n responses: {\n 200: { description: 'List of sessions' },\n },\n },\n post: {\n summary: 'Create Session',\n description: 'Create a new agent session',\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n name: { type: 'string' },\n workingDirectory: { type: 'string' },\n model: { type: 'string' },\n toolApprovals: { type: 'object' },\n },\n },\n },\n },\n },\n responses: {\n 201: { description: 'Session created' },\n },\n },\n },\n '/sessions/{id}': {\n get: 
{\n summary: 'Get Session',\n description: 'Get details of a specific session',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Session details' },\n 404: { description: 'Session not found' },\n },\n },\n delete: {\n summary: 'Delete Session',\n description: 'Delete a session and all its data',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Session deleted' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/sessions/{id}/messages': {\n get: {\n summary: 'Get Messages',\n description: 'Get message history for a session',\n parameters: [\n { name: 'id', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'limit', in: 'query', schema: { type: 'integer', default: 100 } },\n ],\n responses: {\n 200: { description: 'Message history' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/sessions/{id}/clear': {\n post: {\n summary: 'Clear Context',\n description: 'Clear conversation context for a session',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Context cleared' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/run': {\n post: {\n summary: 'Run Agent (Streaming)',\n description:\n 'Run the agent with a prompt and receive streaming response. 
Returns SSE stream following Vercel AI SDK data stream protocol.',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['prompt'],\n properties: {\n prompt: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: {\n description: 'SSE stream of agent output',\n content: { 'text/event-stream': {} },\n },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/generate': {\n post: {\n summary: 'Run Agent (Non-streaming)',\n description: 'Run the agent and receive complete response',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['prompt'],\n properties: {\n prompt: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: { description: 'Agent response' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/approve/{toolCallId}': {\n post: {\n summary: 'Approve Tool',\n description: 'Approve a pending tool execution',\n parameters: [\n { name: 'id', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'toolCallId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n responses: {\n 200: { description: 'Tool approved and executed' },\n 400: { description: 'Approval failed' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/reject/{toolCallId}': {\n post: {\n summary: 'Reject Tool',\n description: 'Reject a pending tool execution',\n parameters: [\n { name: 'id', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'toolCallId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n reason: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: { 
description: 'Tool rejected' },\n 400: { description: 'Rejection failed' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/{id}/approvals': {\n get: {\n summary: 'Get Pending Approvals',\n description: 'Get all pending tool approvals for a session',\n parameters: [{ name: 'id', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Pending approvals' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/agents/quick': {\n post: {\n summary: 'Quick Start',\n description: 'Create a session and run agent in one request',\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['prompt'],\n properties: {\n prompt: { type: 'string' },\n name: { type: 'string' },\n workingDirectory: { type: 'string' },\n model: { type: 'string' },\n toolApprovals: { type: 'object' },\n },\n },\n },\n },\n },\n responses: {\n 200: {\n description: 'SSE stream of agent output',\n content: { 'text/event-stream': {} },\n },\n },\n },\n },\n '/sessions/{sessionId}/terminals': {\n get: {\n summary: 'List Terminals',\n description: 'Get all terminals for a session',\n parameters: [{ name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'List of terminals' },\n 404: { description: 'Session not found' },\n },\n },\n post: {\n summary: 'Spawn Terminal',\n description: 'Start a new background process',\n parameters: [{ name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } }],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['command'],\n properties: {\n command: { type: 'string' },\n cwd: { type: 'string' },\n name: { type: 'string' },\n },\n },\n },\n },\n },\n responses: {\n 201: { description: 'Terminal spawned' },\n 404: { description: 'Session not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}': {\n get: {\n summary: 'Get 
Terminal Status',\n description: 'Get status and details of a terminal',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n responses: {\n 200: { description: 'Terminal status' },\n 404: { description: 'Terminal not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/logs': {\n get: {\n summary: 'Get Terminal Logs',\n description: 'Get output logs from a terminal',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'tail', in: 'query', schema: { type: 'integer' } },\n ],\n responses: {\n 200: { description: 'Terminal logs' },\n 404: { description: 'Terminal not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/kill': {\n post: {\n summary: 'Kill Terminal',\n description: 'Stop a running terminal process',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n signal: { type: 'string', enum: ['SIGTERM', 'SIGKILL'] },\n },\n },\n },\n },\n },\n responses: {\n 200: { description: 'Terminal killed' },\n 400: { description: 'Failed to kill terminal' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/write': {\n post: {\n summary: 'Write to Terminal',\n description: 'Send input to terminal stdin',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n requestBody: {\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n required: ['input'],\n properties: {\n input: { type: 
'string' },\n },\n },\n },\n },\n },\n responses: {\n 200: { description: 'Input sent' },\n 400: { description: 'Failed to write' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/{terminalId}/stream': {\n get: {\n summary: 'Stream Terminal Output',\n description: 'SSE stream of terminal output',\n parameters: [\n { name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } },\n { name: 'terminalId', in: 'path', required: true, schema: { type: 'string' } },\n ],\n responses: {\n 200: { description: 'SSE stream', content: { 'text/event-stream': {} } },\n 404: { description: 'Terminal not found' },\n },\n },\n },\n '/sessions/{sessionId}/terminals/kill-all': {\n post: {\n summary: 'Kill All Terminals',\n description: 'Stop all running terminals for a session',\n parameters: [{ name: 'sessionId', in: 'path', required: true, schema: { type: 'string' } }],\n responses: {\n 200: { description: 'Terminals killed' },\n },\n },\n },\n },\n components: {\n schemas: {\n Session: {\n type: 'object',\n properties: {\n id: { type: 'string' },\n name: { type: 'string' },\n workingDirectory: { type: 'string' },\n model: { type: 'string' },\n status: { type: 'string', enum: ['active', 'waiting', 'completed', 'error'] },\n createdAt: { type: 'string', format: 'date-time' },\n updatedAt: { type: 'string', format: 'date-time' },\n },\n },\n Message: {\n type: 'object',\n properties: {\n id: { type: 'string' },\n role: { type: 'string', enum: ['user', 'assistant', 'system', 'tool'] },\n content: { type: 'object' },\n createdAt: { type: 'string', format: 'date-time' },\n },\n },\n ToolExecution: {\n type: 'object',\n properties: {\n id: { type: 'string' },\n toolCallId: { type: 'string' },\n toolName: { type: 'string' },\n input: { type: 'object' },\n output: { type: 'object' },\n status: { type: 'string', enum: ['pending', 'approved', 'rejected', 'completed', 'error'] },\n requiresApproval: { type: 'boolean' },\n },\n },\n Terminal: {\n type: 'object',\n properties: 
{\n id: { type: 'string' },\n name: { type: 'string' },\n command: { type: 'string' },\n cwd: { type: 'string' },\n pid: { type: 'integer' },\n status: { type: 'string', enum: ['running', 'stopped', 'error'] },\n exitCode: { type: 'integer' },\n error: { type: 'string' },\n createdAt: { type: 'string', format: 'date-time' },\n stoppedAt: { type: 'string', format: 'date-time' },\n },\n },\n },\n },\n };\n}\n\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport { existsSync, mkdirSync, writeFileSync, readdirSync, statSync, unlinkSync } from 'node:fs';\nimport { readdir } from 'node:fs/promises';\nimport { join, basename, extname, relative } from 'node:path';\nimport { nanoid } from 'nanoid';\nimport { sessionQueries, messageQueries, todoQueries, toolExecutionQueries, activeStreamQueries, checkpointQueries, type SessionConfig } from '../../db/index.js';\nimport { Agent } from '../../agent/index.js';\nimport { getConfig, getAppDataDirectory } from '../../config/index.js';\nimport * as tmux from '../../terminal/tmux.js';\nimport { getCheckpoints, revertToCheckpoint, getSessionDiff, clearCheckpointManager } from '../../checkpoints/index.js';\n\nconst sessions = new Hono();\n\nimport { setDevtoolsContext, getDevtoolsContext as getDevtoolsCtx } from '../devtools-store.js';\n\n// In-memory store for pending input (from devtools, etc.)\n// Key: sessionId, Value: { text: string, createdAt: Date }\nconst pendingInputStore = new Map<string, { text: string; createdAt: Date }>();\n\n// Clean up old pending inputs (older than 5 minutes)\nfunction cleanupPendingInputs() {\n const now = Date.now();\n for (const [sessionId, entry] of pendingInputStore) {\n if (now - entry.createdAt.getTime() > 5 * 60 * 1000) {\n pendingInputStore.delete(sessionId);\n }\n }\n}\n\n// Schemas\nconst createSessionSchema = z.object({\n name: z.string().optional(),\n workingDirectory: z.string().optional(),\n model: z.string().optional(),\n 
toolApprovals: z.record(z.string(), z.boolean()).optional(),\n});\n\nconst paginationQuerySchema = z.object({\n limit: z.string().optional(),\n offset: z.string().optional(),\n});\n\nconst messagesQuerySchema = z.object({\n limit: z.string().optional(),\n});\n\n// List all sessions\nsessions.get(\n '/',\n zValidator('query', paginationQuerySchema),\n async (c) => {\n const query = c.req.valid('query');\n const limit = parseInt(query.limit || '50');\n const offset = parseInt(query.offset || '0');\n\n const allSessions = await sessionQueries.list(limit, offset);\n\n // Check for active streams for each session\n const sessionsWithStreamInfo = await Promise.all(allSessions.map(async (s) => {\n const activeStream = await activeStreamQueries.getBySessionId(s.id);\n return {\n id: s.id,\n name: s.name,\n workingDirectory: s.workingDirectory,\n model: s.model,\n status: s.status,\n config: s.config,\n isStreaming: !!activeStream,\n createdAt: s.createdAt.toISOString(),\n updatedAt: s.updatedAt.toISOString(),\n };\n }));\n\n return c.json({\n sessions: sessionsWithStreamInfo,\n count: allSessions.length,\n limit,\n offset,\n });\n }\n);\n\n// Create a new session\nsessions.post(\n '/',\n zValidator('json', createSessionSchema),\n async (c) => {\n const body = c.req.valid('json');\n const config = getConfig();\n\n const agent = await Agent.create({\n name: body.name,\n workingDirectory: body.workingDirectory || config.resolvedWorkingDirectory,\n model: body.model || config.defaultModel,\n sessionConfig: body.toolApprovals ? 
{ toolApprovals: body.toolApprovals } : undefined,\n });\n\n const session = agent.getSession();\n\n return c.json({\n id: session.id,\n name: session.name,\n workingDirectory: session.workingDirectory,\n model: session.model,\n status: session.status,\n createdAt: session.createdAt.toISOString(),\n }, 201);\n }\n);\n\n// Get a specific session\nsessions.get('/:id', async (c) => {\n const id = c.req.param('id');\n const session = await sessionQueries.getById(id);\n\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const contextStats = await (async () => {\n const agent = await Agent.create({ sessionId: id });\n return agent.getContextStats();\n })();\n\n const todos = await todoQueries.getBySession(id);\n const pendingApprovals = await toolExecutionQueries.getPendingApprovals(id);\n\n return c.json({\n id: session.id,\n name: session.name,\n workingDirectory: session.workingDirectory,\n model: session.model,\n status: session.status,\n config: session.config,\n createdAt: session.createdAt.toISOString(),\n updatedAt: session.updatedAt.toISOString(),\n context: contextStats,\n todos: todos.map((t) => ({\n id: t.id,\n content: t.content,\n status: t.status,\n order: t.order,\n })),\n pendingApprovals: pendingApprovals.map((p) => ({\n id: p.id,\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n input: p.input,\n })),\n });\n});\n\n// Get session messages (returns AI SDK ModelMessage format)\nsessions.get(\n '/:id/messages',\n zValidator('query', messagesQuerySchema),\n async (c) => {\n const id = c.req.param('id');\n const query = c.req.valid('query');\n const limit = parseInt(query.limit || '100');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const messages = await messageQueries.getRecentBySession(id, limit);\n\n return c.json({\n sessionId: id,\n messages: messages.map((m) => ({\n id: m.id,\n ...m.modelMessage, // Spread the AI SDK ModelMessage 
(role, content)\n createdAt: m.createdAt.toISOString(),\n })),\n count: messages.length,\n });\n }\n);\n\n// Get session tool executions\nsessions.get('/:id/tools', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const executions = await toolExecutionQueries.getBySession(id);\n\n return c.json({\n sessionId: id,\n executions: executions.map((e) => ({\n id: e.id,\n toolCallId: e.toolCallId,\n toolName: e.toolName,\n input: e.input,\n output: e.output,\n status: e.status,\n requiresApproval: e.requiresApproval,\n error: e.error,\n startedAt: e.startedAt.toISOString(),\n completedAt: e.completedAt?.toISOString(),\n })),\n count: executions.length,\n });\n});\n\n// Update session (e.g., change model)\nconst updateSessionSchema = z.object({\n model: z.string().optional(),\n name: z.string().optional(),\n toolApprovals: z.record(z.string(), z.boolean()).optional(),\n});\n\nsessions.patch(\n '/:id',\n zValidator('json', updateSessionSchema),\n async (c) => {\n const id = c.req.param('id');\n const body = c.req.valid('json');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Build updates object\n const updates: { model?: string; name?: string; config?: SessionConfig } = {};\n if (body.model) updates.model = body.model;\n if (body.name !== undefined) updates.name = body.name;\n \n // Handle toolApprovals update - merge with existing config AND existing toolApprovals\n if (body.toolApprovals !== undefined) {\n const existingConfig = session.config || {};\n const existingToolApprovals = existingConfig.toolApprovals || {};\n updates.config = {\n ...existingConfig,\n toolApprovals: {\n ...existingToolApprovals,\n ...body.toolApprovals,\n },\n };\n }\n\n const updatedSession = Object.keys(updates).length > 0\n ? 
(await sessionQueries.update(id, updates)) || session\n : session;\n\n return c.json({\n id: updatedSession.id,\n name: updatedSession.name,\n model: updatedSession.model,\n status: updatedSession.status,\n workingDirectory: updatedSession.workingDirectory,\n config: updatedSession.config,\n updatedAt: updatedSession.updatedAt.toISOString(),\n });\n }\n);\n\n// Delete a session\nsessions.delete('/:id', async (c) => {\n const id = c.req.param('id');\n\n // Kill all running terminals for this session before deleting\n try {\n const session = await sessionQueries.getById(id);\n if (session) {\n const terminalIds = await tmux.listSessions();\n for (const tid of terminalIds) {\n const meta = await tmux.getMeta(tid, session.workingDirectory);\n if (meta && meta.sessionId === id) {\n await tmux.killTerminal(tid);\n }\n }\n }\n } catch (e) {\n // Ignore cleanup errors\n }\n\n // Clear the checkpoint manager for this session\n clearCheckpointManager(id);\n\n const deleted = await sessionQueries.delete(id);\n if (!deleted) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n return c.json({ success: true, id });\n});\n\n// Clear session context\nsessions.post('/:id/clear', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const agent = await Agent.create({ sessionId: id });\n await agent.clearContext();\n\n return c.json({ success: true, sessionId: id });\n});\n\n// ============================================\n// Pending Input (for devtools integration)\n// ============================================\n\nconst pendingInputSchema = z.object({\n text: z.string(),\n});\n\n// Set pending input for a session (from devtools, CLI, etc.)\nsessions.post(\n '/:id/pending-input',\n zValidator('json', pendingInputSchema),\n async (c) => {\n const id = c.req.param('id');\n const { text } = c.req.valid('json');\n\n // Verify session exists\n const 
session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Store pending input\n pendingInputStore.set(id, { text, createdAt: new Date() });\n\n // Cleanup old entries\n cleanupPendingInputs();\n\n return c.json({ success: true, sessionId: id });\n }\n);\n\n// Get and clear pending input for a session\nsessions.get('/:id/pending-input', async (c) => {\n const id = c.req.param('id');\n\n // Cleanup old entries first\n cleanupPendingInputs();\n\n const pending = pendingInputStore.get(id);\n if (!pending) {\n return c.json({ hasPendingInput: false, text: null });\n }\n\n // Clear after reading\n pendingInputStore.delete(id);\n\n return c.json({\n hasPendingInput: true,\n text: pending.text,\n createdAt: pending.createdAt.toISOString(),\n });\n});\n\n// ============================================\n// Devtools Context (current page user is viewing)\n// ============================================\n\nconst devtoolsContextSchema = z.object({\n url: z.string(),\n path: z.string(),\n pageName: z.string().optional(),\n screenWidth: z.number().optional(),\n screenHeight: z.number().optional(),\n devicePixelRatio: z.number().optional(),\n});\n\n// Update devtools context (heartbeat with current page info)\nsessions.post(\n '/:id/devtools-context',\n zValidator('json', devtoolsContextSchema),\n async (c) => {\n const id = c.req.param('id');\n const body = c.req.valid('json');\n\n // Store context (don't require session to exist - devtools may connect before session is created)\n setDevtoolsContext(id, {\n url: body.url,\n path: body.path,\n pageName: body.pageName || body.path,\n screenWidth: body.screenWidth,\n screenHeight: body.screenHeight,\n devicePixelRatio: body.devicePixelRatio,\n lastHeartbeat: new Date(),\n });\n\n return c.json({ success: true, sessionId: id });\n }\n);\n\n// Get devtools context for a session\nsessions.get('/:id/devtools-context', async (c) => {\n const id = c.req.param('id');\n\n 
const ctx = getDevtoolsCtx(id);\n if (!ctx) {\n return c.json({ connected: false, context: null });\n }\n\n return c.json({\n connected: true,\n context: {\n url: ctx.url,\n path: ctx.path,\n pageName: ctx.pageName,\n screenWidth: ctx.screenWidth,\n screenHeight: ctx.screenHeight,\n devicePixelRatio: ctx.devicePixelRatio,\n lastHeartbeat: ctx.lastHeartbeat.toISOString(),\n },\n });\n});\n\n// Get todos for a session\nsessions.get('/:id/todos', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const todos = await todoQueries.getBySession(id);\n const pending = todos.filter((t: { status: string }) => t.status === 'pending');\n const inProgress = todos.filter((t: { status: string }) => t.status === 'in_progress');\n const completed = todos.filter((t: { status: string }) => t.status === 'completed');\n const cancelled = todos.filter((t: { status: string }) => t.status === 'cancelled');\n\n // Find the \"next\" todo: first in_progress, or first pending\n const nextTodo = inProgress[0] || pending[0] || null;\n\n return c.json({\n todos: todos.map((t) => ({\n id: t.id,\n content: t.content,\n status: t.status,\n order: t.order,\n createdAt: t.createdAt.toISOString(),\n updatedAt: t.updatedAt.toISOString(),\n })),\n stats: {\n total: todos.length,\n pending: pending.length,\n inProgress: inProgress.length,\n completed: completed.length,\n cancelled: cancelled.length,\n },\n nextTodo: nextTodo ? 
{\n id: nextTodo.id,\n content: nextTodo.content,\n status: nextTodo.status,\n } : null,\n });\n});\n\n// Get checkpoints for a session\nsessions.get('/:id/checkpoints', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const checkpoints = await getCheckpoints(id);\n\n return c.json({\n sessionId: id,\n checkpoints: checkpoints.map((cp) => ({\n id: cp.id,\n messageSequence: cp.messageSequence,\n gitHead: cp.gitHead,\n createdAt: cp.createdAt.toISOString(),\n })),\n count: checkpoints.length,\n });\n});\n\n// Revert session to a specific checkpoint\nsessions.post('/:id/revert/:checkpointId', async (c) => {\n const sessionId = c.req.param('id');\n const checkpointId = c.req.param('checkpointId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Check if there's an active stream - can't revert while streaming\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n if (activeStream) {\n return c.json({ \n error: 'Cannot revert while a stream is active. 
Stop the stream first.',\n streamId: activeStream.streamId,\n }, 409);\n }\n\n const result = await revertToCheckpoint(sessionId, checkpointId);\n\n if (!result.success) {\n return c.json({ error: result.error }, 400);\n }\n\n return c.json({\n success: true,\n sessionId,\n checkpointId,\n filesRestored: result.filesRestored,\n filesDeleted: result.filesDeleted,\n messagesDeleted: result.messagesDeleted,\n checkpointsDeleted: result.checkpointsDeleted,\n });\n});\n\n// Get the diff for a session (all file changes since start)\nsessions.get('/:id/diff', async (c) => {\n const id = c.req.param('id');\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const diff = await getSessionDiff(id);\n\n return c.json({\n sessionId: id,\n files: diff.files.map((f) => ({\n path: f.path,\n status: f.status,\n hasOriginal: f.originalContent !== null,\n hasCurrent: f.currentContent !== null,\n // Optionally include content (can be large)\n // originalContent: f.originalContent,\n // currentContent: f.currentContent,\n })),\n summary: {\n created: diff.files.filter(f => f.status === 'created').length,\n modified: diff.files.filter(f => f.status === 'modified').length,\n deleted: diff.files.filter(f => f.status === 'deleted').length,\n total: diff.files.length,\n },\n });\n});\n\n// Get full diff content for a specific file\nsessions.get('/:id/diff/:filePath', async (c) => {\n const sessionId = c.req.param('id');\n const filePath = decodeURIComponent(c.req.param('filePath'));\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const diff = await getSessionDiff(sessionId);\n const fileDiff = diff.files.find(f => f.path === filePath);\n\n if (!fileDiff) {\n return c.json({ error: 'File not found in diff' }, 404);\n }\n\n return c.json({\n sessionId,\n path: fileDiff.path,\n status: fileDiff.status,\n originalContent: 
fileDiff.originalContent,\n currentContent: fileDiff.currentContent,\n });\n});\n\n// ============================================\n// Attachments API\n// ============================================\n\n/**\n * Get the attachments directory for a session\n */\nfunction getAttachmentsDir(sessionId: string): string {\n const appDataDir = getAppDataDirectory();\n return join(appDataDir, 'attachments', sessionId);\n}\n\n/**\n * Ensure the attachments directory exists\n */\nfunction ensureAttachmentsDir(sessionId: string): string {\n const dir = getAttachmentsDir(sessionId);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n return dir;\n}\n\n// List attachments for a session\nsessions.get('/:id/attachments', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const dir = getAttachmentsDir(sessionId);\n if (!existsSync(dir)) {\n return c.json({ sessionId, attachments: [], count: 0 });\n }\n\n const files = readdirSync(dir);\n const attachments = files.map((filename) => {\n const filePath = join(dir, filename);\n const stats = statSync(filePath);\n return {\n id: filename.split('_')[0], // Extract the nanoid prefix\n filename,\n path: filePath,\n size: stats.size,\n createdAt: stats.birthtime.toISOString(),\n };\n });\n\n return c.json({\n sessionId,\n attachments,\n count: attachments.length,\n });\n});\n\n// Upload an attachment\nsessions.post('/:id/attachments', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const contentType = c.req.header('content-type') || '';\n \n // Handle multipart form data\n if (contentType.includes('multipart/form-data')) {\n try {\n const formData = await c.req.formData();\n const file = formData.get('file');\n \n if (!file || !(file 
instanceof File)) {\n return c.json({ error: 'No file provided' }, 400);\n }\n\n const dir = ensureAttachmentsDir(sessionId);\n const id = nanoid(10);\n const ext = extname(file.name) || '';\n const safeFilename = `${id}_${basename(file.name).replace(/[^a-zA-Z0-9._-]/g, '_')}`;\n const filePath = join(dir, safeFilename);\n\n const arrayBuffer = await file.arrayBuffer();\n writeFileSync(filePath, Buffer.from(arrayBuffer));\n\n return c.json({\n id,\n filename: file.name,\n storedAs: safeFilename,\n path: filePath,\n size: file.size,\n mediaType: file.type,\n sessionId,\n }, 201);\n } catch (err) {\n console.error('Failed to upload attachment:', err);\n return c.json({ error: 'Failed to upload file' }, 500);\n }\n }\n\n // Handle base64 JSON upload\n try {\n const body = await c.req.json() as { \n filename: string; \n data: string; \n mediaType?: string;\n };\n \n if (!body.filename || !body.data) {\n return c.json({ error: 'Missing filename or data' }, 400);\n }\n\n const dir = ensureAttachmentsDir(sessionId);\n const id = nanoid(10);\n const ext = extname(body.filename) || '';\n const safeFilename = `${id}_${basename(body.filename).replace(/[^a-zA-Z0-9._-]/g, '_')}`;\n const filePath = join(dir, safeFilename);\n\n // Handle base64 data URL or raw base64\n let base64Data = body.data;\n if (base64Data.includes(',')) {\n base64Data = base64Data.split(',')[1];\n }\n \n const buffer = Buffer.from(base64Data, 'base64');\n writeFileSync(filePath, buffer);\n\n return c.json({\n id,\n filename: body.filename,\n storedAs: safeFilename,\n path: filePath,\n size: buffer.length,\n mediaType: body.mediaType,\n sessionId,\n }, 201);\n } catch (err) {\n console.error('Failed to upload attachment:', err);\n return c.json({ error: 'Failed to upload file' }, 500);\n }\n});\n\n// Delete an attachment\nsessions.delete('/:id/attachments/:attachmentId', async (c) => {\n const sessionId = c.req.param('id');\n const attachmentId = c.req.param('attachmentId');\n\n const session = await 
sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const dir = getAttachmentsDir(sessionId);\n if (!existsSync(dir)) {\n return c.json({ error: 'Attachment not found' }, 404);\n }\n\n // Find the file that starts with the attachment ID\n const files = readdirSync(dir);\n const file = files.find(f => f.startsWith(attachmentId + '_'));\n \n if (!file) {\n return c.json({ error: 'Attachment not found' }, 404);\n }\n\n const filePath = join(dir, file);\n unlinkSync(filePath);\n\n return c.json({ success: true, id: attachmentId });\n});\n\n// ============================================================================\n// Workspace Files API - For @ mentions in chat input\n// ============================================================================\n\nconst filesQuerySchema = z.object({\n query: z.string().optional(), // Filter query (e.g., \"src/com\" to match \"src/components\")\n limit: z.string().optional(), // Max results (default 50)\n});\n\n// Directories to ignore when listing files\nconst IGNORED_DIRECTORIES = new Set([\n 'node_modules',\n '.git',\n '.next',\n 'dist',\n 'build',\n '.turbo',\n '.cache',\n 'coverage',\n '__pycache__',\n '.pytest_cache',\n 'venv',\n '.venv',\n 'target', // Rust\n '.idea',\n '.vscode',\n]);\n\n// File extensions to ignore\nconst IGNORED_EXTENSIONS = new Set([\n '.pyc',\n '.pyo',\n '.class',\n '.o',\n '.obj',\n '.exe',\n '.dll',\n '.so',\n '.dylib',\n]);\n\ninterface WorkspaceFile {\n path: string; // Relative path from working directory\n name: string; // File/folder name\n type: 'file' | 'folder';\n extension?: string; // For files only\n}\n\n// Maximum depth to recurse into directories\nconst MAX_RECURSION_DEPTH = 10;\n\n/**\n * Check if a query matches a file/folder name using fuzzy matching\n * Matches if:\n * - Name contains the query (substring match)\n * - Path contains the query\n * - Query matches start of name segments (e.g., \"comp\" matches 
\"components\")\n */\nfunction matchesQuery(name: string, relativePath: string, query: string): boolean {\n if (!query) return true;\n \n const queryLower = query.toLowerCase();\n const nameLower = name.toLowerCase();\n const pathLower = relativePath.toLowerCase();\n \n // Exact substring match in name or path\n if (nameLower.includes(queryLower) || pathLower.includes(queryLower)) {\n return true;\n }\n \n // Match by path segments (e.g., \"prompting\" matches \"apps/prompting\")\n const pathSegments = pathLower.split('/');\n for (const segment of pathSegments) {\n if (segment.includes(queryLower) || segment.startsWith(queryLower)) {\n return true;\n }\n }\n \n return false;\n}\n\n/**\n * Calculate a relevance score for sorting results\n * Higher scores = more relevant\n */\nfunction getRelevanceScore(name: string, relativePath: string, query: string): number {\n if (!query) return 0;\n \n const queryLower = query.toLowerCase();\n const nameLower = name.toLowerCase();\n const pathLower = relativePath.toLowerCase();\n \n // Exact name match\n if (nameLower === queryLower) return 100;\n \n // Name starts with query\n if (nameLower.startsWith(queryLower)) return 90;\n \n // Name contains query\n if (nameLower.includes(queryLower)) return 80;\n \n // Last path segment matches\n const lastSegment = pathLower.split('/').pop() || '';\n if (lastSegment === queryLower) return 70;\n if (lastSegment.startsWith(queryLower)) return 60;\n if (lastSegment.includes(queryLower)) return 50;\n \n // Path contains query\n if (pathLower.includes(queryLower)) return 30;\n \n // Any segment starts with query\n const segments = pathLower.split('/');\n for (const segment of segments) {\n if (segment.startsWith(queryLower)) return 40;\n }\n \n return 10;\n}\n\n/**\n * Recursively list files and folders in a directory\n * Always recurses into directories (up to MAX_RECURSION_DEPTH) to find deep matches\n */\nasync function listWorkspaceFiles(\n baseDir: string,\n currentDir: string,\n query: 
string,\n limit: number,\n results: WorkspaceFile[] = [],\n depth: number = 0\n): Promise<WorkspaceFile[]> {\n // Stop recursion at max depth or if we have enough results\n if (depth > MAX_RECURSION_DEPTH || results.length >= limit * 2) {\n return results;\n }\n\n try {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n // Stop if we have way more than enough results\n if (results.length >= limit * 2) break;\n\n const fullPath = join(currentDir, entry.name);\n const relativePath = relative(baseDir, fullPath);\n\n // Skip ignored directories\n if (entry.isDirectory() && IGNORED_DIRECTORIES.has(entry.name)) {\n continue;\n }\n\n // Skip hidden files/folders (starting with .)\n if (entry.name.startsWith('.')) {\n continue;\n }\n\n // Skip ignored extensions\n const ext = extname(entry.name).toLowerCase();\n if (IGNORED_EXTENSIONS.has(ext)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n // Add folder if it matches query\n if (matchesQuery(entry.name, relativePath, query)) {\n results.push({\n path: relativePath,\n name: entry.name,\n type: 'folder',\n });\n }\n \n // Always recurse into directories to find deep matches\n await listWorkspaceFiles(baseDir, fullPath, query, limit, results, depth + 1);\n } else if (entry.isFile()) {\n // Add file if it matches query\n if (matchesQuery(entry.name, relativePath, query)) {\n results.push({\n path: relativePath,\n name: entry.name,\n type: 'file',\n extension: ext || undefined,\n });\n }\n }\n }\n } catch {\n // Ignore permission errors etc.\n }\n\n return results;\n}\n\n// List workspace files for a session (for @ mentions)\nsessions.get(\n '/:id/files',\n zValidator('query', filesQuerySchema),\n async (c) => {\n const sessionId = c.req.param('id');\n const { query = '', limit: limitStr = '50' } = c.req.valid('query');\n const limit = Math.min(parseInt(limitStr) || 50, 100); // Cap at 100\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) 
{\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const workingDirectory = session.workingDirectory;\n \n if (!existsSync(workingDirectory)) {\n return c.json({ \n sessionId,\n workingDirectory,\n files: [], \n count: 0,\n error: 'Working directory does not exist' \n });\n }\n\n try {\n const allFiles = await listWorkspaceFiles(\n workingDirectory,\n workingDirectory,\n query,\n limit\n );\n\n // Sort by relevance (best matches first), then folders first, then alphabetically\n allFiles.sort((a, b) => {\n // If there's a query, sort by relevance first\n if (query) {\n const scoreA = getRelevanceScore(a.name, a.path, query);\n const scoreB = getRelevanceScore(b.name, b.path, query);\n if (scoreA !== scoreB) {\n return scoreB - scoreA; // Higher score first\n }\n }\n \n // Then folders first\n if (a.type !== b.type) {\n return a.type === 'folder' ? -1 : 1;\n }\n \n // Then alphabetically by path\n return a.path.localeCompare(b.path);\n });\n\n // Limit results after sorting to get the most relevant ones\n const files = allFiles.slice(0, limit);\n\n return c.json({\n sessionId,\n workingDirectory,\n files,\n count: files.length,\n query,\n });\n } catch (err) {\n console.error('Failed to list workspace files:', err);\n return c.json({ \n error: 'Failed to list files',\n sessionId,\n workingDirectory,\n files: [],\n count: 0,\n }, 500);\n }\n }\n);\n\n// ============================================\n// Session Files (proxied to remote server)\n// ============================================\n\nsessions.get('/:id/session-files', async (c) => {\n const sessionId = c.req.param('id');\n\n try {\n const { isRemoteConfigured } = await import('../../db/remote.js');\n if (!isRemoteConfigured()) {\n return c.json({ files: [] });\n }\n\n const { storageQueries } = await import('../../db/remote.js');\n const files = await storageQueries.getSessionFiles(sessionId);\n return c.json({ sessionId, files });\n } catch (err: any) {\n console.error('Failed to get session 
files:', err.message);\n return c.json({ sessionId, files: [] });\n }\n});\n\nsessions.get('/files/:fileId/download', async (c) => {\n const fileId = c.req.param('fileId');\n\n try {\n const { isRemoteConfigured, storageQueries } = await import('../../db/remote.js');\n if (!isRemoteConfigured()) {\n return c.json({ error: 'Remote server not configured' }, 503);\n }\n\n const result = await storageQueries.getDownloadUrl(fileId);\n return c.json(result);\n } catch (err: any) {\n return c.json({ error: err.message }, 500);\n }\n});\n\n/**\n * GET /sessions/:id/browser-recording\n * Get the browser recording(s) for a session (MP4 download URLs).\n */\nsessions.get('/:id/browser-recording', async (c) => {\n const sessionId = c.req.param('id');\n\n try {\n const { isRemoteConfigured, storageQueries } = await import('../../db/remote.js');\n if (!isRemoteConfigured()) {\n return c.json({ sessionId, recordings: [] });\n }\n\n const files = await storageQueries.getSessionFiles(sessionId);\n const recordings = files.filter((f) => f.category === 'browser-recording');\n\n if (recordings.length === 0) {\n return c.json({ sessionId, recordings: [], message: 'No browser recordings for this session' });\n }\n\n return c.json({\n sessionId,\n recordings: recordings.map((r) => ({\n id: r.id,\n fileName: r.fileName,\n sizeBytes: r.sizeBytes,\n createdAt: r.createdAt,\n downloadUrl: r.downloadUrl,\n expiresAt: r.downloadUrlExpiresAt,\n })),\n });\n } catch (err: any) {\n console.error('Failed to get browser recordings:', err.message);\n return c.json({ sessionId, recordings: [], error: err.message });\n }\n});\n\nexport { sessions };\n","import {\n streamText,\n generateText,\n tool,\n stepCountIs,\n type ToolSet,\n type ModelMessage,\n} from 'ai';\nimport { isAnthropicModel, resolveModel } from './model.js';\nimport { z } from 'zod';\nimport { nanoid } from 'nanoid';\nimport {\n sessionQueries,\n toolExecutionQueries,\n Session,\n ToolExecution,\n} from '../db/index.js';\nimport { 
getConfig, requiresApproval, SessionConfig } from '../config/index.js';\nimport { createTools, BashToolProgress, WriteFileProgress, SearchToolProgress, type TaskCompletionSignal } from '../tools/index.js';\nimport { ContextManager } from './context.js';\nimport { buildSystemPrompt, buildTaskPromptAddendum } from './prompts.js';\nimport { sendWebhook, type WebhookEvent } from '../utils/webhook.js';\nimport { type TaskConfig } from '../db/index.js';\n\nconst MAX_SSE_FIELD_LENGTH = 8 * 1024;\nconst SSE_PREVIEW_LENGTH = 2 * 1024;\n\nfunction truncateWriteFileInput(input: Record<string, unknown>): Record<string, unknown> {\n const out = { ...input };\n for (const key of ['content', 'old_string', 'new_string'] as const) {\n const val = out[key];\n if (typeof val === 'string' && val.length > MAX_SSE_FIELD_LENGTH) {\n out[key] = `${val.slice(0, SSE_PREVIEW_LENGTH)}\\n... (truncated)`;\n out[`${key}Truncated`] = true;\n out[`${key}Length`] = val.length;\n }\n }\n return out;\n}\n\n// Shared store for approval resolvers (needed because approve/reject come from different HTTP requests)\nconst approvalResolvers = new Map<string, { \n resolve: (approved: boolean) => void; \n reason?: string;\n sessionId: string;\n}>();\n\nexport interface AgentOptions {\n sessionId?: string;\n name?: string;\n workingDirectory?: string;\n model?: string;\n sessionConfig?: Partial<SessionConfig>;\n}\n\n/** Attachment for user messages (images, files) */\nexport interface MessageAttachment {\n type: 'image' | 'file';\n data: string; // base64 data URL or raw base64\n mediaType?: string;\n filename?: string; // Original filename for context\n savedPath?: string; // Path where file was saved on disk\n}\n\nexport interface AgentRunOptions {\n prompt: string;\n /** Optional file/image attachments to include in the message */\n attachments?: MessageAttachment[];\n abortSignal?: AbortSignal;\n /** Skip saving user message (if already saved externally) */\n skipSaveUserMessage?: boolean;\n onText?: 
(text: string) => void;\n onToolCall?: (toolCall: { toolCallId: string; toolName: string; input: unknown }) => void;\n onToolResult?: (result: { toolCallId: string; toolName: string; output: unknown }) => void;\n onApprovalRequired?: (execution: ToolExecution) => void;\n onStepFinish?: (step: { text?: string; toolCalls?: unknown[]; usage?: unknown }) => void;\n onAbort?: (info: { steps: unknown[] }) => void;\n /** Called when a tool (like bash, write_file, or explore_agent) has progress to report */\n onToolProgress?: (progress: { toolName: string; data: BashToolProgress | WriteFileProgress | SearchToolProgress }) => void;\n}\n\nexport interface AgentStreamResult {\n sessionId: string;\n stream: ReturnType<typeof streamText>;\n waitForApprovals: () => Promise<ToolExecution[]>;\n /** Call this after stream completes to save response messages */\n saveResponseMessages: () => Promise<void>;\n}\n\n/**\n * Strip the `data:...;base64,` prefix from a data URL, returning raw base64.\n * Some runtimes (Modal, Vercel edge) don't support `fetch('data:...')`,\n * and the AI SDK's downloadAssets will try to fetch any string that\n * parses as a URL. 
Passing raw base64 avoids the download path entirely.\n */\nfunction stripDataUrlPrefix(data: string): string {\n const commaIdx = data.indexOf(',');\n if (commaIdx !== -1 && data.startsWith('data:')) {\n return data.slice(commaIdx + 1);\n }\n return data;\n}\n\n/**\n * The main coding agent that orchestrates LLM interactions\n */\nexport class Agent {\n private session: Session;\n private context: ContextManager;\n private baseTools: ToolSet;\n private pendingApprovals: Map<string, ToolExecution> = new Map();\n\n private constructor(session: Session, context: ContextManager, tools: ToolSet) {\n this.session = session;\n this.context = context;\n this.baseTools = tools;\n }\n\n /**\n * Create tools with optional progress callbacks\n */\n private async createToolsWithCallbacks(options: {\n onToolProgress?: AgentRunOptions['onToolProgress'];\n }): Promise<ToolSet> {\n const config = getConfig();\n return createTools({\n sessionId: this.session.id,\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n onBashProgress: options.onToolProgress\n ? (progress) => options.onToolProgress!({ toolName: 'bash', data: progress })\n : undefined,\n onWriteFileProgress: options.onToolProgress\n ? (progress) => options.onToolProgress!({ toolName: 'write_file', data: progress })\n : undefined,\n onSearchProgress: options.onToolProgress\n ? 
(progress) => options.onToolProgress!({ toolName: 'explore_agent', data: progress })\n : undefined,\n });\n }\n\n /**\n * Create or resume an agent session\n */\n static async create(options: AgentOptions = {}): Promise<Agent> {\n const config = getConfig();\n\n // Get or create session\n let session: Session;\n\n if (options.sessionId) {\n const existing = await sessionQueries.getById(options.sessionId);\n if (!existing) {\n throw new Error(`Session not found: ${options.sessionId}`);\n }\n session = existing;\n } else {\n session = await sessionQueries.create({\n name: options.name,\n workingDirectory: options.workingDirectory || config.resolvedWorkingDirectory,\n model: options.model || config.defaultModel,\n config: options.sessionConfig as SessionConfig,\n });\n }\n\n // Create context manager\n const context = new ContextManager({\n sessionId: session.id,\n modelId: session.model || config.defaultModel,\n maxContextChars: config.context?.maxChars || 200_000,\n keepRecentMessages: config.context?.keepRecentMessages || 10,\n autoSummarize: config.context?.autoSummarize ?? 
true,\n });\n\n // Create tools\n const tools = await createTools({\n sessionId: session.id,\n workingDirectory: session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n });\n\n return new Agent(session, context, tools);\n }\n\n /**\n * Get the session ID\n */\n get sessionId(): string {\n return this.session.id;\n }\n\n /**\n * Get session details\n */\n getSession(): Session {\n return this.session;\n }\n\n /**\n * Build user message content from prompt and attachments\n */\n private buildUserMessageContent(\n prompt: string,\n attachments?: MessageAttachment[]\n ): string | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> {\n if (!attachments || attachments.length === 0) {\n return prompt;\n }\n\n // Build content array with text and file parts\n const contentParts: Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> = [];\n \n // IMPORTANT: Put file location info FIRST so the model knows where files are saved\n // This gives the model context about file paths before it sees the images\n const attachmentDescriptions = attachments\n .map((a, i) => {\n const name = a.filename || `attachment_${i + 1}`;\n const typeLabel = a.type === 'image' ? 'Image' : 'File';\n const location = a.savedPath || '(path unknown)';\n return `${i + 1}. ${typeLabel}: \"${name}\" saved at: ${location}`;\n })\n .join('\\n');\n \n contentParts.push({ \n type: 'text', \n text: `[FILE ATTACHMENTS - The user has attached the following files which are saved on disk]\\n${attachmentDescriptions}\\n\\nYou can reference these files by their paths above. 
The file contents are also shown inline below.` \n });\n \n // Add user's text prompt\n if (prompt) {\n contentParts.push({ type: 'text', text: `\\n[USER MESSAGE]\\n${prompt}` });\n }\n \n // Add file/image parts with filename and path metadata\n for (const attachment of attachments) {\n if (attachment.type === 'image') {\n contentParts.push({\n type: 'image',\n image: stripDataUrlPrefix(attachment.data),\n mediaType: attachment.mediaType,\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n } else {\n contentParts.push({\n type: 'file',\n data: stripDataUrlPrefix(attachment.data),\n mediaType: attachment.mediaType || 'application/octet-stream',\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n }\n }\n \n return contentParts;\n }\n\n /**\n * Run the agent with a prompt (streaming)\n */\n async stream(options: AgentRunOptions): Promise<AgentStreamResult> {\n const config = getConfig();\n\n // Build user message content with attachments\n const userContent = this.buildUserMessageContent(options.prompt, options.attachments);\n\n // Add user message to context (skip if already saved externally)\n if (!options.skipSaveUserMessage) {\n this.context.addUserMessage(userContent);\n }\n\n // Update session status\n await sessionQueries.updateStatus(this.session.id, 'active');\n\n // Build system prompt with enhanced skill discovery\n const systemPrompt = await buildSystemPrompt({\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n sessionId: this.session.id,\n discoveredSkills: config.discoveredSkills,\n // TODO: Pass activeFiles from client for glob matching\n activeFiles: [],\n });\n\n // Get conversation history\n const messages = await this.context.getMessages();\n\n // Create tools with progress callbacks if needed\n const tools = options.onToolProgress\n ? 
await this.createToolsWithCallbacks({ onToolProgress: options.onToolProgress })\n : this.baseTools;\n\n // Wrap tools with approval checking\n const wrappedTools = this.wrapToolsWithApproval(options, tools);\n\n // Create stream with reasoning enabled for supported models\n const useAnthropic = isAnthropicModel(this.session.model);\n const stream = streamText({\n model: resolveModel(this.session.model) as any,\n system: systemPrompt,\n messages: messages as any,\n tools: wrappedTools,\n stopWhen: stepCountIs(500),\n // Forward abort signal if provided\n abortSignal: options.abortSignal,\n // Enable extended thinking/reasoning for models that support it\n providerOptions: useAnthropic\n ? {\n anthropic: {\n toolStreaming: true,\n thinking: {\n type: 'enabled',\n budgetTokens: 10000,\n },\n },\n }\n : undefined,\n onStepFinish: async (step) => {\n options.onStepFinish?.(step as any);\n },\n onAbort: ({ steps }) => {\n options.onAbort?.({ steps });\n },\n });\n\n // Helper to save response messages after stream completes\n const saveResponseMessages = async () => {\n const result = await stream;\n const response = await result.response;\n const responseMessages = response.messages as ModelMessage[];\n this.context.addResponseMessages(responseMessages);\n };\n\n return {\n sessionId: this.session.id,\n stream,\n waitForApprovals: () => this.waitForApprovals(),\n saveResponseMessages,\n };\n }\n\n /**\n * Run the agent with a prompt (non-streaming)\n */\n async run(options: Omit<AgentRunOptions, 'onText'>): Promise<{ text: string; steps: unknown[] }> {\n const config = getConfig();\n\n // Add user message to context\n this.context.addUserMessage(options.prompt);\n\n // Build system prompt with enhanced skill discovery\n const systemPrompt = await buildSystemPrompt({\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n sessionId: this.session.id,\n discoveredSkills: config.discoveredSkills,\n activeFiles: [],\n 
});\n\n // Get conversation history\n const messages = await this.context.getMessages();\n\n // Create tools with progress callbacks if needed\n const tools = options.onToolProgress\n ? await this.createToolsWithCallbacks({ onToolProgress: options.onToolProgress })\n : this.baseTools;\n\n // Wrap tools with approval checking\n const wrappedTools = this.wrapToolsWithApproval(options, tools);\n\n const useAnthropic = isAnthropicModel(this.session.model);\n const result = await generateText({\n model: resolveModel(this.session.model) as any,\n system: systemPrompt,\n messages: messages as any,\n tools: wrappedTools,\n stopWhen: stepCountIs(500),\n // Enable extended thinking/reasoning for models that support it\n providerOptions: useAnthropic\n ? {\n anthropic: {\n thinking: {\n type: 'enabled',\n budgetTokens: 10000,\n },\n },\n }\n : undefined,\n });\n\n // Save response messages using the proper AI SDK format\n const responseMessages = result.response.messages as ModelMessage[];\n this.context.addResponseMessages(responseMessages);\n\n return {\n text: result.text,\n steps: result.steps,\n };\n }\n\n /**\n * Run the agent in task mode — loops autonomously until the agent calls\n * complete_task or task_failed (or hits maxIterations).\n * All tools run without approval. Webhook events are fired throughout.\n */\n async runTask(options: {\n prompt: string;\n taskConfig: TaskConfig;\n abortSignal?: AbortSignal;\n writeSSE?: (data: string) => Promise<void>;\n onText?: (text: string) => void;\n onToolCall?: (toolCall: { toolCallId: string; toolName: string; input: unknown }) => void;\n onToolResult?: (result: { toolCallId: string; toolName: string; output: unknown }) => void;\n onToolProgress?: AgentRunOptions['onToolProgress'];\n onStepFinish?: AgentRunOptions['onStepFinish'];\n }): Promise<{ status: 'completed' | 'failed'; result?: unknown; error?: string; iterations: number }> {\n const config = getConfig();\n const maxIterations = options.taskConfig.maxIterations ?? 
50;\n const webhookUrl = options.taskConfig.webhookUrl;\n\n const fireWebhook = (type: WebhookEvent['type'], data: unknown) => {\n if (!webhookUrl) return;\n sendWebhook(webhookUrl, {\n type,\n taskId: this.session.id,\n sessionId: this.session.id,\n timestamp: new Date().toISOString(),\n data,\n });\n };\n\n // Completion signal shared between tools and the loop.\n const completion: { signal: TaskCompletionSignal | null } = { signal: null };\n const onComplete = (signal: TaskCompletionSignal) => {\n completion.signal = signal;\n };\n\n // Browser recording for task mode: track proxy + recorder per session\n let taskRecorder: import('../browser/recorder.js').FrameRecorder | null = null;\n const sessionId = this.session.id;\n\n const emit = options.writeSSE;\n\n const bashProgressHandler = (progress: BashToolProgress) => {\n options.onToolProgress?.({ toolName: 'bash', data: progress });\n if (emit) emit(JSON.stringify({ type: 'tool-progress', toolName: 'bash', data: progress })).catch(() => {});\n\n // Start browser proxy + recorder when agent-browser opens\n const port = progress.browserStreamPort;\n if (port && progress.status === 'started') {\n import('../browser/stream-proxy.js').then(({ getOrCreateProxy }) => {\n const proxy = getOrCreateProxy(sessionId, port);\n if (!taskRecorder) {\n import('../browser/recorder.js').then(({ FrameRecorder }) => {\n taskRecorder = new FrameRecorder(sessionId);\n taskRecorder.start();\n });\n }\n if (proxy.listenerCount('frame') === 0) {\n proxy.on('frame', (frame) => {\n taskRecorder?.addFrame(frame);\n if (emit) emit(JSON.stringify({ type: 'browser-frame', data: frame.data, metadata: frame.metadata })).catch(() => {});\n });\n proxy.on('status', (s: any) => {\n if (emit) emit(JSON.stringify({ type: 'browser-status', ...s })).catch(() => {});\n });\n }\n });\n }\n };\n\n // Build task-augmented tools (no approval wrapping — all tools auto-approved)\n const taskTools = await createTools({\n sessionId: this.session.id,\n 
workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n onBashProgress: bashProgressHandler,\n onWriteFileProgress: (progress) => {\n options.onToolProgress?.({ toolName: 'write_file', data: progress });\n if (emit) emit(JSON.stringify({ type: 'tool-progress', toolName: 'write_file', data: progress })).catch(() => {});\n },\n onSearchProgress: (progress) => {\n options.onToolProgress?.({ toolName: 'explore_agent', data: progress });\n if (emit) emit(JSON.stringify({ type: 'tool-progress', toolName: 'explore_agent', data: progress })).catch(() => {});\n },\n taskTools: {\n outputSchema: options.taskConfig.outputSchema,\n onComplete,\n },\n });\n\n // Build system prompt with task addendum\n const baseSystemPrompt = await buildSystemPrompt({\n workingDirectory: this.session.workingDirectory,\n skillsDirectories: config.resolvedSkillsDirectories,\n sessionId: this.session.id,\n discoveredSkills: config.discoveredSkills,\n activeFiles: [],\n });\n const taskAddendum = buildTaskPromptAddendum(options.taskConfig.outputSchema);\n const systemPrompt = `${baseSystemPrompt}\\n\\n${taskAddendum}`;\n\n fireWebhook('task.started', { prompt: options.prompt });\n\n if (emit) {\n await emit(JSON.stringify({ type: 'data-user-message', data: { id: `user_${Date.now()}`, content: options.prompt } }));\n }\n\n // Add the initial user message\n await this.context.addUserMessage(options.prompt);\n\n let iteration = 0;\n\n while (iteration < maxIterations) {\n iteration++;\n\n if (options.abortSignal?.aborted) {\n const cancelError = 'Task was cancelled';\n fireWebhook('task.failed', { status: 'failed', error: cancelError, iterations: iteration });\n return { status: 'failed', error: cancelError, iterations: iteration };\n }\n\n const messages = await this.context.getMessages();\n const useAnthropic = isAnthropicModel(this.session.model);\n\n // Emit start-of-message event for the dashboard\n if (emit) {\n await emit(JSON.stringify({ type: 
'start', messageId: `msg_${Date.now()}` }));\n }\n\n let textStarted = false;\n let textId = `text_${Date.now()}`;\n let reasoningId = `reasoning_${Date.now()}`;\n let reasoningStarted = false;\n const toolCallStarts = new Set<string>();\n\n const iterStream = streamText({\n model: resolveModel(this.session.model) as any,\n system: systemPrompt,\n messages: messages as any,\n tools: taskTools,\n stopWhen: stepCountIs(500),\n abortSignal: options.abortSignal,\n providerOptions: useAnthropic\n ? {\n anthropic: {\n toolStreaming: true,\n thinking: { type: 'enabled', budgetTokens: 10000 },\n },\n }\n : undefined,\n onStepFinish: async (step: any) => {\n options.onStepFinish?.(step);\n fireWebhook('task.step_finished', { iteration, text: step.text });\n if (emit) {\n if (textStarted) {\n await emit(JSON.stringify({ type: 'text-end', id: textId }));\n textStarted = false;\n textId = `text_${Date.now()}`;\n }\n await emit(JSON.stringify({ type: 'finish-step' }));\n }\n },\n });\n\n // Consume the stream, emitting SSE events for live dashboard updates\n for await (const part of iterStream.fullStream) {\n if (part.type === 'text-delta') {\n if (emit) {\n if (!textStarted) {\n await emit(JSON.stringify({ type: 'text-start', id: textId }));\n textStarted = true;\n }\n await emit(JSON.stringify({ type: 'text-delta', id: textId, delta: part.text }));\n }\n } else if (part.type === 'reasoning-start') {\n if (emit) {\n await emit(JSON.stringify({ type: 'reasoning-start', id: reasoningId }));\n reasoningStarted = true;\n }\n } else if (part.type === 'reasoning-delta') {\n if (emit) {\n await emit(JSON.stringify({ type: 'reasoning-delta', id: reasoningId, delta: part.text }));\n }\n } else if (part.type === 'reasoning-end') {\n if (emit && reasoningStarted) {\n await emit(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n reasoningStarted = false;\n reasoningId = `reasoning_${Date.now()}`;\n }\n } else if ((part as any).type === 'tool-call-streaming-start') {\n if 
(emit) {\n const p = part as any;\n await emit(JSON.stringify({ type: 'tool-input-start', toolCallId: p.toolCallId, toolName: p.toolName }));\n toolCallStarts.add(p.toolCallId);\n }\n } else if ((part as any).type === 'tool-call-delta') {\n if (emit) {\n const p = part as any;\n await emit(JSON.stringify({ type: 'tool-input-delta', toolCallId: p.toolCallId, argsTextDelta: p.argsTextDelta }));\n }\n } else if (part.type === 'tool-call') {\n if (emit) {\n if (!toolCallStarts.has(part.toolCallId)) {\n await emit(JSON.stringify({ type: 'tool-input-start', toolCallId: part.toolCallId, toolName: part.toolName }));\n toolCallStarts.add(part.toolCallId);\n }\n const safeInput = part.toolName === 'write_file' && part.input && typeof part.input === 'object'\n ? truncateWriteFileInput(part.input as Record<string, unknown>)\n : part.input;\n await emit(JSON.stringify({ type: 'tool-input-available', toolCallId: part.toolCallId, toolName: part.toolName, input: safeInput }));\n }\n } else if (part.type === 'tool-result') {\n if (emit) {\n await emit(JSON.stringify({ type: 'tool-output-available', toolCallId: part.toolCallId, output: part.output }));\n }\n } else if (part.type === 'error') {\n console.error('Task stream error:', part.error);\n if (emit) {\n await emit(JSON.stringify({ type: 'error', errorText: String(part.error) }));\n }\n }\n }\n\n // End open text/reasoning blocks after stream consumed\n if (emit && textStarted) {\n await emit(JSON.stringify({ type: 'text-end', id: textId }));\n }\n if (emit && reasoningStarted) {\n await emit(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n }\n\n // Get results after stream is fully consumed\n const iterResponse = await iterStream.response;\n const responseMessages = iterResponse.messages as ModelMessage[];\n await this.context.addResponseMessages(responseMessages);\n\n const resultText = await iterStream.text;\n const resultSteps = await iterStream.steps;\n\n // Fire webhook for text output\n if (resultText) {\n 
options.onText?.(resultText);\n fireWebhook('task.message', { iteration, text: resultText });\n }\n\n // Fire webhooks for tool calls in this iteration\n for (const step of resultSteps) {\n if ((step as any).toolCalls) {\n for (const tc of (step as any).toolCalls) {\n options.onToolCall?.({ toolCallId: tc.toolCallId, toolName: tc.toolName, input: tc.args });\n fireWebhook('task.tool_call', { iteration, toolName: tc.toolName, toolCallId: tc.toolCallId, input: tc.args });\n }\n }\n if ((step as any).toolResults) {\n for (const tr of (step as any).toolResults) {\n options.onToolResult?.({ toolCallId: tr.toolCallId, toolName: tr.toolName, output: tr.result });\n fireWebhook('task.tool_result', { iteration, toolName: tr.toolName, toolCallId: tr.toolCallId, output: tr.result });\n }\n }\n }\n\n // Check if a completion tool was called\n if (completion.signal) {\n const sig = completion.signal;\n const finalStatus = sig.status;\n\n // Upload task output files to GCS if configured\n let fileUrls: string[] | undefined;\n if (finalStatus === 'completed' && sig.result && typeof sig.result === 'object') {\n const resultObj = sig.result as Record<string, unknown>;\n const filePaths = Array.isArray(resultObj.files) ? resultObj.files as string[] : [];\n if (filePaths.length > 0) {\n fileUrls = await this.uploadTaskFiles(filePaths);\n }\n }\n\n // Encode and upload browser recording if one was captured\n const recordingUrls = await this.finishTaskRecording(taskRecorder);\n\n const allFileUrls = [...(fileUrls || []), ...recordingUrls];\n\n const eventType = finalStatus === 'completed' ? 'task.completed' : 'task.failed';\n fireWebhook(eventType as WebhookEvent['type'], {\n status: finalStatus,\n result: sig.result,\n error: sig.error,\n iterations: iteration,\n fileUrls: allFileUrls.length > 0 ? allFileUrls : undefined,\n browserRecordingUrls: recordingUrls.length > 0 ? 
recordingUrls : undefined,\n });\n\n // Persist task result in session config\n const updatedTask: TaskConfig = {\n ...options.taskConfig,\n status: finalStatus,\n result: sig.result,\n error: sig.error,\n iterations: iteration,\n };\n await sessionQueries.update(this.session.id, {\n config: { ...this.session.config, task: updatedTask },\n });\n\n return {\n status: finalStatus,\n result: sig.result,\n error: sig.error,\n iterations: iteration,\n };\n }\n\n // No completion tool called — inject continuation and re-run\n const continuationPrompt = 'Continue working on the task. Before calling `complete_task`, VERIFY your work is correct — re-read edited files, run the linter, run tests if applicable, and check the browser/server if you made UI or API changes. Make sure you searched the right directories and found everything relevant. When fully verified, call `complete_task` with the result. If you cannot complete it, call `task_failed` with a reason.';\n if (emit) {\n await emit(JSON.stringify({ type: 'data-user-message', data: { id: `user_${Date.now()}`, content: continuationPrompt } }));\n }\n await this.context.addUserMessage(continuationPrompt);\n }\n\n // Hit max iterations without completion\n const timeoutError = `Task did not complete within ${maxIterations} iterations`;\n const timeoutRecordingUrls = await this.finishTaskRecording(taskRecorder);\n fireWebhook('task.failed', {\n status: 'failed',\n error: timeoutError,\n iterations: iteration,\n browserRecordingUrls: timeoutRecordingUrls.length > 0 ? 
timeoutRecordingUrls : undefined,\n });\n\n const updatedTask: TaskConfig = {\n ...options.taskConfig,\n status: 'failed',\n error: timeoutError,\n iterations: iteration,\n };\n await sessionQueries.update(this.session.id, {\n config: { ...this.session.config, task: updatedTask },\n });\n\n return { status: 'failed', error: timeoutError, iterations: iteration };\n }\n\n /**\n * Stop a task-mode browser recording, encode to MP4, upload to GCS.\n * Returns download URLs for any recordings produced.\n */\n private async finishTaskRecording(\n recorder: import('../browser/recorder.js').FrameRecorder | null\n ): Promise<string[]> {\n // Clean up the browser proxy\n try {\n const { destroyProxy } = await import('../browser/stream-proxy.js');\n destroyProxy(this.session.id);\n } catch {}\n\n if (!recorder || recorder.frameCount === 0) {\n recorder?.clear();\n return [];\n }\n\n recorder.stop();\n try {\n const { isRemoteConfigured, storageQueries } = await import('../db/remote.js');\n if (!isRemoteConfigured()) { recorder.clear(); return []; }\n\n const result = await recorder.encode();\n recorder.clear();\n if (!result) return [];\n\n const { readFile, unlink } = await import('node:fs/promises');\n\n const uploadInfo = await storageQueries.getUploadUrl(\n this.session.id,\n `browser-recording-${Date.now()}.mp4`,\n 'video/mp4',\n 'browser-recording'\n );\n\n const fileData = await readFile(result.path);\n await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': 'video/mp4' },\n body: fileData,\n });\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: result.sizeBytes });\n\n const dlInfo = await storageQueries.getDownloadUrl(uploadInfo.fileId);\n await unlink(result.path).catch(() => {});\n\n console.log(`[TASK] Browser recording uploaded (${result.sizeBytes} bytes)`);\n return [dlInfo.downloadUrl];\n } catch (err: any) {\n console.error('[TASK] Failed to upload browser recording:', err.message);\n recorder.clear();\n return [];\n 
}\n }\n\n /**\n * Upload task output files to GCS via the remote server.\n * Returns an array of download URLs for successfully uploaded files.\n */\n private async uploadTaskFiles(filePaths: string[]): Promise<string[]> {\n try {\n const { isRemoteConfigured, storageQueries } = await import('../db/remote.js');\n if (!isRemoteConfigured()) return [];\n\n const { readFile } = await import('node:fs/promises');\n const { join, basename } = await import('node:path');\n\n const urls: string[] = [];\n\n for (const filePath of filePaths) {\n try {\n const fullPath = filePath.startsWith('/')\n ? filePath\n : join(this.session.workingDirectory, filePath);\n const fileName = basename(fullPath);\n\n // Determine content type\n const ext = fileName.split('.').pop()?.toLowerCase() || '';\n const mimeMap: Record<string, string> = {\n pdf: 'application/pdf', json: 'application/json', csv: 'text/csv',\n txt: 'text/plain', md: 'text/markdown', html: 'text/html',\n png: 'image/png', jpg: 'image/jpeg', jpeg: 'image/jpeg',\n gif: 'image/gif', svg: 'image/svg+xml', mp4: 'video/mp4',\n zip: 'application/zip',\n };\n const contentType = mimeMap[ext] || 'application/octet-stream';\n\n const uploadInfo = await storageQueries.getUploadUrl(\n this.session.id,\n fileName,\n contentType,\n 'task-output'\n );\n\n const fileData = await readFile(fullPath);\n await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': contentType },\n body: fileData,\n });\n\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: fileData.length });\n\n const downloadInfo = await storageQueries.getDownloadUrl(uploadInfo.fileId);\n urls.push(downloadInfo.downloadUrl);\n\n console.log(`[TASK] Uploaded file: ${fileName} (${fileData.length} bytes)`);\n } catch (err: any) {\n console.error(`[TASK] Failed to upload file ${filePath}:`, err.message);\n }\n }\n\n return urls;\n } catch (err: any) {\n console.error('[TASK] File upload failed:', err.message);\n return [];\n }\n }\n\n /**\n * 
Wrap tools to add approval checking\n */\n private wrapToolsWithApproval(options: AgentRunOptions, tools?: ToolSet): ToolSet {\n const sessionConfig = this.session.config;\n const wrappedTools: ToolSet = {};\n const toolsToWrap = tools || this.baseTools;\n\n for (const [name, originalTool] of Object.entries(toolsToWrap)) {\n const needsApproval = requiresApproval(name, sessionConfig ?? undefined);\n\n if (!needsApproval) {\n wrappedTools[name] = originalTool;\n continue;\n }\n\n // Create wrapped tool that checks for approval and waits\n wrappedTools[name] = tool({\n description: originalTool.description || '',\n inputSchema: (originalTool as any).inputSchema || z.object({}),\n execute: async (input: unknown, toolOptions: { toolCallId?: string }) => {\n const toolCallId = toolOptions.toolCallId || nanoid();\n\n // Record the execution\n const execution = toolExecutionQueries.create({\n sessionId: this.session.id,\n toolName: name,\n toolCallId,\n input: input as any,\n requiresApproval: true,\n status: 'pending',\n });\n\n // Store pending approval\n this.pendingApprovals.set(toolCallId, await execution);\n\n // Notify about approval requirement\n options.onApprovalRequired?.(await execution);\n\n // Update session status\n await sessionQueries.updateStatus(this.session.id, 'waiting');\n\n // Wait for approval decision (using shared store for cross-request access)\n const approved = await new Promise<boolean>((resolve) => {\n approvalResolvers.set(toolCallId, { resolve, sessionId: this.session.id });\n });\n\n // Get any rejection reason\n const resolverData = approvalResolvers.get(toolCallId);\n approvalResolvers.delete(toolCallId);\n this.pendingApprovals.delete(toolCallId);\n\n const exec = await execution;\n if (!approved) {\n // Tool was rejected\n const reason = resolverData?.reason || 'User rejected the tool execution';\n await toolExecutionQueries.reject(exec.id);\n await sessionQueries.updateStatus(this.session.id, 'active');\n \n return {\n status: 
'rejected',\n toolCallId,\n rejected: true,\n reason,\n message: `Tool \"${name}\" was rejected by the user. Reason: ${reason}`,\n };\n }\n\n // Tool was approved - execute the original tool\n await toolExecutionQueries.approve(exec.id);\n await sessionQueries.updateStatus(this.session.id, 'active');\n\n try {\n const result = await (originalTool as any).execute(input, toolOptions);\n await toolExecutionQueries.complete(exec.id, result);\n return result;\n } catch (error: any) {\n await toolExecutionQueries.complete(exec.id, null, error.message);\n throw error;\n }\n },\n });\n }\n\n return wrappedTools;\n }\n\n /**\n * Wait for all pending approvals\n */\n async waitForApprovals(): Promise<ToolExecution[]> {\n return Array.from(this.pendingApprovals.values());\n }\n\n /**\n * Approve a pending tool execution\n */\n async approve(toolCallId: string): Promise<{ approved: true }> {\n // Check shared resolver store (the streaming Agent is waiting on this)\n const resolver = approvalResolvers.get(toolCallId);\n if (resolver) {\n resolver.resolve(true);\n return { approved: true };\n }\n\n // Fall back to database lookup\n const pendingFromDb = await toolExecutionQueries.getPendingApprovals(this.session.id);\n const execution = pendingFromDb.find((e: ToolExecution) => e.toolCallId === toolCallId);\n \n if (!execution) {\n throw new Error(`No pending approval for tool call: ${toolCallId}`);\n }\n\n // Mark as approved in DB\n await toolExecutionQueries.approve(execution.id);\n return { approved: true };\n }\n\n /**\n * Reject a pending tool execution\n */\n async reject(toolCallId: string, reason?: string): Promise<{ rejected: true }> {\n // Check shared resolver store (the streaming Agent is waiting on this)\n const resolver = approvalResolvers.get(toolCallId);\n if (resolver) {\n resolver.reason = reason;\n resolver.resolve(false);\n return { rejected: true };\n }\n\n // Fall back to database lookup\n const pendingFromDb = await 
toolExecutionQueries.getPendingApprovals(this.session.id);\n const execution = pendingFromDb.find((e: ToolExecution) => e.toolCallId === toolCallId);\n \n if (!execution) {\n throw new Error(`No pending approval for tool call: ${toolCallId}`);\n }\n\n // Mark as rejected in DB\n await toolExecutionQueries.reject(execution.id);\n return { rejected: true };\n }\n\n /**\n * Get pending approvals\n */\n async getPendingApprovals(): Promise<ToolExecution[]> {\n return toolExecutionQueries.getPendingApprovals(this.session.id);\n }\n\n /**\n * Get context statistics\n */\n getContextStats() {\n return this.context.getStats();\n }\n\n /**\n * Clear conversation context (start fresh)\n */\n clearContext(): void {\n this.context.clear();\n }\n}\n\nexport { ContextManager } from './context.js';\nexport { buildSystemPrompt, buildTaskPromptAddendum } from './prompts.js';\n","import { gateway } from '@ai-sdk/gateway';\nimport type { LanguageModel } from 'ai';\nimport { createRemoteModel } from './remote-model.js';\nimport { getConfig } from '../config/index.js';\n\nconst ANTHROPIC_PREFIX = 'anthropic/';\nconst GOOGLE_PREFIX = 'google/';\n\n/**\n * Check if a model ID is an Anthropic model (for provider-specific options).\n */\nexport function isAnthropicModel(modelId: string): boolean {\n const normalized = modelId.trim().toLowerCase();\n return normalized.startsWith(ANTHROPIC_PREFIX) || normalized.startsWith('claude-');\n}\n\n/**\n * Check if a model ID is a Google model.\n */\nexport function isGoogleModel(modelId: string): boolean {\n const normalized = modelId.trim().toLowerCase();\n return normalized.startsWith(GOOGLE_PREFIX) || normalized.startsWith('gemini-');\n}\n\n/**\n * Resolves a model ID to a LanguageModel instance.\n *\n * When a remote server is configured, routes through the remote inference\n * proxy so API keys never leave the server. 
Falls back to local AI Gateway\n * for development/offline use.\n */\nexport function resolveModel(modelId: string): LanguageModel {\n try {\n const config = getConfig();\n if (config.resolvedRemoteServer.isConfigured) {\n return createRemoteModel(modelId.trim(), {\n url: config.resolvedRemoteServer.url!,\n authKey: config.resolvedRemoteServer.authKey!,\n }) as LanguageModel;\n }\n } catch {\n // Config not loaded yet (e.g. during import-time calls) - fall through to local\n }\n return gateway(modelId.trim());\n}\n\n// Default models for subagents (smaller, faster models)\nexport const SUBAGENT_MODELS = {\n search: 'google/gemini-3-flash-preview',\n analyze: 'google/gemini-3-flash-preview',\n default: 'google/gemini-3-flash-preview',\n} as const;\n","/**\n * Remote inference proxy model.\n *\n * Implements the LanguageModelV3 interface by forwarding doGenerate/doStream\n * calls to the remote server's /inference endpoints. API keys never leave\n * the remote server.\n */\n\nexport interface RemoteModelConfig {\n url: string;\n authKey: string;\n}\n\ninterface CallOptions {\n prompt: any[];\n abortSignal?: AbortSignal;\n [key: string]: unknown;\n}\n\n/**\n * Serialize prompt content for JSON transport.\n * Converts Uint8Array data to base64 with a marker flag.\n */\nfunction serializePrompt(prompt: any[]): any[] {\n return prompt.map((msg: any) => {\n if (!Array.isArray(msg.content)) return msg;\n return {\n ...msg,\n content: msg.content.map((part: any) => {\n if (part.type === 'file' && part.data instanceof Uint8Array) {\n return {\n ...part,\n data: Buffer.from(part.data).toString('base64'),\n _base64: true,\n };\n }\n return part;\n }),\n };\n });\n}\n\n/**\n * Deserialize a stream part from JSON transport.\n * Restores Uint8Array from base64-encoded markers.\n */\nfunction deserializeValue(value: any): any {\n if (value && typeof value === 'object') {\n if (value.__uint8array && typeof value.data === 'string') {\n return Buffer.from(value.data, 'base64');\n }\n 
if (Array.isArray(value)) {\n return value.map(deserializeValue);\n }\n const result: any = {};\n for (const [k, v] of Object.entries(value)) {\n result[k] = deserializeValue(v);\n }\n return result;\n }\n return value;\n}\n\n/**\n * Strip non-serializable fields from call options before sending over HTTP.\n */\nfunction prepareOptions(options: CallOptions): Record<string, unknown> {\n const { abortSignal, ...rest } = options;\n return {\n ...rest,\n prompt: serializePrompt(options.prompt),\n };\n}\n\n/**\n * Creates a LanguageModelV3-compatible model that proxies all inference\n * requests to the remote server. The returned object satisfies the\n * LanguageModelV3 interface structurally.\n */\nexport function createRemoteModel(\n modelId: string,\n config: RemoteModelConfig,\n) {\n const baseUrl = config.url.replace(/\\/$/, '');\n const headers = {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${config.authKey}`,\n };\n\n return {\n specificationVersion: 'v3',\n provider: 'remote-proxy',\n modelId,\n supportedUrls: {},\n\n async doGenerate(options: CallOptions) {\n const res = await fetch(`${baseUrl}/inference/generate`, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n modelId,\n options: prepareOptions(options),\n }),\n signal: options.abortSignal,\n });\n\n if (!res.ok) {\n const err = await res.json().catch(() => ({})) as { error?: string; details?: any };\n const detail = formatRemoteError(res.status, modelId, err);\n throw new Error(detail);\n }\n\n const result = await res.json();\n return deserializeValue(result);\n },\n\n async doStream(options: CallOptions) {\n const res = await fetch(`${baseUrl}/inference/stream`, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n modelId,\n options: prepareOptions(options),\n }),\n signal: options.abortSignal,\n });\n\n if (!res.ok) {\n const err = await res.json().catch(() => ({})) as { error?: string; details?: any };\n const detail = formatRemoteError(res.status, modelId, err);\n 
throw new Error(detail);\n }\n\n const reader = res.body!.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n\n const stream = new ReadableStream({\n async pull(controller) {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n if (buffer.trim()) {\n try {\n const parsed = deserializeValue(JSON.parse(buffer.trim()));\n if (parsed.type === 'error') {\n controller.error(new Error(parsed.error));\n } else {\n controller.enqueue(parsed);\n }\n } catch { /* ignore partial data */ }\n }\n controller.close();\n return;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (!line.trim()) continue;\n try {\n const parsed = deserializeValue(JSON.parse(line));\n if (parsed.type === 'error') {\n controller.error(new Error(parsed.error));\n return;\n }\n controller.enqueue(parsed);\n } catch {\n // skip malformed lines\n }\n }\n }\n },\n cancel() {\n reader.cancel();\n },\n });\n\n const responseHeaders: Record<string, string> = {};\n res.headers.forEach((v, k) => {\n if (k.startsWith('x-upstream-')) {\n responseHeaders[k.replace('x-upstream-', '')] = v;\n }\n });\n\n return {\n stream,\n response: Object.keys(responseHeaders).length > 0\n ? 
{ headers: responseHeaders }\n : undefined,\n };\n },\n };\n}\n\nfunction formatRemoteError(\n status: number,\n modelId: string,\n body: { error?: string; details?: any },\n): string {\n const parts = [`Remote inference failed (${status}) for ${modelId}`];\n\n if (body.error) parts.push(body.error);\n\n if (body.details) {\n const d = body.details;\n if (d.type) parts.push(`type=${d.type}`);\n if (d.statusCode && d.statusCode !== status) parts.push(`upstream=${d.statusCode}`);\n if (d.cause) parts.push(`cause: ${d.cause}`);\n if (d.orderWarnings?.length) parts.push(`prompt issues: ${d.orderWarnings.join('; ')}`);\n }\n\n return parts.join(' — ');\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { truncateOutput } from '../utils/truncate.js';\nimport * as tmux from '../terminal/tmux.js';\n\nconst execAsync = promisify(exec);\n\nconst COMMAND_TIMEOUT = 120_000; // 2 minutes for sync commands\nconst MAX_OUTPUT_CHARS = 10_000;\n\n// Commands that are blocked for safety\nconst BLOCKED_COMMANDS = [\n 'rm -rf /',\n 'rm -rf ~',\n 'mkfs',\n 'dd if=/dev/zero',\n ':(){:|:&};:',\n 'chmod -R 777 /',\n];\n\n/**\n * Check if a command is blocked\n */\nfunction isBlockedCommand(command: string): boolean {\n const normalizedCommand = command.toLowerCase().trim();\n return BLOCKED_COMMANDS.some((blocked) =>\n normalizedCommand.includes(blocked.toLowerCase())\n );\n}\n\nexport interface BashToolProgress {\n terminalId: string;\n status: 'started' | 'running' | 'completed';\n command?: string;\n browserStreamPort?: number;\n browserClosed?: boolean;\n}\n\nexport interface BashToolOptions {\n workingDirectory: string;\n sessionId: string;\n onOutput?: (output: string) => void;\n onProgress?: (progress: BashToolProgress) => void;\n}\n\nconst BROWSER_STREAM_BASE_PORT = 9223;\nconst sessionBrowserPorts = new Map<string, number>();\nlet nextPortOffset = 0;\n\nfunction 
getBrowserStreamPort(sessionId: string): number {\n let port = sessionBrowserPorts.get(sessionId);\n if (!port) {\n port = BROWSER_STREAM_BASE_PORT + nextPortOffset++;\n sessionBrowserPorts.set(sessionId, port);\n }\n return port;\n}\n\nfunction hasAgentBrowserCommand(command: string): boolean {\n return /\\bagent-browser\\b/.test(command);\n}\n\nfunction isAgentBrowserOpenCommand(command: string): boolean {\n return /\\bagent-browser\\s+open\\b/.test(command);\n}\n\nfunction isAgentBrowserCloseCommand(command: string): boolean {\n return /\\bagent-browser\\s+(close|close\\s+--all)\\b/.test(command);\n}\n\n/**\n * Inject AGENT_BROWSER_STREAM_PORT into every `agent-browser` invocation\n * in the command so the WS server stays active across chained commands.\n */\nfunction injectBrowserStreamPort(command: string, port: number): string {\n return command.replace(\n /\\bagent-browser\\b/g,\n `AGENT_BROWSER_STREAM_PORT=${port} agent-browser`\n );\n}\n\n// Unified bash tool schema - Option A (minimal flags)\nconst bashInputSchema = z.object({\n command: z\n .string()\n .optional()\n .describe('The command to execute. Required for running new commands.'),\n background: z\n .boolean()\n .default(false)\n .describe('Run the command in background mode (for dev servers, watchers). Returns immediately with terminal ID.'),\n id: z\n .string()\n .optional()\n .describe('Terminal ID. Use to get logs from, send input to, or kill an existing terminal.'),\n kill: z\n .boolean()\n .optional()\n .describe('Kill the terminal with the given ID.'),\n tail: z\n .number()\n .optional()\n .describe('Number of lines to return from the end of output (for logs).'),\n input: z\n .string()\n .optional()\n .describe('Send text input to an interactive terminal (requires id). Used for responding to prompts.'),\n key: z\n .enum(['Enter', 'Escape', 'Up', 'Down', 'Left', 'Right', 'Tab', 'C-c', 'C-d', 'y', 'n'])\n .optional()\n .describe('Send a special key to an interactive terminal (requires id). 
Use \"y\" or \"n\" for yes/no prompts.'),\n});\n\ntype BashInput = z.infer<typeof bashInputSchema>;\n\n// Cache tmux availability at startup\nlet useTmux: boolean | null = null;\n\nasync function shouldUseTmux(): Promise<boolean> {\n if (useTmux === null) {\n useTmux = await tmux.isTmuxAvailable();\n if (!useTmux) {\n console.warn('[bash] tmux not available, using fallback exec mode');\n }\n }\n return useTmux;\n}\n\n/**\n * Fallback implementation using exec (when tmux is not available)\n */\nasync function execFallback(\n command: string,\n workingDirectory: string,\n onOutput?: (output: string) => void\n): Promise<{ success: boolean; output: string; exitCode: number; error?: string }> {\n try {\n const { stdout, stderr } = await execAsync(command, {\n cwd: workingDirectory,\n timeout: COMMAND_TIMEOUT,\n maxBuffer: 10 * 1024 * 1024,\n });\n\n const output = truncateOutput(stdout + (stderr ? `\\n${stderr}` : ''), MAX_OUTPUT_CHARS);\n onOutput?.(output);\n\n return {\n success: true,\n output,\n exitCode: 0,\n };\n } catch (error: any) {\n const output = truncateOutput(\n (error.stdout || '') + (error.stderr ? `\\n${error.stderr}` : ''),\n MAX_OUTPUT_CHARS\n );\n onOutput?.(output || error.message);\n\n if (error.killed) {\n return {\n success: false,\n error: `Command timed out after ${COMMAND_TIMEOUT / 1000} seconds`,\n output,\n exitCode: 124,\n };\n }\n\n return {\n success: false,\n error: error.message,\n output,\n exitCode: error.code ?? 1,\n };\n }\n}\n\nexport function createBashTool(options: BashToolOptions) {\n return tool({\n description: `Execute commands in the terminal. 
Every command runs in its own session with logs saved to disk.\n\n**Run a command (default - waits for completion):**\nbash({ command: \"npm install\" })\nbash({ command: \"git status\" })\n\n**Run in background (for dev servers, watchers, or interactive commands):**\nbash({ command: \"npm run dev\", background: true })\n→ Returns { id: \"abc123\" } - save this ID\n\n**Check on a background process:**\nbash({ id: \"abc123\" })\nbash({ id: \"abc123\", tail: 50 }) // last 50 lines only\n\n**Stop a background process:**\nbash({ id: \"abc123\", kill: true })\n\n**Respond to interactive prompts (for yes/no questions, etc.):**\nbash({ id: \"abc123\", key: \"y\" }) // send 'y' for yes\nbash({ id: \"abc123\", key: \"n\" }) // send 'n' for no\nbash({ id: \"abc123\", key: \"Enter\" }) // press Enter\nbash({ id: \"abc123\", input: \"my text\" }) // send text input\n\n**IMPORTANT for interactive commands:**\n- Use --yes, -y, or similar flags to avoid prompts when available\n- For create-next-app: add --yes to accept defaults\n- For npm: add --yes or -y to skip confirmation\n- If prompts are unavoidable, run in background mode and use input/key to respond\n\nTerminal output is stored in the global SparkECoder data directory. Use the \\`tail\\` option to read recent output.`,\n\n inputSchema: bashInputSchema,\n\n execute: async (inputArgs: BashInput) => {\n const { command, background, id, kill, tail, input: textInput, key } = inputArgs;\n\n // Handle terminal management (id-based operations)\n if (id) {\n // Kill a terminal\n if (kill) {\n const success = await tmux.killTerminal(id);\n return {\n success,\n id,\n status: success ? 'stopped' : 'not_found',\n message: success ? 
`Terminal ${id} stopped` : `Terminal ${id} not found or already stopped`,\n };\n }\n\n // Send input to an interactive terminal\n if (textInput !== undefined) {\n const success = await tmux.sendInput(id, textInput, { pressEnter: true });\n if (!success) {\n return {\n success: false,\n id,\n error: `Terminal ${id} not found or not running`,\n };\n }\n \n // Wait a moment for the input to be processed, then get logs\n await new Promise(r => setTimeout(r, 300));\n const { output, status } = await tmux.getLogs(id, options.workingDirectory, { tail: tail || 50, sessionId: options.sessionId });\n const truncatedOutput = truncateOutput(output, MAX_OUTPUT_CHARS);\n \n return {\n success: true,\n id,\n output: truncatedOutput,\n status,\n message: `Sent input \"${textInput}\" to terminal`,\n };\n }\n\n // Send a special key to an interactive terminal\n if (key) {\n const success = await tmux.sendKey(id, key);\n if (!success) {\n return {\n success: false,\n id,\n error: `Terminal ${id} not found or not running`,\n };\n }\n \n // Wait a moment for the key to be processed, then get logs\n await new Promise(r => setTimeout(r, 300));\n const { output, status } = await tmux.getLogs(id, options.workingDirectory, { tail: tail || 50, sessionId: options.sessionId });\n const truncatedOutput = truncateOutput(output, MAX_OUTPUT_CHARS);\n \n return {\n success: true,\n id,\n output: truncatedOutput,\n status,\n message: `Sent key \"${key}\" to terminal`,\n };\n }\n\n // Get logs/status from a terminal\n const { output, status } = await tmux.getLogs(id, options.workingDirectory, { tail, sessionId: options.sessionId });\n const truncatedOutput = truncateOutput(output, MAX_OUTPUT_CHARS);\n\n return {\n success: true,\n id,\n output: truncatedOutput,\n status,\n };\n }\n\n // Running a new command requires the command parameter\n if (!command) {\n return {\n success: false,\n error: 'Either \"command\" (to run a new command) or \"id\" (to check/kill/send input) is required',\n };\n }\n\n 
// Safety check\n if (isBlockedCommand(command)) {\n return {\n success: false,\n error: 'This command is blocked for safety reasons.',\n output: '',\n exitCode: 1,\n };\n }\n\n // Detect agent-browser commands and inject streaming port.\n // We inject on ALL agent-browser commands so the WS server stays alive\n // across open, screenshot, eval, snapshot, etc.\n let actualCommand = command;\n const hasAgentBrowser = hasAgentBrowserCommand(command);\n const browserClose = isAgentBrowserCloseCommand(command);\n let browserPort: number | undefined;\n\n if (hasAgentBrowser) {\n browserPort = getBrowserStreamPort(options.sessionId);\n if (!browserClose) {\n actualCommand = injectBrowserStreamPort(command, browserPort);\n }\n }\n\n // Check if we can use tmux\n const canUseTmux = await shouldUseTmux();\n\n if (background) {\n // Background mode\n if (!canUseTmux) {\n return {\n success: false,\n error: 'Background mode requires tmux to be installed. Install with: brew install tmux (macOS) or apt install tmux (Linux)',\n };\n }\n\n // Generate terminal ID upfront and emit progress\n const terminalId = tmux.generateTerminalId();\n options.onProgress?.({ terminalId, status: 'started', command, browserStreamPort: browserPort });\n\n const result = await tmux.runBackground(actualCommand, options.workingDirectory, {\n sessionId: options.sessionId,\n terminalId,\n });\n\n return {\n success: true,\n id: result.id,\n status: 'running',\n message: `Started background process. 
Use bash({ id: \"${result.id}\" }) to check logs.`,\n };\n }\n\n // Sync mode (default)\n if (canUseTmux) {\n const terminalId = tmux.generateTerminalId();\n options.onProgress?.({ terminalId, status: 'started', command, browserStreamPort: browserPort });\n\n try {\n const result = await tmux.runSync(actualCommand, options.workingDirectory, {\n sessionId: options.sessionId,\n timeout: COMMAND_TIMEOUT,\n terminalId,\n });\n\n const truncatedOutput = truncateOutput(result.output, MAX_OUTPUT_CHARS);\n options.onOutput?.(truncatedOutput);\n\n options.onProgress?.({\n terminalId,\n status: 'completed',\n command,\n browserStreamPort: browserPort,\n browserClosed: browserClose || undefined,\n });\n\n return {\n success: result.exitCode === 0,\n id: result.id,\n output: truncatedOutput,\n exitCode: result.exitCode,\n status: result.status,\n };\n } catch (error: any) {\n options.onProgress?.({ terminalId, status: 'completed', command });\n return {\n success: false,\n error: error.message,\n output: '',\n exitCode: 1,\n };\n }\n } else {\n // Fallback to exec (no tmux)\n const result = await execFallback(actualCommand, options.workingDirectory, options.onOutput);\n return {\n success: result.success,\n output: result.output,\n exitCode: result.exitCode,\n error: result.error,\n };\n }\n },\n });\n}\n\nexport type BashTool = ReturnType<typeof createBashTool>;\n","/**\n * Lightweight token estimation using the ~4 chars/token heuristic.\n * Accurate enough for context window budgeting without pulling in a tokenizer.\n */\n\nconst CHARS_PER_TOKEN = 4;\nconst MESSAGE_OVERHEAD_TOKENS = 4;\n\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / CHARS_PER_TOKEN);\n}\n\nexport function estimateMessageTokens(messages: Array<{ role: string; content: unknown }>): number {\n return messages.reduce((total, msg) => {\n const content = typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content);\n return total + estimateTokens(content) + MESSAGE_OVERHEAD_TOKENS;\n }, 0);\n}\n","import { estimateMessageTokens } from './tokens.js';\n\nconst MAX_OUTPUT_CHARS = 10_000;\n\n/**\n * Truncate a string if it exceeds the max length\n */\nexport function truncateOutput(\n output: string,\n maxChars: number = MAX_OUTPUT_CHARS\n): string {\n if (output.length <= maxChars) {\n return output;\n }\n\n const halfMax = Math.floor(maxChars / 2);\n const truncatedChars = output.length - maxChars;\n\n return (\n output.slice(0, halfMax) +\n `\\n\\n... [TRUNCATED: ${truncatedChars.toLocaleString()} characters omitted] ...\\n\\n` +\n output.slice(-halfMax)\n );\n}\n\n/**\n * Calculate the total character count of messages\n */\nexport function calculateContextSize(messages: Array<{ content: unknown }>): number {\n return messages.reduce((total, msg) => {\n const content = typeof msg.content === 'string' \n ? msg.content \n : JSON.stringify(msg.content);\n return total + content.length;\n }, 0);\n}\n\n/**\n * Calculate the estimated token count of messages\n */\nexport function calculateContextTokens(messages: Array<{ role: string; content: unknown }>): number {\n return estimateMessageTokens(messages);\n}\n\n/**\n * Format bytes to human readable string\n */\nexport function formatBytes(bytes: number): string {\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;\n}\n\n/**\n * Format number with commas\n */\nexport function formatNumber(num: number): string {\n return num.toLocaleString();\n}\n","/**\n * tmux wrapper for terminal session management\n * \n * Provides a thin abstraction over tmux commands for:\n * - Session creation and management\n * - Output capture and logging\n * - Process lifecycle management\n */\n\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { mkdir, 
writeFile, readFile } from 'node:fs/promises';\nimport { existsSync, mkdirSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { nanoid } from 'nanoid';\nimport { getAppDataDirectory } from '../config/index.js';\n\nconst execAsync = promisify(exec);\n\n// Session prefix for all sparkecoder terminals\nconst SESSION_PREFIX = 'spark_';\n\n// Log directory base path - stored in global app data directory\nconst LOG_BASE_DIR = 'sessions';\n\nexport interface TerminalMeta {\n id: string;\n command: string;\n cwd: string;\n createdAt: string;\n sessionId: string;\n background: boolean;\n name?: string;\n}\n\nexport interface TerminalResult {\n id: string;\n output: string;\n exitCode: number;\n status: 'completed' | 'running' | 'stopped' | 'error';\n}\n\n// Cache tmux availability check\nlet tmuxAvailableCache: boolean | null = null;\n\n/**\n * Check if tmux is installed and available\n */\nexport async function isTmuxAvailable(): Promise<boolean> {\n if (tmuxAvailableCache !== null) {\n return tmuxAvailableCache;\n }\n \n try {\n const { stdout } = await execAsync('tmux -V');\n tmuxAvailableCache = true;\n // console.log(`[tmux] Available: ${stdout.trim()}`);\n return true;\n } catch (error) {\n tmuxAvailableCache = false;\n console.log(`[tmux] Not available: ${error instanceof Error ? 
error.message : 'unknown error'}`);\n return false;\n }\n}\n\n/**\n * Generate a unique terminal ID\n * Ensure it starts with a letter (tmux session names work better this way)\n */\nexport function generateTerminalId(): string {\n // Prefix with 't' to ensure it starts with a letter (nanoid can start with - or _)\n return 't' + nanoid(9);\n}\n\n/**\n * Get the tmux session name for a terminal ID\n */\nexport function getSessionName(terminalId: string): string {\n return `${SESSION_PREFIX}${terminalId}`;\n}\n\n/**\n * Get the global terminal data directory\n * Uses OS-appropriate app data location (not the working directory)\n */\nfunction getTerminalDataDir(): string {\n const appDataDir = getAppDataDirectory();\n // Ensure directory exists\n if (!existsSync(appDataDir)) {\n mkdirSync(appDataDir, { recursive: true });\n }\n return appDataDir;\n}\n\n/**\n * Get the log directory for a terminal (stored in global app data, not working directory)\n */\nexport function getLogDir(terminalId: string, _workingDirectory: string, sessionId?: string): string {\n const baseDir = getTerminalDataDir();\n if (sessionId) {\n // Session-scoped path: ~/Library/Application Support/sparkecoder/sessions/{sessionId}/terminals/{terminalId}/\n return join(baseDir, LOG_BASE_DIR, sessionId, 'terminals', terminalId);\n }\n // Fallback for legacy terminals without sessionId\n return join(baseDir, 'terminals', terminalId);\n}\n\n/**\n * Escape a string for shell command\n */\nfunction shellEscape(str: string): string {\n // Use single quotes and escape any single quotes in the string\n return `'${str.replace(/'/g, \"'\\\\''\")}'`;\n}\n\n/**\n * Create log directory and metadata file\n */\nasync function initLogDir(terminalId: string, meta: TerminalMeta, workingDirectory: string): Promise<string> {\n const logDir = getLogDir(terminalId, workingDirectory, meta.sessionId);\n await mkdir(logDir, { recursive: true });\n await writeFile(join(logDir, 'meta.json'), JSON.stringify(meta, null, 2));\n 
// Create empty output.log\n await writeFile(join(logDir, 'output.log'), '');\n return logDir;\n}\n\n/**\n * Poll until a condition is met or timeout\n */\nasync function pollUntil(\n condition: () => Promise<boolean>,\n options: { timeout: number; interval?: number }\n): Promise<boolean> {\n const { timeout, interval = 100 } = options;\n const startTime = Date.now();\n \n while (Date.now() - startTime < timeout) {\n if (await condition()) {\n return true;\n }\n await new Promise(r => setTimeout(r, interval));\n }\n \n return false;\n}\n\n/**\n * Run a command synchronously in tmux (waits for completion)\n */\nexport async function runSync(\n command: string,\n workingDirectory: string,\n options: { sessionId: string; timeout?: number; terminalId?: string }\n): Promise<TerminalResult> {\n if (!options) {\n throw new Error('runSync: options parameter is required (must include sessionId)');\n }\n const id = options.terminalId || generateTerminalId();\n const session = getSessionName(id);\n const logDir = await initLogDir(id, {\n id,\n command,\n cwd: workingDirectory,\n createdAt: new Date().toISOString(),\n sessionId: options.sessionId,\n background: false,\n }, workingDirectory);\n \n const logFile = join(logDir, 'output.log');\n const exitCodeFile = join(logDir, 'exit_code');\n const timeout = options.timeout || 120000; // 2 minute default\n \n try {\n // Wrap command to write exit code to a file when done\n // Also write output to the log file directly (more reliable than pipe-pane for quick commands)\n const wrappedCommand = `(${command}) 2>&1 | tee -a ${shellEscape(logFile)}; echo $? 
> ${shellEscape(exitCodeFile)}`;\n \n // Start tmux session\n await execAsync(\n `tmux new-session -d -s ${session} -c ${shellEscape(workingDirectory)} ${shellEscape(wrappedCommand)}`,\n { timeout: 5000 }\n );\n \n // Try to pipe output to log file (may fail if command completes quickly, that's ok)\n try {\n await execAsync(\n `tmux pipe-pane -t ${session} -o 'cat >> ${shellEscape(logFile)}'`,\n { timeout: 1000 }\n );\n } catch {\n // Session may have already ended - that's fine, we use tee in the command\n }\n \n // Poll until session ends or timeout\n const completed = await pollUntil(\n async () => {\n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 1000 });\n return false; // Session still exists\n } catch {\n return true; // Session ended\n }\n },\n { timeout, interval: 100 }\n );\n \n if (!completed) {\n // Timeout - kill the session\n try {\n await execAsync(`tmux kill-session -t ${session}`, { timeout: 5000 });\n } catch {\n // Ignore\n }\n \n // Read whatever output we have\n let output = '';\n try {\n output = await readFile(logFile, 'utf-8');\n } catch {\n // Ignore\n }\n \n return {\n id,\n output: output.trim(),\n exitCode: 124, // Standard timeout exit code\n status: 'error',\n };\n }\n \n // Session ended - read output and exit code\n // Give a moment for log file to be flushed\n await new Promise(r => setTimeout(r, 50));\n \n let output = '';\n try {\n output = await readFile(logFile, 'utf-8');\n } catch {\n // Ignore\n }\n \n // Read exit code\n let exitCode = 0;\n try {\n if (existsSync(exitCodeFile)) {\n const exitCodeStr = await readFile(exitCodeFile, 'utf-8');\n exitCode = parseInt(exitCodeStr.trim(), 10) || 0;\n }\n } catch {\n // Ignore exit code read errors\n }\n \n return {\n id,\n output: output.trim(),\n exitCode,\n status: 'completed',\n };\n } catch (error: any) {\n // Try to kill the session on any error\n try {\n await execAsync(`tmux kill-session -t ${session}`, { timeout: 5000 });\n } catch {\n // Ignore\n }\n 
\n throw error;\n }\n}\n\n/**\n * Run a command in the background (returns immediately)\n */\nexport async function runBackground(\n command: string,\n workingDirectory: string,\n options: { sessionId: string; terminalId?: string; name?: string }\n): Promise<TerminalResult> {\n if (!options) {\n throw new Error('runBackground: options parameter is required (must include sessionId)');\n }\n const id = options.terminalId || generateTerminalId();\n const session = getSessionName(id);\n const logDir = await initLogDir(id, {\n id,\n command,\n cwd: workingDirectory,\n createdAt: new Date().toISOString(),\n sessionId: options.sessionId,\n background: true,\n name: options.name,\n }, workingDirectory);\n \n const logFile = join(logDir, 'output.log');\n \n // Wrap command to log output via tee (more reliable than pipe-pane)\n const wrappedCommand = `(${command}) 2>&1 | tee -a ${shellEscape(logFile)}`;\n \n // Start tmux session (don't wait for completion)\n await execAsync(\n `tmux new-session -d -s ${session} -c ${shellEscape(workingDirectory)} ${shellEscape(wrappedCommand)}`,\n { timeout: 5000 }\n );\n \n return {\n id,\n output: '',\n exitCode: 0,\n status: 'running',\n };\n}\n\n/**\n * Get logs from a terminal\n */\nexport async function getLogs(\n terminalId: string,\n workingDirectory: string,\n options: { tail?: number; sessionId?: string } = {}\n): Promise<{ output: string; status: 'running' | 'stopped' | 'unknown' }> {\n const session = getSessionName(terminalId);\n const logDir = getLogDir(terminalId, workingDirectory, options.sessionId);\n const logFile = join(logDir, 'output.log');\n \n // Check if session is still running\n let isRunning = false;\n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 5000 });\n isRunning = true;\n } catch {\n // Session not running\n }\n \n // Try to capture from tmux first (more up-to-date)\n if (isRunning) {\n try {\n const lines = options.tail || 1000;\n const { stdout } = await execAsync(\n `tmux 
capture-pane -t ${session} -p -S -${lines}`,\n { timeout: 5000, maxBuffer: 10 * 1024 * 1024 }\n );\n return { output: stdout.trim(), status: 'running' };\n } catch {\n // Fall through to file-based approach\n }\n }\n \n // Fall back to log file\n try {\n let output = await readFile(logFile, 'utf-8');\n \n if (options.tail) {\n const lines = output.split('\\n');\n output = lines.slice(-options.tail).join('\\n');\n }\n \n return { output: output.trim(), status: isRunning ? 'running' : 'stopped' };\n } catch {\n return { output: '', status: 'unknown' };\n }\n}\n\n/**\n * Check if a terminal is running\n */\nexport async function isRunning(terminalId: string): Promise<boolean> {\n const session = getSessionName(terminalId);\n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 5000 });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Kill a terminal session\n */\nexport async function killTerminal(terminalId: string): Promise<boolean> {\n const session = getSessionName(terminalId);\n try {\n await execAsync(`tmux kill-session -t ${session}`, { timeout: 5000 });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * List all sparkecoder terminal sessions\n */\nexport async function listSessions(): Promise<string[]> {\n try {\n const { stdout } = await execAsync(\n `tmux list-sessions -F '#{session_name}' 2>/dev/null || true`,\n { timeout: 5000 }\n );\n \n return stdout\n .trim()\n .split('\\n')\n .filter(name => name.startsWith(SESSION_PREFIX))\n .map(name => name.slice(SESSION_PREFIX.length));\n } catch {\n return [];\n }\n}\n\n/**\n * Get metadata for a terminal\n */\nexport async function getMeta(terminalId: string, workingDirectory: string, sessionId?: string): Promise<TerminalMeta | null> {\n const logDir = getLogDir(terminalId, workingDirectory, sessionId);\n const metaFile = join(logDir, 'meta.json');\n \n try {\n const content = await readFile(metaFile, 'utf-8');\n return JSON.parse(content);\n } catch {\n return null;\n 
}\n}\n\n/**\n * List all terminals for a session\n */\nexport async function listSessionTerminals(\n sessionId: string,\n workingDirectory: string\n): Promise<TerminalMeta[]> {\n const terminalsDir = join(workingDirectory, LOG_BASE_DIR, sessionId, 'terminals');\n const terminals: TerminalMeta[] = [];\n \n try {\n const { readdir } = await import('node:fs/promises');\n const entries = await readdir(terminalsDir, { withFileTypes: true });\n \n for (const entry of entries) {\n if (entry.isDirectory()) {\n const meta = await getMeta(entry.name, workingDirectory, sessionId);\n if (meta) {\n terminals.push(meta);\n }\n }\n }\n } catch {\n // Directory doesn't exist or can't be read\n }\n \n return terminals;\n}\n\n/**\n * Send input (keystrokes) to a running terminal\n * Use this to respond to interactive prompts\n */\nexport async function sendInput(terminalId: string, input: string, options: { pressEnter?: boolean } = {}): Promise<boolean> {\n const session = getSessionName(terminalId);\n const { pressEnter = true } = options;\n \n try {\n // Check if session exists first\n await execAsync(`tmux has-session -t ${session}`, { timeout: 1000 });\n \n // Send the input using tmux send-keys with -l (literal) flag\n await execAsync(\n `tmux send-keys -t ${session} -l ${shellEscape(input)}`,\n { timeout: 1000 }\n );\n \n // Send Enter key separately if requested\n if (pressEnter) {\n await execAsync(\n `tmux send-keys -t ${session} Enter`,\n { timeout: 1000 }\n );\n }\n \n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send special keys to a terminal (like arrow keys, escape, etc.)\n */\nexport async function sendKey(terminalId: string, key: 'Enter' | 'Escape' | 'Up' | 'Down' | 'Left' | 'Right' | 'Tab' | 'C-c' | 'C-d' | 'y' | 'n'): Promise<boolean> {\n const session = getSessionName(terminalId);\n \n try {\n await execAsync(`tmux has-session -t ${session}`, { timeout: 1000 });\n await execAsync(`tmux send-keys -t ${session} ${key}`, { timeout: 1000 });\n return 
true;\n } catch {\n return false;\n }\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { readFile, stat } from 'node:fs/promises';\nimport { resolve, relative, isAbsolute, extname } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { truncateOutput } from '../utils/truncate.js';\nimport { resizeImageIfNeeded } from '../utils/resize-image.js';\n\nconst MAX_FILE_SIZE = 5 * 1024 * 1024; // 5MB\nconst MAX_IMAGE_SIZE = 20 * 1024 * 1024; // 20MB for images\nconst MAX_OUTPUT_CHARS = 50_000;\n\nconst IMAGE_EXTENSIONS: Record<string, string> = {\n '.png': 'image/png',\n '.jpg': 'image/jpeg',\n '.jpeg': 'image/jpeg',\n '.gif': 'image/gif',\n '.webp': 'image/webp',\n};\n\nfunction isImageFile(filePath: string): boolean {\n return extname(filePath).toLowerCase() in IMAGE_EXTENSIONS;\n}\n\nfunction getImageMediaType(filePath: string): string {\n return IMAGE_EXTENSIONS[extname(filePath).toLowerCase()] || 'image/png';\n}\n\nexport interface ReadFileToolOptions {\n workingDirectory: string;\n}\n\nconst readFileInputSchema = z.object({\n path: z\n .string()\n .describe('The path to the file to read. Can be relative to working directory or absolute. Supports text files and images (png, jpg, jpeg, gif, webp).'),\n startLine: z\n .number()\n .optional()\n .describe('Optional: Start reading from this line number (1-indexed). Only for text files.'),\n endLine: z\n .number()\n .optional()\n .describe('Optional: Stop reading at this line number (1-indexed, inclusive). Only for text files.'),\n});\n\nexport function createReadFileTool(options: ReadFileToolOptions) {\n return tool({\n description: `Read the contents of a file. 
Provide a path relative to the working directory (${options.workingDirectory}) or an absolute path.\nSupports text files (automatically truncated if large) and image files (png, jpg, jpeg, gif, webp).\nFor images, the file contents are returned as visual data you can see and analyze.\nUse this to understand existing code, check file contents, view screenshots, or gather context.`,\n\n inputSchema: readFileInputSchema,\n\n execute: async ({ path: filePath, startLine, endLine }: z.infer<typeof readFileInputSchema>) => {\n try {\n const absolutePath = isAbsolute(filePath)\n ? filePath\n : resolve(options.workingDirectory, filePath);\n\n const relativePath = relative(options.workingDirectory, absolutePath);\n if (relativePath.startsWith('..') && !isAbsolute(filePath)) {\n return {\n success: false,\n error: 'Path escapes the working directory. Use an absolute path if intentional.',\n content: null,\n };\n }\n\n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `File not found: ${filePath}`,\n content: null,\n };\n }\n\n const stats = await stat(absolutePath);\n\n if (stats.isDirectory()) {\n return {\n success: false,\n error: 'Path is a directory, not a file. Use bash with \"ls\" to list directory contents.',\n content: null,\n };\n }\n\n // Handle image files\n if (isImageFile(absolutePath)) {\n if (stats.size > MAX_IMAGE_SIZE) {\n return {\n success: false,\n error: `Image is too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). 
Maximum size is ${MAX_IMAGE_SIZE / 1024 / 1024}MB.`,\n content: null,\n };\n }\n\n const rawBuffer = await readFile(absolutePath);\n const originalMediaType = getImageMediaType(absolutePath);\n const resized = await resizeImageIfNeeded(rawBuffer, originalMediaType);\n const base64 = resized.buffer.toString('base64');\n\n return {\n success: true,\n path: absolutePath,\n relativePath: relative(options.workingDirectory, absolutePath),\n content: `[Image: ${relativePath} (${resized.mediaType}, ${(stats.size / 1024).toFixed(1)}KB)]`,\n mediaType: resized.mediaType,\n imageData: base64,\n sizeBytes: stats.size,\n };\n }\n\n // Handle text files\n if (stats.size > MAX_FILE_SIZE) {\n return {\n success: false,\n error: `File is too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). Maximum size is ${MAX_FILE_SIZE / 1024 / 1024}MB.`,\n content: null,\n };\n }\n\n let content = await readFile(absolutePath, 'utf-8');\n\n if (startLine !== undefined || endLine !== undefined) {\n const lines = content.split('\\n');\n const start = (startLine ?? 1) - 1;\n const end = endLine ?? lines.length;\n \n if (start < 0 || start >= lines.length) {\n return {\n success: false,\n error: `Start line ${startLine} is out of range. 
File has ${lines.length} lines.`,\n content: null,\n };\n }\n\n content = lines\n .slice(start, end)\n .map((line, idx) => `${(start + idx + 1).toString().padStart(4)}: ${line}`)\n .join('\\n');\n }\n\n const truncatedContent = truncateOutput(content, MAX_OUTPUT_CHARS);\n const wasTruncated = truncatedContent.length < content.length;\n\n return {\n success: true,\n path: absolutePath,\n relativePath: relative(options.workingDirectory, absolutePath),\n content: truncatedContent,\n lineCount: content.split('\\n').length,\n wasTruncated,\n sizeBytes: stats.size,\n };\n } catch (error: any) {\n if (error.code === 'ERR_INVALID_ARG_VALUE' || error.message.includes('encoding')) {\n return {\n success: false,\n error: 'File appears to be binary and cannot be read as text.',\n content: null,\n };\n }\n\n return {\n success: false,\n error: error.message,\n content: null,\n };\n }\n },\n\n toModelOutput: ({ output }) => {\n if (output && typeof output === 'object' && 'imageData' in output && output.imageData) {\n const result = output as { imageData: string; mediaType: string; content: string; relativePath: string };\n return {\n type: 'content' as const,\n value: [\n { type: 'text' as const, text: result.content },\n { type: 'image-data' as const, data: result.imageData, mediaType: result.mediaType },\n ],\n };\n }\n return typeof output === 'string'\n ? 
{ type: 'text' as const, value: output }\n : { type: 'json' as const, value: output as any };\n },\n });\n}\n\nexport type ReadFileTool = ReturnType<typeof createReadFileTool>;\n","import sharp from 'sharp';\nimport { createHash } from 'node:crypto';\nimport { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getAppDataDirectory } from '../config/index.js';\n\n/**\n * Anthropic API limits (as of 2026):\n * - Hard reject: any dimension > 8000px (or >2000px if >20 images)\n * - Optimal: long edge <= 1568px, ~1.15 megapixels\n * (anything larger is re-resized server-side, adding TTFT latency)\n * - File size: 5MB per image (base64-encoded)\n *\n * We target 1568px long edge to match Anthropic's optimal dimensions,\n * avoiding both the hard rejection AND the server-side resize latency penalty.\n * If the result still exceeds 5MB, we reduce quality iteratively.\n */\nconst MAX_LONG_EDGE = 1568;\nconst MAX_FILE_BYTES = 5 * 1024 * 1024; // 5MB\nconst CACHE_DIR_NAME = 'image-cache';\n\nfunction getCacheDir(): string {\n const dir = join(getAppDataDirectory(), CACHE_DIR_NAME);\n if (!existsSync(dir)) mkdirSync(dir, { recursive: true });\n return dir;\n}\n\nfunction cacheKey(buffer: Buffer): string {\n return createHash('sha256').update(buffer).digest('hex');\n}\n\nexport interface ResizeResult {\n buffer: Buffer;\n mediaType: string;\n}\n\n/**\n * Downscale an image buffer to fit within Anthropic's optimal dimensions\n * (1568px long edge) and file size limit (5MB).\n * Returns the original buffer unchanged if no processing is needed.\n * Caches processed results on disk keyed by SHA-256 of the original.\n *\n * Returns both the (possibly converted) buffer and the actual output\n * media type, since large PNGs get converted to JPEG.\n */\nexport async function resizeImageIfNeeded(buffer: Buffer, mediaType?: string): Promise<ResizeResult> {\n const inputMediaType = mediaType || 'image/png';\n\n let metadata;\n 
try {\n metadata = await sharp(buffer).metadata();\n } catch {\n return { buffer, mediaType: inputMediaType };\n }\n\n const { width, height } = metadata;\n if (!width || !height) return { buffer, mediaType: inputMediaType };\n\n const longEdge = Math.max(width, height);\n const needsResize = longEdge > MAX_LONG_EDGE;\n const needsShrink = buffer.length > MAX_FILE_BYTES;\n\n if (!needsResize && !needsShrink) return { buffer, mediaType: inputMediaType };\n\n const key = cacheKey(buffer);\n const cacheDir = getCacheDir();\n const isPng = inputMediaType.includes('png');\n\n // Determine output format up front so cache path and mediaType are consistent\n const willConvertToJpeg = isPng && (needsShrink || buffer.length > 2 * 1024 * 1024);\n const outputMediaType = (willConvertToJpeg || !isPng) ? 'image/jpeg' : 'image/png';\n const ext = outputMediaType === 'image/png' ? '.png' : '.jpg';\n const cachePath = join(cacheDir, key + ext);\n\n if (existsSync(cachePath)) {\n console.log(`[image-resize] Cache hit for ${width}x${height} image`);\n return { buffer: readFileSync(cachePath), mediaType: outputMediaType };\n }\n\n let pipeline = sharp(buffer);\n\n if (needsResize) {\n pipeline = pipeline.resize(MAX_LONG_EDGE, MAX_LONG_EDGE, {\n fit: 'inside',\n withoutEnlargement: true,\n });\n }\n\n let result: Buffer;\n if (willConvertToJpeg) {\n result = await pipeline.jpeg({ quality: 85 }).toBuffer();\n } else if (isPng) {\n result = await pipeline.png().toBuffer();\n } else {\n result = await pipeline.jpeg({ quality: 85 }).toBuffer();\n }\n\n // If still over 5MB, reduce quality iteratively (always JPEG at this point)\n let finalMediaType = outputMediaType;\n if (result.length > MAX_FILE_BYTES) {\n for (const quality of [70, 50, 30]) {\n result = await sharp(buffer)\n .resize(MAX_LONG_EDGE, MAX_LONG_EDGE, { fit: 'inside', withoutEnlargement: true })\n .jpeg({ quality })\n .toBuffer();\n if (result.length <= MAX_FILE_BYTES) break;\n }\n finalMediaType = 'image/jpeg';\n }\n\n 
writeFileSync(cachePath, result);\n\n const resultMeta = await sharp(result).metadata();\n console.log(\n `[image-resize] ${width}x${height} -> ${resultMeta.width}x${resultMeta.height} ` +\n `(${(buffer.length / 1024).toFixed(0)}KB -> ${(result.length / 1024).toFixed(0)}KB, ${finalMediaType})`,\n );\n\n return { buffer: result, mediaType: finalMediaType };\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { readFile, writeFile, mkdir } from 'node:fs/promises';\nimport { resolve, relative, isAbsolute, dirname } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { backupFile } from '../checkpoints/index.js';\nimport * as LSP from '../lsp/index.js';\n\nexport interface WriteFileProgress {\n /** The file path being written */\n path: string;\n /** Relative path from working directory */\n relativePath: string;\n /** Write mode */\n mode: 'full' | 'str_replace';\n /** Status of the write operation */\n status: 'started' | 'content' | 'completed';\n /** For 'content' status: the content being written (may be chunked for streaming) */\n content?: string;\n /** When content is chunked, the chunk index (0-based) */\n chunkIndex?: number;\n /** When content is chunked, the total number of chunks */\n chunkCount?: number;\n /** When content is chunked, the start offset for this chunk */\n chunkStart?: number;\n /** Whether this content update is chunked */\n isChunked?: boolean;\n /** For str_replace: the old string being replaced */\n oldString?: string;\n /** For str_replace: the new string */\n newString?: string;\n /** Total content length (for progress tracking) */\n totalLength?: number;\n /** Action being performed */\n action?: 'created' | 'replaced' | 'edited';\n}\n\nexport interface WriteFileToolOptions {\n workingDirectory: string;\n sessionId: string;\n /** Enable LSP diagnostics after file edits (default: true) */\n enableLSP?: boolean;\n /** Called when write_file has progress to report (for streaming content) */\n onProgress?: 
(progress: WriteFileProgress) => void;\n}\n\nconst MAX_PROGRESS_CHUNK_SIZE = 16 * 1024;\n\nconst writeFileInputSchema = z.object({\n path: z\n .string()\n .describe('The path to the file. Can be relative to working directory or absolute.'),\n mode: z\n .enum(['full', 'str_replace'])\n .describe('Write mode: \"full\" for complete file write, \"str_replace\" for targeted string replacement'),\n content: z\n .string()\n .optional()\n .describe('For \"full\" mode: The complete content to write to the file'),\n old_string: z\n .string()\n .optional()\n .describe('For \"str_replace\" mode: The exact string to find and replace'),\n new_string: z\n .string()\n .optional()\n .describe('For \"str_replace\" mode: The string to replace old_string with'),\n});\n\nexport function createWriteFileTool(options: WriteFileToolOptions) {\n return tool({\n description: `Write content to a file. Supports two modes:\n1. \"full\" - Write the entire file content (creates new file or replaces existing)\n2. \"str_replace\" - Replace a specific string in an existing file (for precise edits)\n\nFor str_replace mode:\n- Provide the exact string to find (old_string) and its replacement (new_string)\n- The old_string must match EXACTLY (including whitespace and indentation)\n- Only the first occurrence is replaced\n- Use this for surgical edits to existing code\n\nFor full mode:\n- Provide the complete file content\n- Creates parent directories if they don't exist\n- Use this for new files or complete rewrites\n\nWorking directory: ${options.workingDirectory}`,\n\n inputSchema: writeFileInputSchema,\n\n execute: async ({ path, mode, content, old_string, new_string }: z.infer<typeof writeFileInputSchema>) => {\n try {\n // Resolve the path\n const absolutePath = isAbsolute(path)\n ? 
path\n : resolve(options.workingDirectory, path);\n\n // Security check\n const relativePath = relative(options.workingDirectory, absolutePath);\n if (relativePath.startsWith('..') && !isAbsolute(path)) {\n return {\n success: false,\n error: 'Path escapes the working directory. Use an absolute path if intentional.',\n };\n }\n\n if (mode === 'full') {\n // Full file write\n if (content === undefined) {\n return {\n success: false,\n error: 'Content is required for \"full\" mode',\n };\n }\n\n const existed = existsSync(absolutePath);\n const action = existed ? 'replaced' : 'created';\n\n // Emit progress: started\n console.log('[WRITE-FILE] onProgress callback exists:', !!options.onProgress);\n console.log('[WRITE-FILE] Emitting started event for:', relativePath);\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'started',\n action,\n totalLength: content.length,\n });\n\n // Emit progress: content (chunked for large payloads to keep SSE stable)\n if (content.length <= MAX_PROGRESS_CHUNK_SIZE) {\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'content',\n content,\n action,\n totalLength: content.length,\n });\n } else {\n const chunkCount = Math.ceil(content.length / MAX_PROGRESS_CHUNK_SIZE);\n for (let i = 0; i < chunkCount; i += 1) {\n const chunkStart = i * MAX_PROGRESS_CHUNK_SIZE;\n const chunk = content.slice(chunkStart, chunkStart + MAX_PROGRESS_CHUNK_SIZE);\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'content',\n content: chunk,\n action,\n totalLength: content.length,\n chunkIndex: i,\n chunkCount,\n chunkStart,\n isChunked: true,\n });\n // Yield between chunks so SSE can flush progressively\n if (chunkCount > 1) {\n await new Promise((resolve) => setTimeout(resolve, 0));\n }\n }\n }\n\n // Backup the file before modifying (for checkpoint/revert)\n await backupFile(options.sessionId, options.workingDirectory, absolutePath);\n\n // 
Create parent directories if needed\n const dir = dirname(absolutePath);\n if (!existsSync(dir)) {\n await mkdir(dir, { recursive: true });\n }\n\n await writeFile(absolutePath, content, 'utf-8');\n\n // Get LSP diagnostics if enabled and file type is supported\n let diagnosticsOutput = '';\n if (options.enableLSP !== false && LSP.isSupported(absolutePath)) {\n await LSP.touchFile(absolutePath, true);\n diagnosticsOutput = await LSP.formatDiagnosticsOutput(absolutePath);\n }\n\n // Emit progress: completed\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'full',\n status: 'completed',\n action,\n totalLength: content.length,\n });\n\n return {\n success: true,\n path: absolutePath,\n relativePath,\n mode: 'full',\n action,\n bytesWritten: Buffer.byteLength(content, 'utf-8'),\n lineCount: content.split('\\n').length,\n ...(diagnosticsOutput && { diagnostics: diagnosticsOutput }),\n };\n } else if (mode === 'str_replace') {\n // String replacement mode\n if (old_string === undefined || new_string === undefined) {\n return {\n success: false,\n error: 'Both old_string and new_string are required for \"str_replace\" mode',\n };\n }\n\n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `File not found: ${path}. 
Use \"full\" mode to create new files.`,\n };\n }\n\n // Emit progress: started\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n status: 'started',\n action: 'edited',\n });\n\n // Emit progress: content (show the replacement)\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n status: 'content',\n oldString: old_string,\n newString: new_string,\n action: 'edited',\n });\n\n // Backup the file before modifying (for checkpoint/revert)\n await backupFile(options.sessionId, options.workingDirectory, absolutePath);\n\n // Read current content\n const currentContent = await readFile(absolutePath, 'utf-8');\n\n // Check if old_string exists\n if (!currentContent.includes(old_string)) {\n // Provide helpful debugging info\n const lines = currentContent.split('\\n');\n const preview = lines.slice(0, 20).join('\\n');\n \n return {\n success: false,\n error: 'old_string not found in file. The string must match EXACTLY including whitespace.',\n hint: 'Check for differences in indentation, line endings, or invisible characters.',\n filePreview: lines.length > 20 \n ? `${preview}\\n... (${lines.length - 20} more lines)`\n : preview,\n };\n }\n\n // Check for multiple occurrences\n const occurrences = currentContent.split(old_string).length - 1;\n if (occurrences > 1) {\n return {\n success: false,\n error: `Found ${occurrences} occurrences of old_string. 
Please provide more context to make it unique.`,\n hint: 'Include surrounding lines or more specific content in old_string.',\n };\n }\n\n // Perform replacement\n const newContent = currentContent.replace(old_string, new_string);\n await writeFile(absolutePath, newContent, 'utf-8');\n\n // Calculate diff info\n const oldLines = old_string.split('\\n').length;\n const newLines = new_string.split('\\n').length;\n\n // Get LSP diagnostics if enabled and file type is supported\n let diagnosticsOutput = '';\n if (options.enableLSP !== false && LSP.isSupported(absolutePath)) {\n await LSP.touchFile(absolutePath, true);\n diagnosticsOutput = await LSP.formatDiagnosticsOutput(absolutePath);\n }\n\n // Emit progress: completed\n options.onProgress?.({\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n status: 'completed',\n action: 'edited',\n });\n\n return {\n success: true,\n path: absolutePath,\n relativePath,\n mode: 'str_replace',\n linesRemoved: oldLines,\n linesAdded: newLines,\n lineDelta: newLines - oldLines,\n ...(diagnosticsOutput && { diagnostics: diagnosticsOutput }),\n };\n }\n\n return {\n success: false,\n error: `Invalid mode: ${mode}`,\n };\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\nexport type WriteFileTool = ReturnType<typeof createWriteFileTool>;\n","/**\n * Checkpoint system for session revert functionality\n * \n * Creates checkpoints before each user message, backs up modified files,\n * and allows reverting to any previous checkpoint.\n */\n\nimport { readFile, writeFile, unlink, mkdir } from 'node:fs/promises';\nimport { existsSync } from 'node:fs';\nimport { resolve, relative, dirname } from 'node:path';\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport {\n checkpointQueries,\n fileBackupQueries,\n messageQueries,\n toolExecutionQueries,\n sessionQueries,\n type Checkpoint,\n type FileBackup,\n} from '../db/index.js';\n\nconst 
execAsync = promisify(exec);\n\n/**\n * Get the current git HEAD commit hash (if in a git repo)\n */\nasync function getGitHead(workingDirectory: string): Promise<string | undefined> {\n try {\n const { stdout } = await execAsync('git rev-parse HEAD', {\n cwd: workingDirectory,\n timeout: 5000,\n });\n return stdout.trim();\n } catch {\n return undefined;\n }\n}\n\n/**\n * Check if a directory is a git repository\n */\nasync function isGitRepo(workingDirectory: string): Promise<boolean> {\n try {\n await execAsync('git rev-parse --git-dir', {\n cwd: workingDirectory,\n timeout: 5000,\n });\n return true;\n } catch {\n return false;\n }\n}\n\nexport interface CheckpointManager {\n sessionId: string;\n workingDirectory: string;\n currentCheckpointId: string | null;\n}\n\n// Store for active checkpoint managers (one per session)\nconst activeManagers = new Map<string, CheckpointManager>();\n\n/**\n * Get or create a checkpoint manager for a session\n */\nexport function getCheckpointManager(sessionId: string, workingDirectory: string): CheckpointManager {\n let manager = activeManagers.get(sessionId);\n if (!manager) {\n manager = {\n sessionId,\n workingDirectory,\n currentCheckpointId: null,\n };\n activeManagers.set(sessionId, manager);\n }\n return manager;\n}\n\n/**\n * Create a new checkpoint before processing a user message\n * Called when a user message is about to be processed\n */\nexport async function createCheckpoint(\n sessionId: string,\n workingDirectory: string,\n messageSequence: number\n): Promise<Checkpoint> {\n // Get git HEAD if available\n const gitHead = await getGitHead(workingDirectory);\n\n // Create the checkpoint record\n const checkpoint = await checkpointQueries.create({\n sessionId,\n messageSequence,\n gitHead,\n });\n\n // Update the manager with the current checkpoint\n const manager = getCheckpointManager(sessionId, workingDirectory);\n manager.currentCheckpointId = checkpoint.id;\n\n return checkpoint;\n}\n\n/**\n * Backup a file 
before it's modified\n * Called by the write_file tool before writing\n */\nexport async function backupFile(\n sessionId: string,\n workingDirectory: string,\n filePath: string\n): Promise<FileBackup | null> {\n const manager = getCheckpointManager(sessionId, workingDirectory);\n \n if (!manager.currentCheckpointId) {\n console.warn('[checkpoint] No active checkpoint, skipping file backup');\n return null;\n }\n\n // Normalize the file path to be relative\n const absolutePath = resolve(workingDirectory, filePath);\n const relativePath = relative(workingDirectory, absolutePath);\n\n // Check if we already have a backup for this file in this checkpoint\n if (await fileBackupQueries.hasBackup(manager.currentCheckpointId, relativePath)) {\n // Already backed up in this checkpoint, no need to backup again\n return null;\n }\n\n // Read the original content (if file exists)\n let originalContent: string | null = null;\n let existed = false;\n\n if (existsSync(absolutePath)) {\n try {\n originalContent = await readFile(absolutePath, 'utf-8');\n existed = true;\n } catch (error: any) {\n console.warn(`[checkpoint] Failed to read file for backup: ${error.message}`);\n }\n }\n\n // Create the backup record\n const backup = await fileBackupQueries.create({\n checkpointId: manager.currentCheckpointId,\n sessionId,\n filePath: relativePath,\n originalContent,\n existed,\n });\n\n return backup;\n}\n\n/**\n * Revert a session to a specific checkpoint\n * This will:\n * 1. Restore all files to their state at that checkpoint\n * 2. Delete all messages after the checkpoint's message sequence\n * 3. Delete all tool executions after the checkpoint\n * 4. 
Delete all checkpoints after this one\n */\nexport async function revertToCheckpoint(\n sessionId: string,\n checkpointId: string\n): Promise<{\n success: boolean;\n filesRestored: number;\n filesDeleted: number;\n messagesDeleted: number;\n checkpointsDeleted: number;\n error?: string;\n}> {\n // Get the session to find working directory\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return {\n success: false,\n filesRestored: 0,\n filesDeleted: 0,\n messagesDeleted: 0,\n checkpointsDeleted: 0,\n error: 'Session not found',\n };\n }\n\n // Get the checkpoint\n const checkpoint = await checkpointQueries.getById(checkpointId);\n if (!checkpoint || checkpoint.sessionId !== sessionId) {\n return {\n success: false,\n filesRestored: 0,\n filesDeleted: 0,\n messagesDeleted: 0,\n checkpointsDeleted: 0,\n error: 'Checkpoint not found',\n };\n }\n\n const workingDirectory = session.workingDirectory;\n\n // Get all file backups FROM this checkpoint onwards (these need to be reverted)\n // This includes backups from the target checkpoint since they represent changes made\n // AFTER the checkpoint was created (i.e., during processing of that user message)\n const backupsToRevert = await fileBackupQueries.getFromSequence(sessionId, checkpoint.messageSequence);\n\n // Group backups by file path, keeping only the earliest backup for each file\n // (we want to restore to the state before ANY changes were made)\n const fileToEarliestBackup = new Map<string, FileBackup>();\n for (const backup of backupsToRevert) {\n if (!fileToEarliestBackup.has(backup.filePath)) {\n fileToEarliestBackup.set(backup.filePath, backup);\n }\n }\n\n let filesRestored = 0;\n let filesDeleted = 0;\n\n // Restore each file\n for (const [filePath, backup] of fileToEarliestBackup) {\n const absolutePath = resolve(workingDirectory, filePath);\n\n try {\n if (backup.existed && backup.originalContent !== null) {\n // File existed before - restore its content\n const dir = 
dirname(absolutePath);\n if (!existsSync(dir)) {\n await mkdir(dir, { recursive: true });\n }\n await writeFile(absolutePath, backup.originalContent, 'utf-8');\n filesRestored++;\n } else if (!backup.existed) {\n // File didn't exist before - delete it\n if (existsSync(absolutePath)) {\n await unlink(absolutePath);\n filesDeleted++;\n }\n }\n } catch (error: any) {\n console.error(`Failed to restore ${filePath}: ${error.message}`);\n }\n }\n\n // Delete messages from the checkpoint's message sequence onwards\n const messagesDeleted = await messageQueries.deleteFromSequence(sessionId, checkpoint.messageSequence);\n\n // Delete tool executions after the checkpoint was created\n await toolExecutionQueries.deleteAfterTime(sessionId, checkpoint.createdAt);\n\n // Delete checkpoints after this one (the file backups are deleted via CASCADE)\n const checkpointsDeleted = await checkpointQueries.deleteAfterSequence(sessionId, checkpoint.messageSequence);\n\n // Update the manager\n const manager = getCheckpointManager(sessionId, workingDirectory);\n manager.currentCheckpointId = checkpoint.id;\n\n return {\n success: true,\n filesRestored,\n filesDeleted,\n messagesDeleted,\n checkpointsDeleted,\n };\n}\n\n/**\n * Get all checkpoints for a session\n */\nexport async function getCheckpoints(sessionId: string): Promise<Checkpoint[]> {\n return checkpointQueries.getBySession(sessionId);\n}\n\n/**\n * Get the diff for an entire session (all file changes from start to now)\n */\nexport async function getSessionDiff(\n sessionId: string\n): Promise<{\n files: Array<{\n path: string;\n status: 'created' | 'modified' | 'deleted';\n originalContent: string | null;\n currentContent: string | null;\n }>;\n}> {\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return { files: [] };\n }\n\n const workingDirectory = session.workingDirectory;\n\n // Get all file backups for this session\n const allBackups = await fileBackupQueries.getBySession(sessionId);\n\n 
// Group by file path, keeping the earliest backup (original state)\n const fileToOriginalBackup = new Map<string, FileBackup>();\n for (const backup of allBackups) {\n if (!fileToOriginalBackup.has(backup.filePath)) {\n fileToOriginalBackup.set(backup.filePath, backup);\n }\n }\n\n const files: Array<{\n path: string;\n status: 'created' | 'modified' | 'deleted';\n originalContent: string | null;\n currentContent: string | null;\n }> = [];\n\n for (const [filePath, originalBackup] of fileToOriginalBackup) {\n const absolutePath = resolve(workingDirectory, filePath);\n \n // Get current content\n let currentContent: string | null = null;\n let currentExists = false;\n \n if (existsSync(absolutePath)) {\n try {\n currentContent = await readFile(absolutePath, 'utf-8');\n currentExists = true;\n } catch {\n // File exists but can't be read\n }\n }\n\n // Determine status\n let status: 'created' | 'modified' | 'deleted';\n if (!originalBackup.existed && currentExists) {\n status = 'created';\n } else if (originalBackup.existed && !currentExists) {\n status = 'deleted';\n } else {\n status = 'modified';\n }\n\n files.push({\n path: filePath,\n status,\n originalContent: originalBackup.originalContent,\n currentContent,\n });\n }\n\n return { files };\n}\n\n/**\n * Clear the checkpoint manager for a session (called when session is deleted)\n */\nexport function clearCheckpointManager(sessionId: string): void {\n activeManagers.delete(sessionId);\n}\n","/**\n * LSP Integration Module\n * \n * Provides Language Server Protocol support for the coding agent.\n * Automatically spawns LSP servers on-demand when files are touched,\n * collects diagnostics, and formats them for the agent.\n * \n * Usage:\n * import * as LSP from './lsp/index.js';\n * \n * // After editing a file, get diagnostics\n * await LSP.touchFile('/path/to/file.ts', true);\n * const diagnostics = await LSP.getDiagnostics('/path/to/file.ts');\n */\n\nimport { extname, dirname } from 'node:path';\nimport { 
getServerForExtension, getSupportedExtensions } from './servers.js';\nimport { createClient, normalizePath } from './client.js';\nimport {\n formatDiagnosticsForAgent,\n formatDiagnostic,\n DiagnosticSeverity,\n SymbolKind,\n} from './types.js';\nimport type { Diagnostic, LSPClient, Location, DocumentSymbol, SymbolInformation } from './types.js';\n\n// Re-export types and utilities\nexport * from './types.js';\nexport { normalizePath } from './client.js';\nexport { getSupportedExtensions, getServerForExtension } from './servers.js';\n\n/**\n * Global state for LSP clients\n */\ninterface LSPState {\n clients: Map<string, LSPClient>; // key: `${serverId}:${root}`\n broken: Set<string>; // keys of servers that failed to start\n initialized: boolean;\n}\n\nlet state: LSPState = {\n clients: new Map(),\n broken: new Set(),\n initialized: false,\n};\n\n/**\n * Initialize the LSP system (optional, called automatically on first use)\n */\nexport async function init(): Promise<void> {\n if (state.initialized) return;\n state.initialized = true;\n}\n\n/**\n * Get or create an LSP client for a file\n */\nasync function getClientForFile(filePath: string): Promise<LSPClient | null> {\n const normalized = normalizePath(filePath);\n const ext = extname(normalized);\n \n // Check if we support this file type\n const serverDef = getServerForExtension(ext);\n if (!serverDef) {\n return null;\n }\n \n // Use file's directory as root (server will find project root)\n const root = dirname(normalized);\n const key = `${serverDef.id}:${root}`;\n \n // Check if we already have a client\n const existing = state.clients.get(key);\n if (existing) {\n return existing;\n }\n \n // Check if this server is broken for this root\n if (state.broken.has(key)) {\n return null;\n }\n \n // Spawn new server\n try {\n const handle = await serverDef.spawn(root);\n if (!handle) {\n state.broken.add(key);\n return null;\n }\n \n console.log(`[lsp] Started ${serverDef.name} for ${root}`);\n \n const client 
= await createClient(serverDef.id, handle, root);\n state.clients.set(key, client);\n \n // Handle process exit\n handle.process.on('exit', (code) => {\n console.log(`[lsp] ${serverDef.name} exited with code ${code}`);\n state.clients.delete(key);\n });\n \n return client;\n } catch (error) {\n console.error(`[lsp] Failed to start ${serverDef.name}:`, error);\n state.broken.add(key);\n return null;\n }\n}\n\n/**\n * Get all clients for a file (currently just TypeScript, but extensible)\n */\nasync function getClientsForFile(filePath: string): Promise<LSPClient[]> {\n const client = await getClientForFile(filePath);\n return client ? [client] : [];\n}\n\n/**\n * Touch a file (notify LSP of change and optionally wait for diagnostics)\n * \n * Call this after editing a file to get diagnostics.\n * \n * @param filePath - Path to the file\n * @param waitForDiagnostics - Whether to wait for diagnostics before returning\n * @returns Promise that resolves when done\n */\nexport async function touchFile(filePath: string, waitForDiagnostics = false): Promise<void> {\n const clients = await getClientsForFile(filePath);\n \n if (clients.length === 0) {\n return;\n }\n \n // Notify all clients\n await Promise.all(clients.map(client => client.notifyOpen(filePath)));\n \n // Optionally wait for diagnostics\n if (waitForDiagnostics) {\n await Promise.all(clients.map(client => client.waitForDiagnostics(filePath)));\n }\n}\n\n/**\n * Get diagnostics for a file\n */\nexport async function getDiagnostics(filePath: string): Promise<Diagnostic[]> {\n const normalized = normalizePath(filePath);\n const clients = await getClientsForFile(normalized);\n \n const allDiagnostics: Diagnostic[] = [];\n \n for (const client of clients) {\n const diags = client.getDiagnostics(normalized);\n allDiagnostics.push(...diags);\n }\n \n return allDiagnostics;\n}\n\n/**\n * Get all diagnostics from all clients\n */\nexport async function getAllDiagnostics(): Promise<Record<string, Diagnostic[]>> {\n 
const results: Record<string, Diagnostic[]> = {};\n \n for (const client of state.clients.values()) {\n const clientDiags = client.getAllDiagnostics();\n for (const [path, diagnostics] of clientDiags.entries()) {\n const existing = results[path] || [];\n existing.push(...diagnostics);\n results[path] = existing;\n }\n }\n \n return results;\n}\n\n/**\n * Wait for diagnostics on a file\n */\nexport async function waitForDiagnostics(filePath: string, timeoutMs = 5000): Promise<Diagnostic[]> {\n const normalized = normalizePath(filePath);\n const clients = await getClientsForFile(normalized);\n \n const allDiagnostics: Diagnostic[] = [];\n \n await Promise.all(\n clients.map(async (client) => {\n const diags = await client.waitForDiagnostics(normalized, timeoutMs);\n allDiagnostics.push(...diags);\n })\n );\n \n return allDiagnostics;\n}\n\n// ============================================================\n// Code intelligence methods (definition, references, hover, symbols)\n// ============================================================\n\n/**\n * Get definition locations for a symbol at a given position\n */\nexport async function getDefinition(filePath: string, line: number, character: number): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return [];\n return client.getDefinition(normalized, line, character);\n}\n\n/**\n * Find all references to a symbol at a given position\n */\nexport async function getReferences(\n filePath: string,\n line: number,\n character: number,\n includeDeclaration = false\n): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return [];\n return client.getReferences(normalized, line, character, includeDeclaration);\n}\n\n/**\n * Get hover/type information for a symbol at a given position\n */\nexport async function getHover(filePath: string, line: number, character: 
number): Promise<string | null> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return null;\n return client.getHover(normalized, line, character);\n}\n\n/**\n * Get all symbols defined in a document (hierarchical tree)\n */\nexport async function getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]> {\n const normalized = normalizePath(filePath);\n const client = await getClientForFile(normalized);\n if (!client) return [];\n return client.getDocumentSymbols(normalized);\n}\n\n/**\n * Search for symbols across the workspace by name.\n * Requires a hint file path to identify which LSP client/project to search.\n */\nexport async function findWorkspaceSymbols(query: string, hintFilePath?: string): Promise<SymbolInformation[]> {\n let client: LSPClient | null = null;\n if (hintFilePath) {\n client = await getClientForFile(hintFilePath);\n }\n if (!client) {\n // Use first available client\n const clients = Array.from(state.clients.values());\n client = clients[0] || null;\n }\n if (!client) return [];\n return client.findWorkspaceSymbols(query);\n}\n\n/**\n * Format diagnostics for agent output\n * \n * Call this after touchFile to get a formatted string to append to tool output.\n */\nexport async function formatDiagnosticsOutput(\n filePath: string,\n options: { maxDiagnostics?: number; errorsOnly?: boolean } = {}\n): Promise<string> {\n const diagnostics = await getDiagnostics(filePath);\n return formatDiagnosticsForAgent(filePath, diagnostics, options);\n}\n\n/**\n * Get errors only (severity = 1)\n */\nexport function getErrors(diagnostics: Diagnostic[]): Diagnostic[] {\n return diagnostics.filter(d => d.severity === DiagnosticSeverity.Error);\n}\n\n/**\n * Check if a file type is supported\n */\nexport function isSupported(filePath: string): boolean {\n const ext = extname(filePath);\n return getServerForExtension(ext) !== null;\n}\n\n/**\n * Shutdown all LSP clients\n */\nexport 
async function shutdown(): Promise<void> {\n const shutdownPromises: Promise<void>[] = [];\n \n for (const [key, client] of state.clients.entries()) {\n console.log(`[lsp] Shutting down ${key}`);\n shutdownPromises.push(client.shutdown());\n }\n \n await Promise.allSettled(shutdownPromises);\n \n state.clients.clear();\n state.broken.clear();\n state.initialized = false;\n}\n\n/**\n * Reset state (for testing)\n * \n * Kills all existing LSP server processes before clearing state to prevent\n * orphaned tsserver processes from accumulating (which causes CI hangs).\n */\nexport async function reset(): Promise<void> {\n // Shut down existing clients to avoid orphaning server processes\n for (const client of state.clients.values()) {\n try {\n await client.shutdown();\n } catch {\n // Ignore errors during cleanup\n }\n }\n\n state = {\n clients: new Map(),\n broken: new Set(),\n initialized: false,\n };\n}\n\n// Utility exports for direct usage\nexport const DiagnosticUtils = {\n format: formatDiagnostic,\n formatForAgent: formatDiagnosticsForAgent,\n Severity: DiagnosticSeverity,\n};\n\n// Alias for backwards compatibility\nexport { DiagnosticUtils as Diagnostic };\n","/**\n * LSP Server Definitions\n * \n * Defines how to spawn and configure various LSP servers.\n * Currently supports TypeScript/JavaScript with typescript-language-server.\n */\n\nimport { spawn } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { resolve, dirname } from 'node:path';\nimport type { LSPServerDefinition, LSPServerHandle } from './types.js';\n\n/**\n * Find the nearest directory containing one of the given files\n */\nfunction findNearestRoot(startDir: string, markers: string[]): string | null {\n let dir = startDir;\n const root = '/';\n \n while (dir !== root) {\n for (const marker of markers) {\n if (existsSync(resolve(dir, marker))) {\n return dir;\n }\n }\n const parent = dirname(dir);\n if (parent === dir) break;\n dir = parent;\n }\n \n return 
null;\n}\n\n/**\n * Check if a command exists in PATH\n */\nasync function commandExists(cmd: string): Promise<boolean> {\n try {\n const { exec } = await import('node:child_process');\n const { promisify } = await import('node:util');\n const execAsync = promisify(exec);\n \n const isWindows = process.platform === 'win32';\n const checkCmd = isWindows ? `where ${cmd}` : `which ${cmd}`;\n \n await execAsync(checkCmd);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * TypeScript/JavaScript Language Server\n * \n * Uses typescript-language-server which wraps tsserver.\n * Provides type checking, error detection, and more.\n */\nexport const TypeScriptServer: LSPServerDefinition = {\n id: 'typescript',\n name: 'TypeScript Language Server',\n extensions: ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs', '.mts', '.cts'],\n \n async spawn(root: string): Promise<LSPServerHandle | null> {\n // Check for package manager lock files to determine project root\n const projectRoot = findNearestRoot(root, [\n 'package-lock.json',\n 'pnpm-lock.yaml',\n 'yarn.lock',\n 'bun.lockb',\n 'bun.lock',\n ]) || root;\n \n // Try to find typescript-language-server\n const hasNpx = await commandExists('npx');\n const hasBunx = await commandExists('bunx');\n const hasPnpx = await commandExists('pnpx');\n \n let cmd: string[];\n \n if (hasPnpx) {\n cmd = ['pnpx', 'typescript-language-server', '--stdio'];\n } else if (hasBunx) {\n cmd = ['bunx', 'typescript-language-server', '--stdio'];\n } else if (hasNpx) {\n cmd = ['npx', 'typescript-language-server', '--stdio'];\n } else {\n console.warn('[lsp] No package runner (npx/bunx/pnpx) found for typescript-language-server');\n return null;\n }\n \n try {\n const proc = spawn(cmd[0], cmd.slice(1), {\n cwd: projectRoot,\n stdio: ['pipe', 'pipe', 'pipe'],\n env: {\n ...process.env,\n // Suppress some noisy output\n TSS_LOG: '-level none',\n },\n });\n \n // Handle stderr (for debugging)\n proc.stderr?.on('data', (data) => {\n const msg = 
data.toString().trim();\n if (msg && !msg.includes('deprecated')) {\n // Only log non-trivial errors\n console.debug('[lsp:typescript:stderr]', msg);\n }\n });\n \n return {\n process: proc,\n initialization: {\n // TypeScript-specific initialization options\n preferences: {\n includeInlayParameterNameHints: 'none',\n includeInlayPropertyDeclarationTypeHints: false,\n includeInlayFunctionLikeReturnTypeHints: false,\n },\n },\n };\n } catch (error) {\n console.error('[lsp] Failed to spawn typescript-language-server:', error);\n return null;\n }\n },\n};\n\n/**\n * All available LSP servers\n */\nexport const servers: LSPServerDefinition[] = [\n TypeScriptServer,\n];\n\n/**\n * Get the appropriate server for a file extension\n */\nexport function getServerForExtension(ext: string): LSPServerDefinition | null {\n for (const server of servers) {\n if (server.extensions.includes(ext)) {\n return server;\n }\n }\n return null;\n}\n\n/**\n * Get all supported file extensions\n */\nexport function getSupportedExtensions(): string[] {\n const extensions = new Set<string>();\n for (const server of servers) {\n for (const ext of server.extensions) {\n extensions.add(ext);\n }\n }\n return Array.from(extensions);\n}\n","/**\n * LSP Client\n * \n * Manages communication with an LSP server via JSON-RPC over stdio.\n * Handles initialization, file notifications, and diagnostics collection.\n */\n\nimport {\n createMessageConnection,\n StreamMessageReader,\n StreamMessageWriter,\n type MessageConnection,\n} from 'vscode-jsonrpc/node.js';\nimport { pathToFileURL, fileURLToPath } from 'node:url';\nimport { readFile } from 'node:fs/promises';\nimport { existsSync } from 'node:fs';\nimport { extname, normalize } from 'node:path';\nimport type { LSPClient, LSPServerHandle, Diagnostic, Location, DocumentSymbol, SymbolInformation } from './types.js';\n\n/**\n * Map file extension to LSP language ID\n */\nfunction getLanguageId(filePath: string): string {\n const ext = 
extname(filePath).toLowerCase();\n const map: Record<string, string> = {\n '.ts': 'typescript',\n '.tsx': 'typescriptreact',\n '.js': 'javascript',\n '.jsx': 'javascriptreact',\n '.mjs': 'javascript',\n '.cjs': 'javascript',\n '.mts': 'typescript',\n '.cts': 'typescript',\n '.json': 'json',\n '.jsonc': 'jsonc',\n };\n return map[ext] || 'plaintext';\n}\n\n/**\n * Normalize a file path for consistent key usage\n */\nexport function normalizePath(filePath: string): string {\n return normalize(filePath);\n}\n\n/**\n * Create an LSP client connected to a server\n */\nexport async function createClient(\n serverId: string,\n handle: LSPServerHandle,\n root: string\n): Promise<LSPClient> {\n const { process: proc } = handle;\n \n if (!proc.stdout || !proc.stdin) {\n throw new Error('LSP server process has no stdout/stdin');\n }\n \n // Create JSON-RPC connection over stdio\n const connection: MessageConnection = createMessageConnection(\n new StreamMessageReader(proc.stdout),\n new StreamMessageWriter(proc.stdin)\n );\n \n // Diagnostics storage\n const diagnostics = new Map<string, Diagnostic[]>();\n \n // Track open files and their versions\n const fileVersions = new Map<string, number>();\n \n // Event listeners for diagnostics updates\n const diagnosticListeners = new Map<string, Array<() => void>>();\n \n // Listen for diagnostic notifications\n connection.onNotification('textDocument/publishDiagnostics', (params: any) => {\n const filePath = normalizePath(fileURLToPath(params.uri));\n diagnostics.set(filePath, params.diagnostics || []);\n \n // Notify any waiters\n const listeners = diagnosticListeners.get(filePath);\n if (listeners) {\n for (const listener of listeners) {\n listener();\n }\n }\n });\n \n // Handle server requests\n connection.onRequest('workspace/configuration', async (params: any) => {\n // Return configuration for each requested section\n return params.items.map(() => handle.initialization || {});\n });\n \n 
connection.onRequest('client/registerCapability', async () => {\n // Accept capability registration\n return null;\n });\n \n connection.onRequest('window/workDoneProgress/create', async () => {\n // Accept progress token creation\n return null;\n });\n \n connection.onNotification('window/logMessage', (params: any) => {\n // Optionally log server messages\n if (params.type <= 2) { // Error or Warning\n console.debug(`[lsp:${serverId}]`, params.message);\n }\n });\n \n // Start listening\n connection.listen();\n \n // Initialize the server\n const initResult = await connection.sendRequest('initialize', {\n processId: process.pid,\n rootUri: pathToFileURL(root).href,\n rootPath: root,\n workspaceFolders: [\n {\n name: 'workspace',\n uri: pathToFileURL(root).href,\n },\n ],\n capabilities: {\n textDocument: {\n synchronization: {\n dynamicRegistration: true,\n willSave: false,\n willSaveWaitUntil: false,\n didSave: true,\n },\n publishDiagnostics: {\n relatedInformation: true,\n versionSupport: true,\n codeDescriptionSupport: true,\n },\n completion: {\n dynamicRegistration: true,\n completionItem: {\n snippetSupport: true,\n documentationFormat: ['markdown', 'plaintext'],\n },\n },\n hover: {\n dynamicRegistration: true,\n contentFormat: ['markdown', 'plaintext'],\n },\n definition: {\n dynamicRegistration: true,\n },\n references: {\n dynamicRegistration: true,\n },\n documentSymbol: {\n dynamicRegistration: true,\n hierarchicalDocumentSymbolSupport: true,\n symbolKind: {\n valueSet: [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26],\n },\n },\n },\n workspace: {\n configuration: true,\n didChangeConfiguration: {\n dynamicRegistration: true,\n },\n didChangeWatchedFiles: {\n dynamicRegistration: true,\n },\n workspaceFolders: true,\n },\n },\n initializationOptions: handle.initialization,\n });\n \n // Send initialized notification\n await connection.sendNotification('initialized', {});\n \n // Return client interface\n const client: LSPClient 
= {\n serverId,\n root,\n diagnostics,\n \n async notifyOpen(filePath: string): Promise<void> {\n const normalized = normalizePath(filePath);\n \n if (!existsSync(normalized)) {\n return;\n }\n \n try {\n const content = await readFile(normalized, 'utf-8');\n const version = (fileVersions.get(normalized) ?? -1) + 1;\n fileVersions.set(normalized, version);\n \n if (version === 0) {\n // First time opening\n await connection.sendNotification('textDocument/didOpen', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n languageId: getLanguageId(normalized),\n version,\n text: content,\n },\n });\n } else {\n // Already open, send change\n await connection.sendNotification('textDocument/didChange', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n version,\n },\n contentChanges: [{ text: content }],\n });\n }\n } catch (error) {\n console.error('[lsp] Error notifying open:', error);\n }\n },\n \n async notifyChange(filePath: string): Promise<void> {\n const normalized = normalizePath(filePath);\n \n if (!existsSync(normalized)) {\n return;\n }\n \n try {\n const content = await readFile(normalized, 'utf-8');\n const version = (fileVersions.get(normalized) ?? 
0) + 1;\n fileVersions.set(normalized, version);\n \n await connection.sendNotification('textDocument/didChange', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n version,\n },\n contentChanges: [{ text: content }],\n });\n } catch (error) {\n console.error('[lsp] Error notifying change:', error);\n }\n },\n \n async notifyClose(filePath: string): Promise<void> {\n const normalized = normalizePath(filePath);\n fileVersions.delete(normalized);\n diagnostics.delete(normalized);\n \n try {\n await connection.sendNotification('textDocument/didClose', {\n textDocument: {\n uri: pathToFileURL(normalized).href,\n },\n });\n } catch (error) {\n console.error('[lsp] Error notifying close:', error);\n }\n },\n \n async notifyWatchedFilesChanged(changes: Array<{ uri: string; type: number }>): Promise<void> {\n try {\n await connection.sendNotification('workspace/didChangeWatchedFiles', {\n changes,\n });\n } catch (error) {\n console.error('[lsp] Error notifying watched files:', error);\n }\n },\n \n async waitForDiagnostics(filePath: string, timeoutMs = 5000): Promise<Diagnostic[]> {\n const normalized = normalizePath(filePath);\n \n return new Promise<Diagnostic[]>((resolve) => {\n const startTime = Date.now();\n let debounceTimer: NodeJS.Timeout | undefined;\n let resolved = false;\n \n const cleanup = () => {\n if (debounceTimer) clearTimeout(debounceTimer);\n const listeners = diagnosticListeners.get(normalized);\n if (listeners) {\n const idx = listeners.indexOf(onDiagnostic);\n if (idx >= 0) listeners.splice(idx, 1);\n if (listeners.length === 0) {\n diagnosticListeners.delete(normalized);\n }\n }\n };\n \n const finish = () => {\n if (resolved) return;\n resolved = true;\n cleanup();\n resolve(diagnostics.get(normalized) || []);\n };\n \n const onDiagnostic = () => {\n // Debounce: wait 150ms after last update\n if (debounceTimer) clearTimeout(debounceTimer);\n debounceTimer = setTimeout(finish, 150);\n };\n \n // Register listener\n if 
(!diagnosticListeners.has(normalized)) {\n diagnosticListeners.set(normalized, []);\n }\n diagnosticListeners.get(normalized)!.push(onDiagnostic);\n \n // Timeout fallback\n setTimeout(() => {\n if (!resolved) {\n finish();\n }\n }, timeoutMs);\n \n // If we already have diagnostics, trigger debounce\n if (diagnostics.has(normalized)) {\n onDiagnostic();\n }\n });\n },\n \n getDiagnostics(filePath: string): Diagnostic[] {\n return diagnostics.get(normalizePath(filePath)) || [];\n },\n \n getAllDiagnostics(): Map<string, Diagnostic[]> {\n return new Map(diagnostics);\n },\n \n async getDefinition(filePath: string, line: number, character: number): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n // Only open if not already tracked (avoids redundant file reads + didChange)\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result: any = await connection.sendRequest('textDocument/definition', {\n textDocument: { uri: pathToFileURL(normalized).href },\n position: { line, character },\n });\n if (!result) return [];\n const items = Array.isArray(result) ? 
result : [result];\n return items.map((r: any) => ({\n uri: r.targetUri || r.uri,\n range: r.targetRange || r.range,\n }));\n } catch (error) {\n console.error('[lsp] Error getting definition:', error);\n return [];\n }\n },\n \n async getReferences(filePath: string, line: number, character: number, includeDeclaration = false): Promise<Location[]> {\n const normalized = normalizePath(filePath);\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result = await connection.sendRequest('textDocument/references', {\n textDocument: { uri: pathToFileURL(normalized).href },\n position: { line, character },\n context: { includeDeclaration },\n });\n return (result as Location[]) || [];\n } catch (error) {\n console.error('[lsp] Error getting references:', error);\n return [];\n }\n },\n \n async getHover(filePath: string, line: number, character: number): Promise<string | null> {\n const normalized = normalizePath(filePath);\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result: any = await connection.sendRequest('textDocument/hover', {\n textDocument: { uri: pathToFileURL(normalized).href },\n position: { line, character },\n });\n if (!result || !result.contents) return null;\n if (typeof result.contents === 'string') return result.contents;\n if (result.contents.value) return result.contents.value;\n if (Array.isArray(result.contents)) {\n return result.contents.map((c: any) => typeof c === 'string' ? 
c : c.value).join('\\n');\n }\n return null;\n } catch (error) {\n console.error('[lsp] Error getting hover:', error);\n return null;\n }\n },\n \n async getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]> {\n const normalized = normalizePath(filePath);\n if (!fileVersions.has(normalized)) {\n await client.notifyOpen(normalized);\n }\n try {\n const result: any[] = await connection.sendRequest('textDocument/documentSymbol', {\n textDocument: { uri: pathToFileURL(normalized).href },\n });\n if (!result || result.length === 0) return [];\n \n // Handle both DocumentSymbol[] (hierarchical) and SymbolInformation[] (flat) formats\n // DocumentSymbol has `range` directly; SymbolInformation has `location.range`\n if (result[0].range) {\n return result as DocumentSymbol[];\n }\n // Convert SymbolInformation[] to DocumentSymbol[]\n return result.map((si: any) => ({\n name: si.name,\n kind: si.kind,\n range: si.location?.range ?? { start: { line: 0, character: 0 }, end: { line: 0, character: 0 } },\n selectionRange: si.location?.range ?? 
{ start: { line: 0, character: 0 }, end: { line: 0, character: 0 } },\n detail: si.containerName,\n }));\n } catch (error) {\n console.error('[lsp] Error getting document symbols:', error);\n return [];\n }\n },\n \n async findWorkspaceSymbols(query: string): Promise<SymbolInformation[]> {\n try {\n const result = await connection.sendRequest('workspace/symbol', { query });\n return (result as SymbolInformation[]) || [];\n } catch (error) {\n console.error('[lsp] Error finding workspace symbols:', error);\n return [];\n }\n },\n \n async shutdown(): Promise<void> {\n try {\n await connection.sendRequest('shutdown');\n await connection.sendNotification('exit');\n connection.end();\n connection.dispose();\n proc.kill();\n } catch (error) {\n // Force kill if graceful shutdown fails\n proc.kill('SIGKILL');\n }\n },\n };\n \n return client;\n}\n","/**\n * LSP Types\n * \n * Type definitions for the Language Server Protocol integration.\n * These types are compatible with vscode-languageserver-types.\n */\n\n/**\n * Diagnostic severity levels from LSP spec\n */\nexport enum DiagnosticSeverity {\n Error = 1,\n Warning = 2,\n Information = 3,\n Hint = 4,\n}\n\n/**\n * Position in a text document (0-indexed)\n */\nexport interface Position {\n line: number;\n character: number;\n}\n\n/**\n * Range in a text document\n */\nexport interface Range {\n start: Position;\n end: Position;\n}\n\n/**\n * A diagnostic message from an LSP server\n */\nexport interface Diagnostic {\n range: Range;\n message: string;\n severity?: DiagnosticSeverity;\n code?: number | string;\n source?: string;\n relatedInformation?: DiagnosticRelatedInformation[];\n}\n\n/**\n * Related information for a diagnostic\n */\nexport interface DiagnosticRelatedInformation {\n location: {\n uri: string;\n range: Range;\n };\n message: string;\n}\n\n/**\n * A location in a text document (used by definition, references, etc.)\n */\nexport interface Location {\n uri: string;\n range: Range;\n}\n\n/**\n * Symbol 
kinds from the LSP spec\n */\nexport enum SymbolKind {\n File = 1,\n Module = 2,\n Namespace = 3,\n Package = 4,\n Class = 5,\n Method = 6,\n Property = 7,\n Field = 8,\n Constructor = 9,\n Enum = 10,\n Interface = 11,\n Function = 12,\n Variable = 13,\n Constant = 14,\n String = 15,\n Number = 16,\n Boolean = 17,\n Array = 18,\n Object = 19,\n Key = 20,\n Null = 21,\n EnumMember = 22,\n Struct = 23,\n Event = 24,\n Operator = 25,\n TypeParameter = 26,\n}\n\n/**\n * Hierarchical document symbol (returned by textDocument/documentSymbol)\n */\nexport interface DocumentSymbol {\n name: string;\n detail?: string;\n kind: SymbolKind;\n range: Range;\n selectionRange: Range;\n children?: DocumentSymbol[];\n}\n\n/**\n * Flat symbol information (returned by workspace/symbol)\n */\nexport interface SymbolInformation {\n name: string;\n kind: SymbolKind;\n location: Location;\n containerName?: string;\n}\n\n/**\n * Parameters for textDocument/publishDiagnostics notification\n */\nexport interface PublishDiagnosticsParams {\n uri: string;\n version?: number;\n diagnostics: Diagnostic[];\n}\n\n/**\n * LSP Server handle (spawned process)\n */\nexport interface LSPServerHandle {\n process: import('node:child_process').ChildProcess;\n initialization?: Record<string, unknown>;\n}\n\n/**\n * LSP Server definition\n */\nexport interface LSPServerDefinition {\n id: string;\n name: string;\n extensions: string[];\n spawn: (root: string) => Promise<LSPServerHandle | null>;\n}\n\n/**\n * LSP Client interface\n */\nexport interface LSPClient {\n serverId: string;\n root: string;\n diagnostics: Map<string, Diagnostic[]>;\n \n notifyOpen(filePath: string): Promise<void>;\n notifyChange(filePath: string): Promise<void>;\n notifyClose(filePath: string): Promise<void>;\n notifyWatchedFilesChanged(changes: Array<{ uri: string; type: number }>): Promise<void>;\n \n waitForDiagnostics(filePath: string, timeoutMs?: number): Promise<Diagnostic[]>;\n getDiagnostics(filePath: string): 
Diagnostic[];\n getAllDiagnostics(): Map<string, Diagnostic[]>;\n \n /** Go to definition for a symbol at the given position */\n getDefinition(filePath: string, line: number, character: number): Promise<Location[]>;\n /** Find all references to the symbol at the given position */\n getReferences(filePath: string, line: number, character: number, includeDeclaration?: boolean): Promise<Location[]>;\n /** Get hover/type information for the symbol at the given position */\n getHover(filePath: string, line: number, character: number): Promise<string | null>;\n /** Get all symbols defined in a document (hierarchical) */\n getDocumentSymbols(filePath: string): Promise<DocumentSymbol[]>;\n /** Search for symbols across the workspace by name */\n findWorkspaceSymbols(query: string): Promise<SymbolInformation[]>;\n \n shutdown(): Promise<void>;\n}\n\n/**\n * Format a diagnostic for display to the agent\n */\nexport function formatDiagnostic(diagnostic: Diagnostic): string {\n const severity = {\n [DiagnosticSeverity.Error]: 'ERROR',\n [DiagnosticSeverity.Warning]: 'WARN',\n [DiagnosticSeverity.Information]: 'INFO',\n [DiagnosticSeverity.Hint]: 'HINT',\n }[diagnostic.severity ?? DiagnosticSeverity.Error];\n \n const line = diagnostic.range.start.line + 1; // Convert to 1-indexed\n const col = diagnostic.range.start.character + 1;\n const source = diagnostic.source ? ` [${diagnostic.source}]` : '';\n \n return `${severity} [${line}:${col}]${source} ${diagnostic.message}`;\n}\n\n/**\n * Format diagnostics for agent output\n */\nexport function formatDiagnosticsForAgent(\n filePath: string,\n diagnostics: Diagnostic[],\n options: { maxDiagnostics?: number; errorsOnly?: boolean } = {}\n): string {\n const { maxDiagnostics = 20, errorsOnly = true } = options;\n \n // Filter to errors only if requested\n const filtered = errorsOnly\n ? 
diagnostics.filter(d => d.severity === DiagnosticSeverity.Error)\n : diagnostics;\n \n if (filtered.length === 0) return '';\n \n const limited = filtered.slice(0, maxDiagnostics);\n const suffix = filtered.length > maxDiagnostics\n ? `\\n... and ${filtered.length - maxDiagnostics} more`\n : '';\n \n const formatted = limited.map(formatDiagnostic).join('\\n');\n \n return `\\n\\nLSP errors detected in this file, please fix:\\n<diagnostics file=\"${filePath}\">\\n${formatted}${suffix}\\n</diagnostics>`;\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { todoQueries, TodoItem } from '../db/index.js';\n\nexport interface TodoToolOptions {\n sessionId: string;\n}\n\nconst todoInputSchema = z.object({\n action: z\n .enum(['add', 'list', 'mark', 'clear'])\n .describe('The action to perform on the todo list'),\n items: z\n .array(\n z.object({\n content: z.string().describe('Description of the task'),\n order: z.number().optional().describe('Optional order/priority (lower = higher priority)'),\n })\n )\n .optional()\n .describe('For \"add\" action: Array of todo items to add'),\n todoId: z\n .string()\n .optional()\n .describe('For \"mark\" action: The ID of the todo item to update'),\n status: z\n .enum(['pending', 'in_progress', 'completed', 'cancelled'])\n .optional()\n .describe('For \"mark\" action: The new status for the todo item'),\n});\n\nexport function createTodoTool(options: TodoToolOptions) {\n return tool({\n description: `Manage your task list for the current session. 
Use this to:\n- Break down complex tasks into smaller steps\n- Track progress on multi-step operations\n- Organize your work systematically\n\nAvailable actions:\n- \"add\": Add one or more new todo items to the list\n- \"list\": View all current todo items and their status\n- \"mark\": Update the status of a todo item (pending, in_progress, completed, cancelled)\n- \"clear\": Remove all todo items from the list\n\nBest practices:\n- Add todos before starting complex tasks\n- Mark items as \"in_progress\" when actively working on them\n- Update status as you complete each step`,\n\n inputSchema: todoInputSchema,\n\n execute: async ({ action, items, todoId, status }: z.infer<typeof todoInputSchema>) => {\n try {\n switch (action) {\n case 'add': {\n if (!items || items.length === 0) {\n return {\n success: false,\n error: 'No items provided. Include at least one todo item.',\n };\n }\n\n const created = await todoQueries.createMany(options.sessionId, items);\n \n return {\n success: true,\n action: 'add',\n itemsAdded: created.length,\n items: created.map(formatTodoItem),\n };\n }\n\n case 'list': {\n const todos = await todoQueries.getBySession(options.sessionId);\n \n const stats = {\n total: todos.length,\n pending: todos.filter((t: TodoItem) => t.status === 'pending').length,\n inProgress: todos.filter((t: TodoItem) => t.status === 'in_progress').length,\n completed: todos.filter((t: TodoItem) => t.status === 'completed').length,\n cancelled: todos.filter((t: TodoItem) => t.status === 'cancelled').length,\n };\n\n return {\n success: true,\n action: 'list',\n stats,\n items: todos.map(formatTodoItem),\n };\n }\n\n case 'mark': {\n if (!todoId) {\n return {\n success: false,\n error: 'todoId is required for \"mark\" action',\n };\n }\n\n if (!status) {\n return {\n success: false,\n error: 'status is required for \"mark\" action',\n };\n }\n\n const updated = await todoQueries.updateStatus(todoId, status);\n \n if (!updated) {\n return {\n success: false,\n 
error: `Todo item not found: ${todoId}`,\n };\n }\n\n return {\n success: true,\n action: 'mark',\n item: formatTodoItem(updated),\n };\n }\n\n case 'clear': {\n const count = await todoQueries.clearSession(options.sessionId);\n \n return {\n success: true,\n action: 'clear',\n itemsRemoved: count,\n };\n }\n\n default:\n return {\n success: false,\n error: `Unknown action: ${action}`,\n };\n }\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\nfunction formatTodoItem(item: TodoItem) {\n return {\n id: item.id,\n content: item.content,\n status: item.status,\n order: item.order,\n createdAt: item.createdAt.toISOString(),\n };\n}\n\nexport type TodoTool = ReturnType<typeof createTodoTool>;\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { loadAllSkills, loadSkillContent, formatSkillsForContext } from '../skills/index.js';\nimport { skillQueries } from '../db/index.js';\n\nexport interface LoadSkillToolOptions {\n sessionId: string;\n skillsDirectories: string[];\n}\n\nconst loadSkillInputSchema = z.object({\n action: z\n .enum(['list', 'load'])\n .describe('Action to perform: \"list\" to see available skills, \"load\" to load a skill'),\n skillName: z\n .string()\n .optional()\n .describe('For \"load\" action: The name of the skill to load'),\n});\n\nexport function createLoadSkillTool(options: LoadSkillToolOptions) {\n return tool({\n description: `Load a skill document into the conversation context. 
Skills are specialized knowledge files that provide guidance on specific topics like debugging, code review, architecture patterns, etc.\n\nAvailable actions:\n- \"list\": Show all available skills with their descriptions\n- \"load\": Load a specific skill's full content into context\n\nUse this when you need specialized knowledge or guidance for a particular task.\nOnce loaded, a skill's content will be available in the conversation context.`,\n\n inputSchema: loadSkillInputSchema,\n\n execute: async ({ action, skillName }: z.infer<typeof loadSkillInputSchema>) => {\n try {\n switch (action) {\n case 'list': {\n const skills = await loadAllSkills(options.skillsDirectories);\n \n return {\n success: true,\n action: 'list',\n skillCount: skills.length,\n skills: skills.map((s) => ({\n name: s.name,\n description: s.description,\n })),\n formatted: formatSkillsForContext(skills),\n };\n }\n\n case 'load': {\n if (!skillName) {\n return {\n success: false,\n error: 'skillName is required for \"load\" action',\n };\n }\n\n // Check if already loaded\n if (await skillQueries.isLoaded(options.sessionId, skillName)) {\n return {\n success: false,\n error: `Skill \"${skillName}\" is already loaded in this session`,\n };\n }\n\n // Load the skill content\n const skill = await loadSkillContent(skillName, options.skillsDirectories);\n \n if (!skill) {\n const allSkills = await loadAllSkills(options.skillsDirectories);\n return {\n success: false,\n error: `Skill \"${skillName}\" not found`,\n availableSkills: allSkills.map((s) => s.name),\n };\n }\n\n // Record that we loaded this skill\n await skillQueries.load(options.sessionId, skillName);\n\n return {\n success: true,\n action: 'load',\n skillName: skill.name,\n description: skill.description,\n content: skill.content,\n contentLength: skill.content.length,\n };\n }\n\n default:\n return {\n success: false,\n error: `Unknown action: ${action}`,\n };\n }\n } catch (error: any) {\n return {\n success: false,\n error: 
error.message,\n };\n }\n },\n });\n}\n\nexport type LoadSkillTool = ReturnType<typeof createLoadSkillTool>;\n","/**\n * Linter Tool\n * \n * Provides the agent with the ability to check files for lint/type errors\n * using the LSP (Language Server Protocol) integration.\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { resolve, relative, isAbsolute, extname } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { readdir, stat } from 'node:fs/promises';\nimport * as LSP from '../lsp/index.js';\nimport type { Diagnostic } from '../lsp/types.js';\n\nexport interface LinterToolOptions {\n workingDirectory: string;\n}\n\nconst linterInputSchema = z.object({\n paths: z\n .array(z.string())\n .optional()\n .describe('File or directory paths to check for lint errors. If not provided, returns diagnostics for all recently touched files.'),\n fix: z\n .boolean()\n .optional()\n .default(false)\n .describe('Reserved for future use: auto-fix lint errors (not yet implemented)'),\n});\n\n/**\n * Recursively find all supported files in a directory\n */\nasync function findSupportedFiles(\n dir: string,\n workingDirectory: string,\n maxFiles = 50\n): Promise<string[]> {\n const files: string[] = [];\n const supportedExtensions = LSP.getSupportedExtensions();\n\n async function walk(currentDir: string) {\n if (files.length >= maxFiles) return;\n\n try {\n const entries = await readdir(currentDir, { withFileTypes: true });\n\n for (const entry of entries) {\n if (files.length >= maxFiles) break;\n\n const fullPath = resolve(currentDir, entry.name);\n\n // Skip node_modules, .git, and other common ignore patterns\n if (entry.isDirectory()) {\n if (['node_modules', '.git', 'dist', 'build', '.next', 'coverage'].includes(entry.name)) {\n continue;\n }\n await walk(fullPath);\n } else if (entry.isFile()) {\n const ext = extname(entry.name);\n if (supportedExtensions.includes(ext)) {\n files.push(fullPath);\n }\n }\n }\n } catch {\n // Ignore permission 
errors etc.\n }\n }\n\n await walk(dir);\n return files;\n}\n\nexport function createLinterTool(options: LinterToolOptions) {\n return tool({\n description: `Check files for linting and type errors using the Language Server Protocol (LSP).\n\nSupports TypeScript, JavaScript, TSX, JSX files.\n\nUsage:\n- \\`linter({})\\` - Get diagnostics for all recently edited files\n- \\`linter({ paths: [\"src/app.ts\"] })\\` - Check specific files\n- \\`linter({ paths: [\"src/\"] })\\` - Check all supported files in a directory\n\nReturns detailed error information including line numbers, error messages, and severity.\nUse this after making changes to verify your code is correct, or proactively to find issues.\n\nWorking directory: ${options.workingDirectory}`,\n\n inputSchema: linterInputSchema,\n\n execute: async ({ paths }: z.infer<typeof linterInputSchema>) => {\n try {\n // If no paths provided, get all diagnostics from LSP\n if (!paths || paths.length === 0) {\n const allDiagnostics = await LSP.getAllDiagnostics();\n \n if (Object.keys(allDiagnostics).length === 0) {\n return {\n success: true,\n message: 'No lint errors found. No files have been analyzed yet - specify paths to check specific files.',\n files: [],\n totalErrors: 0,\n totalWarnings: 0,\n };\n }\n\n return formatDiagnosticsResult(allDiagnostics, options.workingDirectory);\n }\n\n // Process provided paths\n const filesToCheck: string[] = [];\n\n for (const path of paths) {\n const absolutePath = isAbsolute(path)\n ? 
path\n : resolve(options.workingDirectory, path);\n\n if (!existsSync(absolutePath)) {\n continue;\n }\n\n const stats = await stat(absolutePath);\n\n if (stats.isDirectory()) {\n const dirFiles = await findSupportedFiles(absolutePath, options.workingDirectory);\n filesToCheck.push(...dirFiles);\n } else if (stats.isFile()) {\n if (LSP.isSupported(absolutePath)) {\n filesToCheck.push(absolutePath);\n }\n }\n }\n\n if (filesToCheck.length === 0) {\n return {\n success: true,\n message: 'No supported files found to check. Supported extensions: ' + LSP.getSupportedExtensions().join(', '),\n files: [],\n totalErrors: 0,\n totalWarnings: 0,\n };\n }\n\n // Touch all files and wait for diagnostics\n await Promise.all(\n filesToCheck.map(file => LSP.touchFile(file, true))\n );\n\n // Collect diagnostics for all files\n const diagnosticsMap: Record<string, Diagnostic[]> = {};\n\n for (const file of filesToCheck) {\n const diagnostics = await LSP.getDiagnostics(file);\n if (diagnostics.length > 0) {\n diagnosticsMap[file] = diagnostics;\n }\n }\n\n return formatDiagnosticsResult(diagnosticsMap, options.workingDirectory);\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\n/**\n * Format diagnostics into a structured result\n */\nfunction formatDiagnosticsResult(\n diagnosticsMap: Record<string, Diagnostic[]>,\n workingDirectory: string\n) {\n let totalErrors = 0;\n let totalWarnings = 0;\n let totalInfo = 0;\n\n const files: Array<{\n path: string;\n relativePath: string;\n errors: number;\n warnings: number;\n diagnostics: Array<{\n severity: string;\n line: number;\n column: number;\n message: string;\n source?: string;\n code?: string | number;\n }>;\n }> = [];\n\n for (const [filePath, diagnostics] of Object.entries(diagnosticsMap)) {\n const relativePath = relative(workingDirectory, filePath);\n let fileErrors = 0;\n let fileWarnings = 0;\n\n const formattedDiagnostics = diagnostics.map(d => {\n const severity = 
getSeverityString(d.severity);\n \n if (d.severity === LSP.DiagnosticSeverity.Error) {\n fileErrors++;\n totalErrors++;\n } else if (d.severity === LSP.DiagnosticSeverity.Warning) {\n fileWarnings++;\n totalWarnings++;\n } else {\n totalInfo++;\n }\n\n return {\n severity,\n line: d.range.start.line + 1,\n column: d.range.start.character + 1,\n message: d.message,\n source: d.source,\n code: d.code,\n };\n });\n\n files.push({\n path: filePath,\n relativePath,\n errors: fileErrors,\n warnings: fileWarnings,\n diagnostics: formattedDiagnostics,\n });\n }\n\n // Sort by errors (most first)\n files.sort((a, b) => b.errors - a.errors);\n\n const hasIssues = totalErrors > 0 || totalWarnings > 0;\n\n return {\n success: true,\n message: hasIssues\n ? `Found ${totalErrors} error(s) and ${totalWarnings} warning(s) in ${files.length} file(s).`\n : `No lint errors found in ${Object.keys(diagnosticsMap).length || 'any'} file(s).`,\n files,\n totalErrors,\n totalWarnings,\n totalInfo,\n summary: hasIssues\n ? formatSummary(files)\n : undefined,\n };\n}\n\n/**\n * Get severity as a string\n */\nfunction getSeverityString(severity?: number): string {\n switch (severity) {\n case LSP.DiagnosticSeverity.Error:\n return 'error';\n case LSP.DiagnosticSeverity.Warning:\n return 'warning';\n case LSP.DiagnosticSeverity.Information:\n return 'info';\n case LSP.DiagnosticSeverity.Hint:\n return 'hint';\n default:\n return 'error';\n }\n}\n\n/**\n * Format a human-readable summary\n */\nfunction formatSummary(\n files: Array<{\n relativePath: string;\n diagnostics: Array<{\n severity: string;\n line: number;\n column: number;\n message: string;\n }>;\n }>\n): string {\n const lines: string[] = [];\n\n for (const file of files) {\n lines.push(`\\n${file.relativePath}:`);\n for (const d of file.diagnostics.slice(0, 10)) {\n const prefix = d.severity === 'error' ? '❌' : d.severity === 'warning' ? 
'⚠️' : 'ℹ️';\n lines.push(` ${prefix} [${d.line}:${d.column}] ${d.message}`);\n }\n if (file.diagnostics.length > 10) {\n lines.push(` ... and ${file.diagnostics.length - 10} more`);\n }\n }\n\n return lines.join('\\n');\n}\n\nexport type LinterTool = ReturnType<typeof createLinterTool>;\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { createSearchSubagent, SearchResult, SubagentProgressEvent } from '../agent/subagents/index.js';\nimport { truncateOutput } from '../utils/truncate.js';\n\nconst MAX_RESULT_CHARS = 10_000;\n\n/**\n * Progress event for the explore_agent tool (emitted via onProgress callback)\n */\nexport interface SearchToolProgress {\n status: 'started' | 'step' | 'complete' | 'error';\n subagentId?: string;\n stepType?: 'thought' | 'tool_call' | 'tool_result' | 'text';\n stepContent?: string;\n toolName?: string;\n toolInput?: unknown;\n toolOutput?: unknown;\n result?: SearchResult;\n error?: string;\n}\n\nexport interface SearchToolOptions {\n sessionId: string;\n workingDirectory: string;\n /** Callback for progress updates (for streaming to UI) */\n onProgress?: (progress: SearchToolProgress) => void | Promise<void>;\n}\n\n/**\n * Create the explore_agent tool that spawns a SearchSubagent\n * \n * This tool allows the main agent to delegate explore tasks to a specialized\n * mini-agent that uses a smaller, faster model (Gemini 3 Flash Preview).\n * \n * The subagent has access to:\n * - grep: Search for patterns in files\n * - glob: Find files by pattern\n * - read_file: Read file contents\n * - list_dir: List directory contents\n * - semantic_search: Find code by meaning (if indexed)\n * - code_graph: Inspect a symbol's type hierarchy and usage graph via the TypeScript LSP\n * \n * Progress is streamed back to the UI so users can see exploration happening.\n */\nexport function createSearchTool(options: SearchToolOptions) {\n return tool({\n description: `Delegate an explore task to the explore_agent tool. 
Use this when you need to:\n- Find files or code matching a pattern\n- Explore the codebase structure\n- Search for specific functions, classes, or variables\n- Understand how a feature is implemented\n\nThe Explore agent will explore the codebase and return a summary of findings.\nThis is more thorough than a simple grep because it can follow references and understand context.\nIt also has access to semantic search to find code by meaning, not just text.\nIt can also use code_graph to inspect a symbol's type hierarchy, references, and which pages/routes use it.\n\nCRITICAL: The explore agent has ZERO context. It cannot see the conversation, the user's message, devtools data, or any prior context. You MUST pass ALL relevant context via the \"context\" parameter. If the user selected a component (component name, file path, HTML, component stack) or there is a <devtools-context> block, you MUST copy that information into the \"context\" field verbatim. Without it the explore agent is searching blind.`,\n\n inputSchema: z.object({\n query: z.string().describe('What to search for. Be specific about what you\\'re looking for.'),\n context: z.string().describe('ALL context the explore agent needs. It has ZERO context on its own - no conversation history, no devtools data, nothing. You MUST include: any selected component info (name, file path, HTML, component stack), any <devtools-context> block (page URL, path, viewport), and any other relevant details from the user message. The explore agent literally only sees the query and this context field.'),\n }),\n\n execute: async ({ query, context }, toolOptions) => {\n const toolCallId = (toolOptions as any).toolCallId || `explore_agent_${Date.now()}`;\n \n // Emit started event\n await options.onProgress?.({\n status: 'started',\n subagentId: toolCallId,\n });\n \n try {\n const subagent = createSearchSubagent();\n \n // Build the full task including any context the main agent passed along\n // (e.g. 
selected component info, devtools page context, etc.)\n const fullTask = context \n ? `${query}\\n\\nContext: ${context}`\n : query;\n \n // Run the subagent - the context field carries all devtools/component\n // info that the main agent forwarded from the user's message\n const result = await subagent.run({\n task: fullTask,\n sessionId: options.sessionId,\n toolCallId,\n workingDirectory: options.workingDirectory,\n onProgress: async (event: SubagentProgressEvent) => {\n // Map subagent events to explore_agent tool progress\n if (event.type === 'step' && event.step) {\n await options.onProgress?.({\n status: 'step',\n subagentId: event.subagentId,\n stepType: event.step.type,\n stepContent: event.step.content,\n toolName: event.step.toolName,\n toolInput: event.step.toolInput,\n toolOutput: event.step.toolOutput,\n });\n } else if (event.type === 'complete') {\n await options.onProgress?.({\n status: 'complete',\n subagentId: event.subagentId,\n result: event.result as SearchResult,\n });\n } else if (event.type === 'error') {\n await options.onProgress?.({\n status: 'error',\n subagentId: event.subagentId,\n error: event.error,\n });\n }\n },\n });\n \n if (!result.success) {\n return {\n success: false,\n error: result.error || 'Search failed',\n executionId: result.executionId,\n };\n }\n \n const searchResult = result.result!;\n \n // Set the query on the result (parseResult doesn't have access to the original task)\n searchResult.query = query;\n \n // =========================================================\n // Format the result for the main agent\n // The summary is the most important part - it contains the\n // LLM's full analysis with file paths, code snippets, and\n // explanations collected from ALL steps.\n // =========================================================\n \n let formattedResult = '';\n \n // The summary IS the main content - it contains the explore agent's\n // full analysis with paths, snippets, and explanations\n if 
(searchResult.summary) {\n formattedResult += searchResult.summary;\n }\n \n // Add structured findings as supplementary data\n if (searchResult.findings.length > 0) {\n formattedResult += `\\n\\n### Files & Matches Found (${searchResult.findings.length} items)\\n`;\n \n for (const finding of searchResult.findings) {\n if (finding.type === 'match') {\n formattedResult += `\\n- **${finding.path}:${finding.lineNumber}** - \\`${truncateOutput(finding.content || '', 150)}\\``;\n } else if (finding.type === 'semantic') {\n formattedResult += `\\n- **${finding.path}:${finding.lineNumber}** [semantic] ${finding.context ? `(${finding.context})` : ''}`;\n if (finding.content) {\n formattedResult += `\\n \\`\\`\\`\\n ${truncateOutput(finding.content, 200)}\\n \\`\\`\\``;\n }\n } else if (finding.type === 'file') {\n formattedResult += `\\n- **${finding.path}** ${finding.context ? `(${finding.context})` : ''}`;\n }\n }\n }\n \n if (!formattedResult.trim()) {\n formattedResult = 'The explore agent ran but did not find relevant results for this query. 
Try being more specific or using different search terms.';\n }\n \n return {\n success: true,\n query: searchResult.query,\n summary: searchResult.summary,\n findings: searchResult.findings,\n matchCount: searchResult.matchCount,\n filesSearched: searchResult.filesSearched,\n formattedResult: truncateOutput(formattedResult, MAX_RESULT_CHARS),\n executionId: result.executionId,\n stepsCount: result.steps.length,\n };\n } catch (error: any) {\n await options.onProgress?.({\n status: 'error',\n error: error.message,\n });\n \n return {\n success: false,\n error: error.message,\n };\n }\n },\n });\n}\n\nexport type SearchTool = ReturnType<typeof createSearchTool>;\n","import {\n streamText,\n generateText,\n stepCountIs,\n type ToolSet,\n} from 'ai';\nimport { nanoid } from 'nanoid';\nimport { resolveModel, SUBAGENT_MODELS } from './model.js';\nimport { subagentQueries, SubagentExecution, SubagentStep } from '../db/index.js';\n\n/**\n * Progress event emitted by subagents\n */\nexport interface SubagentProgressEvent {\n type: 'step' | 'text' | 'tool_call' | 'tool_result' | 'complete' | 'error';\n subagentId: string;\n subagentType: string;\n step?: SubagentStep;\n text?: string;\n toolName?: string;\n toolInput?: unknown;\n toolOutput?: unknown;\n result?: unknown;\n error?: string;\n}\n\n/**\n * Options for running a subagent\n */\nexport interface SubagentRunOptions {\n task: string;\n sessionId: string;\n toolCallId: string;\n workingDirectory: string;\n /** Callback for progress events */\n onProgress?: (event: SubagentProgressEvent) => void | Promise<void>;\n /** Abort signal */\n abortSignal?: AbortSignal;\n /** Additional context to inject into the subagent's system prompt (e.g. 
devtools context) */\n additionalContext?: string;\n}\n\n/**\n * Result from a subagent execution\n */\nexport interface SubagentResult<T = unknown> {\n success: boolean;\n result?: T;\n error?: string;\n steps: SubagentStep[];\n executionId: string;\n}\n\n/**\n * Base class for subagents.\n * \n * Subagents are lightweight agents that perform specific tasks using smaller,\n * faster models. They're spawned by the main agent via tools and report progress\n * back to the UI.\n * \n * To create a new subagent type:\n * 1. Extend this class\n * 2. Implement `getTools()` to return the tools available to this subagent\n * 3. Implement `getSystemPrompt()` to return the system prompt\n * 4. Optionally override `parseResult()` to structure the final output\n */\nexport abstract class Subagent<TResult = unknown> {\n /** Unique identifier for this subagent type */\n abstract readonly type: string;\n \n /** Human-readable name for this subagent */\n abstract readonly name: string;\n \n /** Model to use (defaults to gemini-3-flash-preview) */\n protected model: string;\n \n /** Maximum steps before stopping */\n protected maxSteps: number = 20;\n \n constructor(model?: string) {\n this.model = model || SUBAGENT_MODELS.default;\n }\n \n /**\n * Get the tools available to this subagent\n */\n protected abstract getTools(options: SubagentRunOptions): ToolSet;\n \n /**\n * Get the system prompt for this subagent\n */\n protected abstract getSystemPrompt(options: SubagentRunOptions): string;\n \n /**\n * Parse the final result from the subagent's output.\n * Override this to structure the result for your subagent type.\n */\n protected parseResult(text: string, steps: SubagentStep[]): TResult {\n return { text, steps } as TResult;\n }\n \n /**\n * Run the subagent with streaming progress updates\n */\n async run(options: SubagentRunOptions): Promise<SubagentResult<TResult>> {\n const { task, sessionId, toolCallId, onProgress, abortSignal } = options;\n const steps: SubagentStep[] = 
[];\n \n // Create execution record in database\n const execution = await subagentQueries.create({\n sessionId,\n toolCallId,\n subagentType: this.type,\n task,\n model: this.model,\n });\n \n const addStep = async (step: Omit<SubagentStep, 'id' | 'timestamp'>) => {\n const fullStep: SubagentStep = {\n id: nanoid(8),\n timestamp: Date.now(),\n ...step,\n };\n steps.push(fullStep);\n \n // Update database\n await subagentQueries.addStep(execution.id, fullStep);\n \n // Emit progress\n await onProgress?.({\n type: 'step',\n subagentId: execution.id,\n subagentType: this.type,\n step: fullStep,\n });\n };\n \n try {\n const tools = this.getTools(options);\n const systemPrompt = this.getSystemPrompt(options);\n \n // Run the subagent\n const result = await generateText({\n model: resolveModel(this.model) as any,\n system: systemPrompt,\n messages: [\n { role: 'user', content: task }\n ],\n tools,\n stopWhen: stepCountIs(this.maxSteps),\n abortSignal,\n onStepFinish: async (step) => {\n // Record text output\n if (step.text) {\n await addStep({\n type: 'text',\n content: step.text,\n });\n await onProgress?.({\n type: 'text',\n subagentId: execution.id,\n subagentType: this.type,\n text: step.text,\n });\n }\n \n // Record tool calls\n if (step.toolCalls) {\n for (const toolCall of step.toolCalls) {\n await addStep({\n type: 'tool_call',\n content: `Calling ${toolCall.toolName}`,\n toolName: toolCall.toolName,\n toolInput: toolCall.input,\n });\n await onProgress?.({\n type: 'tool_call',\n subagentId: execution.id,\n subagentType: this.type,\n toolName: toolCall.toolName,\n toolInput: toolCall.input,\n });\n }\n }\n \n // Record tool results\n if (step.toolResults) {\n for (const toolResult of step.toolResults) {\n await addStep({\n type: 'tool_result',\n content: `Result from ${toolResult.toolName}`,\n toolName: toolResult.toolName,\n toolOutput: toolResult.output,\n });\n await onProgress?.({\n type: 'tool_result',\n subagentId: execution.id,\n subagentType: 
this.type,\n toolName: toolResult.toolName,\n toolOutput: toolResult.output,\n });\n }\n }\n },\n });\n \n // Parse the final result\n const parsedResult = this.parseResult(result.text, steps);\n \n // Mark as complete\n await subagentQueries.complete(execution.id, parsedResult);\n \n await onProgress?.({\n type: 'complete',\n subagentId: execution.id,\n subagentType: this.type,\n result: parsedResult,\n });\n \n return {\n success: true,\n result: parsedResult,\n steps,\n executionId: execution.id,\n };\n } catch (error: any) {\n const errorMessage = error.message || 'Unknown error';\n \n // Mark as error\n await subagentQueries.markError(execution.id, errorMessage);\n \n await onProgress?.({\n type: 'error',\n subagentId: execution.id,\n subagentType: this.type,\n error: errorMessage,\n });\n \n return {\n success: false,\n error: errorMessage,\n steps,\n executionId: execution.id,\n };\n }\n }\n \n /**\n * Run with streaming (for real-time progress in UI)\n */\n async *stream(options: SubagentRunOptions): AsyncGenerator<SubagentProgressEvent> {\n const events: SubagentProgressEvent[] = [];\n let resolveNext: ((event: SubagentProgressEvent | null) => void) | null = null;\n let done = false;\n \n // Queue for events\n const eventQueue: SubagentProgressEvent[] = [];\n \n // Start the run with progress callback\n const runPromise = this.run({\n ...options,\n onProgress: async (event) => {\n eventQueue.push(event);\n if (resolveNext) {\n resolveNext(eventQueue.shift()!);\n resolveNext = null;\n }\n },\n }).then((result) => {\n done = true;\n if (resolveNext) {\n resolveNext(null);\n }\n return result;\n });\n \n // Yield events as they come\n while (!done || eventQueue.length > 0) {\n if (eventQueue.length > 0) {\n yield eventQueue.shift()!;\n } else if (!done) {\n // Wait for next event\n const event = await new Promise<SubagentProgressEvent | null>((resolve) => {\n resolveNext = resolve;\n });\n if (event) {\n yield event;\n }\n }\n }\n \n // Wait for completion\n 
await runPromise;\n }\n}\n\n// Export types\nexport type { SubagentStep };\n","import { tool, type ToolSet } from 'ai';\nimport { z } from 'zod';\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { readFile, stat, readdir } from 'node:fs/promises';\nimport { resolve, relative, isAbsolute } from 'node:path';\nimport { existsSync } from 'node:fs';\nimport { Subagent, SubagentRunOptions, SubagentStep } from '../subagent.js';\nimport { SUBAGENT_MODELS } from '../model.js';\nimport { truncateOutput } from '../../utils/truncate.js';\nimport { isVectorGatewayConfigured, checkIndexExists } from '../../semantic/index.js';\nimport { createCodeGraphTool } from '../../tools/code-graph.js';\n\nconst execAsync = promisify(exec);\n\nconst MAX_OUTPUT_CHARS = 10_000;\nconst MAX_FILE_SIZE = 1 * 1024 * 1024; // 1MB for explore subagent\nconst MAX_SUMMARY_CHARS = 6_000; // Cap summary returned to main agent\nconst MAX_FINDINGS = 20; // Cap findings returned\n\n/**\n * Search result from the Explore agent\n */\nexport interface SearchResult {\n query: string;\n summary: string;\n findings: SearchFinding[];\n filesSearched: number;\n matchCount: number;\n}\n\nexport interface SearchFinding {\n type: 'file' | 'match' | 'semantic';\n path: string;\n content?: string;\n lineNumber?: number;\n relevance: 'high' | 'medium' | 'low';\n context?: string;\n}\n\n/**\n * SearchSubagent - A mini-agent specialized for exploring codebases.\n * \n * Uses a smaller, faster model (Gemini 3 Flash Preview) and has access to:\n * - grep: Search for patterns in files\n * - glob: Find files by pattern\n * - read_file: Read file contents\n * - list_dir: List directory contents\n * - semantic_search: Find code by meaning (if indexed)\n * - code_graph: Inspect a symbol's type hierarchy and usage graph via the TypeScript LSP\n * \n * Returns structured, concise search results to the main agent.\n * \n * IMPORTANT: The summary collects ALL text the LLM generates across 
all steps,\n * not just the final step. This ensures the analysis and discoveries are preserved\n * and sent back to the main agent.\n */\nexport class SearchSubagent extends Subagent<SearchResult> {\n readonly type = 'search';\n readonly name = 'Explore Agent';\n \n constructor(model?: string) {\n super(model || SUBAGENT_MODELS.search);\n this.maxSteps = 10; // Enough steps for thorough exploration\n }\n \n protected getSystemPrompt(options: SubagentRunOptions): string {\n const contextBlock = options.additionalContext\n ? `\\n## Active Context (from devtools)\\n${options.additionalContext}\\n\\nUse this context to understand what the user is currently looking at. If they reference \"this page\", \"this component\", \"this screen\", etc., this context tells you which page/route they mean. Search for files related to this page/route first.\\n`\n : '';\n\n return `You are an Explore agent - a READ-ONLY file search and code analysis specialist. Your job is to find relevant files, understand code structure, and return clear findings to the main agent. You MUST NOT create, modify, or delete any files.\n\nWorking Directory: ${options.workingDirectory}\n${contextBlock}\n## Available Tools\n- **semantic_search**: Search the codebase by meaning (finds code related to a concept). Use this FIRST when available - it's the fastest way to explore.\n- **grep**: Search for exact patterns in files using ripgrep. Best for exact symbol/string matches.\n- **glob**: Find files matching a name pattern. Best for file discovery.\n- **read_file**: Read contents of a specific file. Use to examine code found in searches.\n- **list_dir**: List directory contents. Use to understand project structure.\n- **code_graph**: Inspect a symbol's type hierarchy, references, and usage graph via the TypeScript language server. Returns type signatures, all files that reference the symbol, and which pages/routes contain it. 
Best for understanding component/function relationships and impact analysis.\n\n## Search Strategy\n\n1. **Start with semantic_search** if available - it finds code by meaning, which is the fastest way to explore\n2. **Use grep** for exact symbol/string matches (function names, class names, imports)\n3. **Use code_graph** when you need to understand a symbol's type signature, what depends on it, or which pages use it. It's much more precise than grep for understanding relationships.\n4. **Use glob** for file discovery by name patterns\n5. **Read key files** to get actual code content and understand context\n6. **Run searches in PARALLEL** - make multiple tool calls at once to cover different angles simultaneously. This is critical for speed.\n\n### Tool Selection Guide\n- Know the exact name? Use **grep** (e.g. \\`getUserById\\`, \\`class AuthService\\`)\n- Need type info, references, or impact analysis? Use **code_graph** (e.g. \\`code_graph({ symbol: \"UserCard\" })\\`)\n- Exploring a concept? Use **semantic_search** (e.g. \"how does authentication work\")\n- Looking for files? Use **glob** (e.g. \\`**/*.config.ts\\`, \\`**/auth/**\\`)\n- Need file content? Use **read_file** with optional line ranges for large files\n\n## CRITICAL: Your text output IS the result\n\nThe text you write in your responses is what gets sent back to the main agent as the summary. The main agent has NO other context about what you found. You MUST include:\n- **Exact file paths** (relative to working directory)\n- **Line numbers** where relevant code is found\n- **Key code snippets** showing the important parts\n- **How pieces connect together** - explain relationships between files\n- **What you actually found**, not just metadata\n\nALWAYS end with a comprehensive summary of your findings. This summary IS the data that the main agent receives.\n\n## Constraints\n- You are READ-ONLY. Do NOT attempt to create, write, edit, or delete files.\n- Be fast. Return results quickly. 
Don't over-explore when you've found what's needed.\n- Be thorough but efficient. If you find the answer in 2 searches, stop there.\n- No emojis in output.\n\n## Example good output:\n\"Found the authentication system in src/auth/:\n- \\`src/auth/provider.ts\\` (lines 15-45): AuthProvider class handles JWT token validation\n- \\`src/auth/middleware.ts\\` (lines 8-20): Express middleware that checks auth headers\n- \\`src/auth/hooks.ts\\`: useAuth() hook used by React components\nThe auth flow: middleware validates JWT -> provider decodes token -> user context set\"\n\n## Example bad output:\n\"I searched for auth and found some files.\" (too vague, no paths, no details)\n\nKeep it concise but INCLUDE THE ACTUAL DATA.`;\n }\n \n protected async getToolsAsync(options: SubagentRunOptions): Promise<ToolSet> {\n const workingDirectory = options.workingDirectory;\n \n const tools: ToolSet = {\n grep: tool({\n description: 'Search for patterns in files using ripgrep. Returns matching lines with file paths and line numbers.',\n inputSchema: z.object({\n pattern: z.string().describe('The regex pattern to search for'),\n path: z.string().optional().describe('Subdirectory or file to search in (relative to working directory)'),\n fileType: z.string().optional().describe('File type to filter (e.g., \"ts\", \"js\", \"py\")'),\n maxResults: z.number().optional().default(50).describe('Maximum number of results to return'),\n }),\n execute: async ({ pattern, path, fileType, maxResults }) => {\n try {\n const searchPath = path \n ? 
resolve(workingDirectory, path) \n : workingDirectory;\n \n let args = ['rg', '--line-number', '--no-heading'];\n \n if (fileType) {\n args.push('--type', fileType);\n }\n \n args.push('--max-count', String(maxResults || 50));\n args.push('--', pattern, searchPath);\n \n const { stdout, stderr } = await execAsync(args.join(' '), {\n cwd: workingDirectory,\n maxBuffer: 5 * 1024 * 1024,\n timeout: 30000,\n });\n \n const output = truncateOutput(stdout || 'No matches found', MAX_OUTPUT_CHARS);\n const matchCount = (stdout || '').split('\\n').filter(Boolean).length;\n \n return {\n success: true,\n output,\n matchCount,\n pattern,\n };\n } catch (error: any) {\n // rg returns exit code 1 when no matches found\n if (error.code === 1 && !error.stderr) {\n return {\n success: true,\n output: 'No matches found',\n matchCount: 0,\n pattern,\n };\n }\n return {\n success: false,\n error: error.message,\n pattern,\n };\n }\n },\n }),\n \n glob: tool({\n description: 'Find files matching a glob pattern. Returns list of matching file paths.',\n inputSchema: z.object({\n pattern: z.string().describe('Glob pattern (e.g., \"**/*.ts\", \"src/**/*.tsx\", \"*.json\")'),\n maxResults: z.number().optional().default(100).describe('Maximum number of files to return'),\n }),\n execute: async ({ pattern, maxResults }) => {\n try {\n // Use find command with pattern matching\n const { stdout } = await execAsync(\n `find . -type f -name \"${pattern.replace('**/', '')}\" 2>/dev/null | head -n ${maxResults || 100}`,\n {\n cwd: workingDirectory,\n timeout: 30000,\n }\n );\n \n const files = stdout.trim().split('\\n').filter(Boolean);\n \n return {\n success: true,\n files,\n count: files.length,\n pattern,\n };\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n pattern,\n };\n }\n },\n }),\n \n read_file: tool({\n description: 'Read the contents of a file. 
Use this to examine specific files found in search.',\n inputSchema: z.object({\n path: z.string().describe('Path to the file (relative to working directory or absolute)'),\n startLine: z.number().optional().describe('Start reading from this line (1-indexed)'),\n endLine: z.number().optional().describe('Stop reading at this line (1-indexed, inclusive)'),\n }),\n execute: async ({ path, startLine, endLine }) => {\n try {\n const absolutePath = isAbsolute(path)\n ? path\n : resolve(workingDirectory, path);\n \n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `File not found: ${path}`,\n };\n }\n \n const stats = await stat(absolutePath);\n if (stats.size > MAX_FILE_SIZE) {\n return {\n success: false,\n error: `File too large (${(stats.size / 1024 / 1024).toFixed(2)}MB). Use startLine/endLine to read portions.`,\n };\n }\n \n let content = await readFile(absolutePath, 'utf-8');\n \n if (startLine !== undefined || endLine !== undefined) {\n const lines = content.split('\\n');\n const start = (startLine ?? 1) - 1;\n const end = endLine ?? lines.length;\n content = lines.slice(start, end).join('\\n');\n }\n \n return {\n success: true,\n path: relative(workingDirectory, absolutePath),\n content: truncateOutput(content, MAX_OUTPUT_CHARS),\n lineCount: content.split('\\n').length,\n };\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n }),\n \n list_dir: tool({\n description: 'List contents of a directory. Shows files and subdirectories.',\n inputSchema: z.object({\n path: z.string().optional().default('.').describe('Directory path (relative to working directory)'),\n recursive: z.boolean().optional().default(false).describe('List recursively (be careful with large directories)'),\n maxDepth: z.number().optional().default(2).describe('Maximum depth for recursive listing'),\n }),\n execute: async ({ path, recursive, maxDepth }) => {\n try {\n const absolutePath = isAbsolute(path)\n ? 
path\n : resolve(workingDirectory, path);\n \n if (!existsSync(absolutePath)) {\n return {\n success: false,\n error: `Directory not found: ${path}`,\n };\n }\n \n const stats = await stat(absolutePath);\n if (!stats.isDirectory()) {\n return {\n success: false,\n error: `Not a directory: ${path}`,\n };\n }\n \n if (recursive) {\n // Use find for recursive listing\n const { stdout } = await execAsync(\n `find . -maxdepth ${maxDepth} -type f 2>/dev/null | head -n 200`,\n {\n cwd: absolutePath,\n timeout: 10000,\n }\n );\n \n const files = stdout.trim().split('\\n').filter(Boolean);\n return {\n success: true,\n path: relative(workingDirectory, absolutePath) || '.',\n files,\n count: files.length,\n recursive: true,\n };\n } else {\n const entries = await readdir(absolutePath, { withFileTypes: true });\n const items = entries.slice(0, 200).map(e => ({\n name: e.name,\n type: e.isDirectory() ? 'directory' : 'file',\n }));\n \n return {\n success: true,\n path: relative(workingDirectory, absolutePath) || '.',\n items,\n count: items.length,\n };\n }\n } catch (error: any) {\n return {\n success: false,\n error: error.message,\n };\n }\n },\n }),\n code_graph: createCodeGraphTool({\n workingDirectory,\n }),\n };\n \n // Add semantic_search if vector gateway is configured and index exists\n try {\n if (isVectorGatewayConfigured()) {\n const hasIndex = await checkIndexExists(workingDirectory);\n if (hasIndex) {\n const { createSemanticSearchTool } = await import('../../tools/semantic-search.js');\n tools.semantic_search = createSemanticSearchTool({\n workingDirectory,\n });\n }\n }\n } catch {\n // Silently skip semantic search if unavailable\n }\n \n return tools;\n }\n \n // Override getTools to return sync version (base class requires sync)\n // The actual async tool loading happens in the run() override\n protected getTools(options: SubagentRunOptions): ToolSet {\n // This won't be called directly - we override run() to use getToolsAsync()\n return {};\n }\n \n /**\n 
* Override run to use async tool loading (for semantic search availability check)\n */\n async run(options: SubagentRunOptions): Promise<import('../subagent.js').SubagentResult<SearchResult>> {\n // Load tools asynchronously (to check semantic search availability)\n const asyncTools = await this.getToolsAsync(options);\n \n // Temporarily override getTools to return our async-loaded tools\n const originalGetTools = this.getTools.bind(this);\n this.getTools = () => asyncTools;\n \n try {\n return await super.run(options);\n } finally {\n this.getTools = originalGetTools;\n }\n }\n \n protected parseResult(text: string, steps: SubagentStep[]): SearchResult {\n // =========================================================\n // KEY FIX: Collect ALL text from ALL steps as the summary.\n // \n // In a multi-step agent, result.text is only the LAST step's text.\n // If the last step was tool calls, result.text is empty.\n // The LLM's analysis (where it explains what it found) happens\n // in intermediate text steps - we need ALL of it.\n // =========================================================\n \n const allTexts: string[] = [];\n \n for (const step of steps) {\n if (step.type === 'text' && step.content) {\n allTexts.push(step.content);\n }\n }\n \n // Use the collected text from all steps, falling back to result.text\n const fullSummary = allTexts.length > 0 \n ? 
allTexts.join('\\n\\n')\n : text;\n \n // Extract structured findings from tool results\n const findings: SearchFinding[] = [];\n let filesSearched = 0;\n let matchCount = 0;\n \n for (const step of steps) {\n if (step.type === 'tool_result' && step.toolOutput) {\n const output = step.toolOutput as any;\n \n if (step.toolName === 'grep' && output.success) {\n matchCount += output.matchCount || 0;\n \n // Parse grep output to extract findings with content\n const lines = (output.output || '').split('\\n').filter(Boolean);\n for (const line of lines.slice(0, 20)) {\n // Format: path:line:content\n const match = line.match(/^([^:]+):(\\d+):(.*)$/);\n if (match) {\n findings.push({\n type: 'match',\n path: match[1],\n lineNumber: parseInt(match[2], 10),\n content: match[3].trim(),\n relevance: 'high',\n });\n }\n }\n } else if (step.toolName === 'glob' && output.success) {\n filesSearched += output.count || 0;\n \n for (const file of (output.files || []).slice(0, 15)) {\n findings.push({\n type: 'file',\n path: file,\n relevance: 'medium',\n });\n }\n } else if (step.toolName === 'read_file' && output.success) {\n // Include actual content snippet from read files\n const snippet = output.content \n ? truncateOutput(output.content, 500)\n : undefined;\n \n findings.push({\n type: 'file',\n path: output.path,\n content: snippet,\n relevance: 'high',\n context: `${output.lineCount} lines`,\n });\n } else if (step.toolName === 'semantic_search' && output.success) {\n // Include semantic search results\n const matches = output.matches || [];\n for (const m of matches.slice(0, 10)) {\n findings.push({\n type: 'semantic',\n path: m.filePath,\n lineNumber: m.startLine,\n content: m.snippet ? truncateOutput(m.snippet, 300) : undefined,\n relevance: m.score > 0.8 ? 'high' : m.score > 0.5 ? 
'medium' : 'low',\n context: m.symbolName || m.language,\n });\n }\n } else if (step.toolName === 'code_graph' && output.success) {\n // Include code_graph results — the definition file + referenced pages\n matchCount += output.referenceCount || 0;\n \n if (output.filePath) {\n findings.push({\n type: 'file',\n path: output.filePath,\n lineNumber: output.line,\n content: output.typeInfo ? truncateOutput(output.typeInfo, 300) : undefined,\n relevance: 'high',\n context: `${output.kind || 'symbol'}${output.referenceCount ? `, ${output.referenceCount} refs` : ''}`,\n });\n }\n \n // Add page files as high-relevance findings\n for (const page of (output.pages || []).slice(0, 10)) {\n findings.push({\n type: 'file',\n path: page.path,\n relevance: 'high',\n context: page.route ? `route: ${page.route}` : 'page',\n });\n }\n }\n }\n }\n \n // Truncate summary if too long, but preserve the content\n const cappedSummary = truncateOutput(fullSummary, MAX_SUMMARY_CHARS);\n \n return {\n query: '', // Set by the caller (search tool) from the original task\n summary: cappedSummary,\n findings: findings.slice(0, MAX_FINDINGS),\n filesSearched,\n matchCount,\n };\n }\n}\n\n// Factory function\nexport function createSearchSubagent(model?: string): SearchSubagent {\n return new SearchSubagent(model);\n}\n","/**\n * Code Graph Tool\n * \n * Uses the TypeScript language server to inspect a symbol's type hierarchy\n * and usage graph. 
Given a symbol name, traverses references up the component\n * tree to find which pages/routes contain it, along with type signatures.\n * \n * This gives the agent structured context about code relationships without\n * overloading it with full file contents.\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { resolve, relative, isAbsolute, basename } from 'node:path';\nimport { readFile, readdir } from 'node:fs/promises';\nimport { existsSync } from 'node:fs';\nimport { fileURLToPath } from 'node:url';\nimport { execFileSync } from 'node:child_process';\nimport * as LSP from '../lsp/index.js';\nimport type { DocumentSymbol, Location } from '../lsp/types.js';\nimport { SymbolKind } from '../lsp/types.js';\n\nexport interface CodeGraphToolOptions {\n workingDirectory: string;\n}\n\nconst codeGraphInputSchema = z.object({\n symbol: z.string().describe(\n 'The symbol name to inspect (function, component, class, type, variable, etc.)'\n ),\n filePath: z.string().optional().describe(\n 'File path where the symbol is defined. If omitted, searches the workspace via grep.'\n ),\n depth: z.number().optional().default(2).describe(\n 'How many levels of references to traverse upward (default: 2, max: 3). Level 1 = direct usages, level 2 = usages of those usages.'\n ),\n});\n\n// ============================================================\n// Helpers\n// ============================================================\n\n/** Check if a file is a Next.js/React page or layout file */\nfunction isPageFile(filePath: string): boolean {\n const normalized = filePath.replace(/\\\\/g, '/');\n // App Router: app/page.tsx, app/team/page.tsx, app/admin/users/layout.tsx, etc.\n // The (.+\\/)? 
makes the intermediate path optional (handles root app/page.tsx)\n if (/\\/app\\/(.+\\/)?(page|layout|loading|error|not-found)\\.(tsx?|jsx?)$/.test(normalized)) return true;\n // Pages Router: any file under pages/ except _app, _document, _error, and api/\n if (/\\/pages\\/(?!_|api\\/).+\\.(tsx?|jsx?)$/.test(normalized)) return true;\n return false;\n}\n\n/** Extract route path from a page file path (e.g. app/team/page.tsx → /team) */\nfunction extractRoutePath(filePath: string, workingDirectory: string): string | undefined {\n const rel = relative(workingDirectory, filePath).replace(/\\\\/g, '/');\n // App Router: app/page.tsx → /, app/team/page.tsx → /team\n // Use ((?:\\/[^/]+)*?) to optionally capture path segments between app/ and page.tsx\n const appMatch = rel.match(/(?:src\\/)?app((?:\\/[^/]+)*?)\\/(?:page|layout|loading|error|not-found)\\.\\w+$/);\n if (appMatch) return appMatch[1] || '/';\n // Pages Router: pages/about.tsx → /about, pages/blog/[slug].tsx → /blog/[slug]\n const pagesMatch = rel.match(/(?:src\\/)?pages(\\/.*?)(?:\\/index)?\\.\\w+$/);\n if (pagesMatch) return pagesMatch[1] || '/';\n return undefined;\n}\n\n/** Human-readable symbol kind */\nfunction symbolKindName(kind: SymbolKind): string {\n const names: Record<number, string> = {\n [SymbolKind.Class]: 'class',\n [SymbolKind.Function]: 'function',\n [SymbolKind.Method]: 'method',\n [SymbolKind.Property]: 'property',\n [SymbolKind.Variable]: 'variable',\n [SymbolKind.Interface]: 'interface',\n [SymbolKind.Enum]: 'enum',\n [SymbolKind.Constant]: 'constant',\n [SymbolKind.Constructor]: 'constructor',\n [SymbolKind.Module]: 'module',\n [SymbolKind.Namespace]: 'namespace',\n [SymbolKind.TypeParameter]: 'type_param',\n [SymbolKind.Field]: 'field',\n [SymbolKind.EnumMember]: 'enum_member',\n [SymbolKind.Object]: 'object',\n };\n return names[kind] || 'symbol';\n}\n\n/** Find the tightest-enclosing symbol for a position in a document symbol tree.\n * LSP ranges are [start, end) — end position is 
exclusive. */\nfunction findContainingSymbol(\n symbols: DocumentSymbol[],\n line: number,\n character: number,\n): DocumentSymbol | null {\n for (const sym of symbols) {\n if (!sym.range) continue; // guard against malformed symbols\n const { start, end } = sym.range;\n const afterStart = line > start.line || (line === start.line && character >= start.character);\n // end is exclusive: position must be strictly before end\n const beforeEnd = line < end.line || (line === end.line && character < end.character);\n if (afterStart && beforeEnd) {\n if (sym.children?.length) {\n const child = findContainingSymbol(sym.children, line, character);\n if (child) return child;\n }\n return sym;\n }\n }\n return null;\n}\n\n/** Find a symbol by name anywhere in the document symbol tree */\nfunction findSymbolByName(symbols: DocumentSymbol[], name: string): DocumentSymbol | null {\n for (const sym of symbols) {\n if (sym.name === name && sym.selectionRange) return sym;\n if (sym.children) {\n const found = findSymbolByName(sym.children, name);\n if (found) return found;\n }\n }\n return null;\n}\n\n/** Strip markdown code fences from hover output */\nfunction cleanHoverText(text: string): string {\n return text.replace(/```\\w*\\n?/g, '').replace(/\\n```/g, '').trim();\n}\n\n/** Use ripgrep to find a symbol definition in the workspace (with Node.js fallback) */\nasync function grepForSymbol(symbol: string, workingDirectory: string): Promise<{ filePath: string; line: number; char: number } | null> {\n // Escape regex-special chars in the symbol name\n const escaped = symbol.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n\n // Try ripgrep first (fast)\n const rgPatterns = [\n `(export\\\\s+)?(default\\\\s+)?(function|const|let|var|class|interface|type|enum)\\\\s+${escaped}\\\\b`,\n `(export\\\\s+)?(default\\\\s+)?\\\\b${escaped}\\\\s*[=:(]`,\n ];\n\n for (const pattern of rgPatterns) {\n try {\n const result = execFileSync('rg', [\n '-n', '--no-heading',\n '-e', pattern,\n 
'--glob', '*.{ts,tsx,js,jsx}',\n '-m', '5',\n ], {\n cwd: workingDirectory,\n encoding: 'utf-8',\n timeout: 5000,\n stdio: ['pipe', 'pipe', 'pipe'],\n }).trim();\n\n if (result) {\n const firstLine = result.split('\\n')[0];\n const match = firstLine.match(/^(.+?):(\\d+):(.*)/);\n if (match) {\n const col = match[3].indexOf(symbol);\n return {\n filePath: resolve(workingDirectory, match[1]),\n line: parseInt(match[2]) - 1,\n char: col >= 0 ? col : 0,\n };\n }\n }\n } catch {\n // rg not found or no matches — fall through\n }\n }\n\n // Fallback: recursive Node.js search (handles environments without rg)\n const defPattern = new RegExp(\n `(export|function|const|let|var|class|interface|type|enum)\\\\s+.*\\\\b${escaped}\\\\b`\n );\n const SUPPORTED_EXTS = new Set(['.ts', '.tsx', '.js', '.jsx']);\n const IGNORED_DIRS = new Set(['node_modules', '.git', 'dist', 'build', '.next', 'coverage']);\n\n async function search(dir: string, maxFiles: number): Promise<{ filePath: string; line: number; char: number } | null> {\n if (maxFiles <= 0) return null;\n let remaining = maxFiles;\n\n try {\n const entries = await readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (remaining <= 0) return null;\n const fullPath = resolve(dir, entry.name);\n\n if (entry.isDirectory()) {\n if (IGNORED_DIRS.has(entry.name) || entry.name.startsWith('.')) continue;\n const found = await search(fullPath, remaining);\n if (found) return found;\n remaining -= 10; // estimate\n } else if (entry.isFile()) {\n const ext = entry.name.substring(entry.name.lastIndexOf('.'));\n if (!SUPPORTED_EXTS.has(ext)) continue;\n remaining--;\n\n const content = await readFile(fullPath, 'utf-8');\n const lines = content.split('\\n');\n for (let i = 0; i < lines.length; i++) {\n if (defPattern.test(lines[i])) {\n const col = lines[i].indexOf(symbol);\n if (col >= 0) {\n return { filePath: fullPath, line: i, char: col };\n }\n }\n }\n }\n }\n } catch { /* permission errors etc */ }\n return 
null;\n }\n\n return search(workingDirectory, 200);\n}\n\n// ============================================================\n// Main tool\n// ============================================================\n\nconst MAX_REF_FILES = 15;\nconst MAX_LEVEL2_PARENTS = 8;\nconst MAX_LEVEL2_SYMBOLS_PER_PARENT = 3;\n\nexport function createCodeGraphTool(options: CodeGraphToolOptions) {\n return tool({\n description: `Inspect a symbol's type information and usage graph using the TypeScript language server.\n\nGiven a symbol name (function, component, class, type, etc.), this tool will:\n1. Find its definition and full type signature (parameters, return type)\n2. Find all references — what components/functions/files use this symbol\n3. Identify which pages/routes contain it in their component tree\n4. Show the file's symbol structure for surrounding context\n\nUse this to understand:\n- Component hierarchies (what renders what, which pages are affected)\n- Type signatures and parameter/return types before making changes\n- How deeply a symbol is used across the codebase\n- What will break if you change something\n\nSupports TypeScript, JavaScript, TSX, JSX files.\nWorking directory: ${options.workingDirectory}`,\n\n inputSchema: codeGraphInputSchema,\n\n execute: async ({ symbol, filePath, depth }: z.infer<typeof codeGraphInputSchema>) => {\n const maxDepth = Math.min(depth ?? 2, 3);\n\n try {\n // ========================================\n // Step 1: Locate the symbol's definition\n // ========================================\n let defFilePath: string | undefined;\n let defLine = 0;\n let defChar = 0;\n let defSymbol: DocumentSymbol | null = null;\n\n if (filePath) {\n const absPath = isAbsolute(filePath)\n ? filePath\n : resolve(options.workingDirectory, filePath);\n\n if (!existsSync(absPath)) {\n return { success: false, error: `File not found: ${filePath}` };\n }\n\n if (!LSP.isSupported(absPath)) {\n return { success: false, error: `File type not supported. 
Supports: ${LSP.getSupportedExtensions().join(', ')}` };\n }\n\n // Open file in LSP and wait for it to be processed\n await LSP.touchFile(absPath, true);\n const symbols = await LSP.getDocumentSymbols(absPath);\n defSymbol = findSymbolByName(symbols, symbol);\n\n if (defSymbol) {\n defFilePath = absPath;\n defLine = defSymbol.selectionRange.start.line;\n defChar = defSymbol.selectionRange.start.character;\n } else {\n // Fall back to text search in the file\n const content = await readFile(absPath, 'utf-8');\n const lines = content.split('\\n');\n // First pass: look for definition patterns\n const defPattern = new RegExp(\n `(export|function|const|let|var|class|interface|type|enum)\\\\s+.*\\\\b${symbol.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')}\\\\b`\n );\n for (let i = 0; i < lines.length; i++) {\n if (defPattern.test(lines[i])) {\n const col = lines[i].indexOf(symbol);\n if (col !== -1) {\n defFilePath = absPath;\n defLine = i;\n defChar = col;\n break;\n }\n }\n }\n // Second pass: any occurrence\n if (!defFilePath) {\n for (let i = 0; i < lines.length; i++) {\n const col = lines[i].indexOf(symbol);\n if (col !== -1) {\n defFilePath = absPath;\n defLine = i;\n defChar = col;\n break;\n }\n }\n }\n }\n } else {\n // No file path — search workspace\n const found = await grepForSymbol(symbol, options.workingDirectory);\n if (found) {\n defFilePath = found.filePath;\n defLine = found.line;\n defChar = found.char;\n }\n }\n\n if (!defFilePath) {\n return {\n success: false,\n error: `Could not find symbol \"${symbol}\" in the codebase. Try providing a filePath.`,\n };\n }\n\n // Ensure the file is open in LSP and processed\n await LSP.touchFile(defFilePath, true);\n\n // ========================================\n // Step 2: Get type info via hover\n // ========================================\n const rawHover = await LSP.getHover(defFilePath, defLine, defChar);\n const typeInfo = rawHover ? 
cleanHoverText(rawHover) : null;\n\n // ========================================\n // Step 3: Get document symbols for file structure\n // ========================================\n const fileSymbols = await LSP.getDocumentSymbols(defFilePath);\n if (!defSymbol && fileSymbols.length > 0) {\n defSymbol = findSymbolByName(fileSymbols, symbol);\n }\n\n // ========================================\n // Step 4: Get references (upstream — who uses this)\n // ========================================\n const references = await LSP.getReferences(defFilePath, defLine, defChar, false);\n\n // Group references by file\n const refsByFile = new Map<string, Location[]>();\n for (const ref of references) {\n const refPath = fileURLToPath(ref.uri);\n if (!refsByFile.has(refPath)) {\n refsByFile.set(refPath, []);\n }\n refsByFile.get(refPath)!.push(ref);\n }\n\n // ========================================\n // Step 5: Process each reference file\n // ========================================\n interface RefFileInfo {\n filePath: string;\n relativePath: string;\n isPage: boolean;\n routePath?: string;\n containingSymbols: Array<{\n name: string;\n kind: string;\n line: number;\n char: number;\n typeInfo?: string;\n }>;\n }\n\n const refFileInfos: RefFileInfo[] = [];\n let processed = 0;\n\n for (const [refPath, locs] of refsByFile) {\n if (processed >= MAX_REF_FILES) break;\n // Skip the definition file (self-references)\n if (refPath === defFilePath) continue;\n processed++;\n\n const relPath = relative(options.workingDirectory, refPath);\n const pageFile = isPageFile(refPath);\n const routePath = pageFile ? 
extractRoutePath(refPath, options.workingDirectory) : undefined;\n\n // Open file + get symbols\n await LSP.touchFile(refPath, false);\n const refFileSymbols = await LSP.getDocumentSymbols(refPath);\n\n const seen = new Map<string, { name: string; kind: string; line: number; char: number; typeInfo?: string }>();\n\n for (const loc of locs) {\n const container = findContainingSymbol(\n refFileSymbols,\n loc.range.start.line,\n loc.range.start.character,\n );\n if (container && !seen.has(container.name)) {\n // Get type info for the containing symbol (concise — just first line)\n let containerHover: string | null = null;\n try {\n const raw = await LSP.getHover(\n refPath,\n container.selectionRange.start.line,\n container.selectionRange.start.character,\n );\n if (raw) containerHover = cleanHoverText(raw).split('\\n')[0];\n } catch { /* skip */ }\n\n seen.set(container.name, {\n name: container.name,\n kind: symbolKindName(container.kind),\n line: container.selectionRange.start.line + 1,\n char: container.selectionRange.start.character,\n typeInfo: containerHover || undefined,\n });\n }\n }\n\n refFileInfos.push({\n filePath: refPath,\n relativePath: relPath,\n isPage: pageFile,\n routePath,\n containingSymbols: Array.from(seen.values()),\n });\n }\n\n // ========================================\n // Step 6: Level-2 references (usages of the usages)\n // ========================================\n interface Level2Ref {\n parentSymbol: string;\n parentFile: string;\n refs: Array<{\n relativePath: string;\n isPage: boolean;\n routePath?: string;\n containingSymbol?: string;\n }>;\n }\n\n const level2Refs: Level2Ref[] = [];\n\n if (maxDepth >= 2) {\n for (const refFile of refFileInfos.slice(0, MAX_LEVEL2_PARENTS)) {\n for (const sym of refFile.containingSymbols.slice(0, MAX_LEVEL2_SYMBOLS_PER_PARENT)) {\n try {\n // Use the stored selectionRange position — no need to re-search\n const symLineIdx = sym.line - 1; // convert 1-indexed back to 0-indexed\n const symChar = 
sym.char;\n\n const l2Locations = await LSP.getReferences(\n refFile.filePath, symLineIdx, symChar, false,\n );\n\n const l2Nodes: Level2Ref['refs'] = [];\n const seenPaths = new Set<string>();\n\n for (const loc of l2Locations.slice(0, 10)) {\n const l2Path = fileURLToPath(loc.uri);\n if (l2Path === refFile.filePath || l2Path === defFilePath) continue;\n if (seenPaths.has(l2Path)) continue;\n seenPaths.add(l2Path);\n\n const l2Rel = relative(options.workingDirectory, l2Path);\n const l2Page = isPageFile(l2Path);\n const l2Route = l2Page ? extractRoutePath(l2Path, options.workingDirectory) : undefined;\n\n // Optionally find containing symbol\n let containerName: string | undefined;\n try {\n await LSP.touchFile(l2Path, false);\n const l2Symbols = await LSP.getDocumentSymbols(l2Path);\n const container = findContainingSymbol(l2Symbols, loc.range.start.line, loc.range.start.character);\n if (container) containerName = container.name;\n } catch { /* skip */ }\n\n l2Nodes.push({\n relativePath: l2Rel,\n isPage: l2Page,\n routePath: l2Route,\n containingSymbol: containerName,\n });\n }\n\n if (l2Nodes.length > 0) {\n level2Refs.push({\n parentSymbol: sym.name,\n parentFile: refFile.relativePath,\n refs: l2Nodes,\n });\n }\n } catch { /* skip level-2 errors */ }\n }\n }\n }\n\n // ========================================\n // Step 7: Format output for the agent\n // ========================================\n const relDefPath = relative(options.workingDirectory, defFilePath);\n const lines: string[] = [];\n\n // -- Symbol header --\n lines.push(`=== ${symbol} ===`);\n lines.push(`File: ${relDefPath}:${defLine + 1}`);\n if (defSymbol) lines.push(`Kind: ${symbolKindName(defSymbol.kind)}`);\n if (typeInfo) lines.push(`Type: ${typeInfo}`);\n\n // -- References --\n const externalRefCount = references.filter(r => fileURLToPath(r.uri) !== defFilePath).length;\n const externalFileCount = refsByFile.size - (refsByFile.has(defFilePath!) ? 
1 : 0);\n\n if (refFileInfos.length > 0) {\n lines.push('');\n lines.push(`=== Referenced by (${externalRefCount} usages across ${externalFileCount} files) ===`);\n\n const pages = refFileInfos.filter(r => r.isPage);\n const nonPages = refFileInfos.filter(r => !r.isPage);\n\n if (pages.length > 0) {\n lines.push('');\n lines.push('Pages/Routes:');\n for (const page of pages) {\n lines.push(` ${page.relativePath}${page.routePath ? ` → ${page.routePath}` : ''}`);\n for (const s of page.containingSymbols) {\n lines.push(` └── ${s.name} (${s.kind}:${s.line})${s.typeInfo ? ` — ${s.typeInfo}` : ''}`);\n }\n }\n }\n\n if (nonPages.length > 0) {\n lines.push('');\n lines.push('Components/Functions:');\n for (const ref of nonPages) {\n lines.push(` ${ref.relativePath}`);\n for (const s of ref.containingSymbols) {\n const typePart = s.typeInfo && s.typeInfo.length < 120 ? ` — ${s.typeInfo}` : '';\n lines.push(` └── ${s.name} (${s.kind}:${s.line})${typePart}`);\n }\n }\n }\n } else {\n lines.push('');\n lines.push('No external references found (symbol may be unused or only used within the same file).');\n }\n\n // -- Level 2 refs --\n if (level2Refs.length > 0) {\n lines.push('');\n lines.push('=== Extended tree (level 2) ===');\n for (const l2 of level2Refs) {\n lines.push('');\n lines.push(`${l2.parentSymbol} (${l2.parentFile}) is used by:`);\n for (const ref of l2.refs) {\n const tag = ref.isPage ? ' [PAGE]' : '';\n const route = ref.routePath ? ` → ${ref.routePath}` : '';\n const container = ref.containingSymbol ? ` in ${ref.containingSymbol}` : '';\n lines.push(` └── ${ref.relativePath}${tag}${route}${container}`);\n }\n }\n }\n\n // -- File structure --\n if (fileSymbols.length > 0) {\n lines.push('');\n lines.push(`=== File structure (${basename(defFilePath)}) ===`);\n for (const sym of fileSymbols) {\n const marker = sym.name === symbol ? 
' ← target' : '';\n lines.push(` ${sym.name} (${symbolKindName(sym.kind)}:${sym.selectionRange.start.line + 1})${marker}`);\n if (sym.children) {\n for (const child of sym.children.slice(0, 10)) {\n lines.push(` └── ${child.name} (${symbolKindName(child.kind)}:${child.selectionRange.start.line + 1})`);\n }\n if (sym.children.length > 10) {\n lines.push(` ... and ${sym.children.length - 10} more`);\n }\n }\n }\n }\n\n const formattedResult = lines.join('\\n');\n\n return {\n success: true,\n symbol,\n filePath: relDefPath,\n line: defLine + 1,\n kind: defSymbol ? symbolKindName(defSymbol.kind) : undefined,\n typeInfo: typeInfo || undefined,\n referenceCount: externalRefCount,\n referenceFiles: externalFileCount,\n pages: refFileInfos\n .filter(r => r.isPage)\n .map(r => ({ path: r.relativePath, route: r.routePath })),\n formattedResult,\n };\n } catch (error: unknown) {\n return {\n success: false,\n error: error instanceof Error ? error.message : String(error),\n };\n }\n },\n });\n}\n\nexport type CodeGraphTool = ReturnType<typeof createCodeGraphTool>;\n","import { ToolSet } from 'ai';\nimport { createBashTool, BashToolOptions, BashToolProgress } from './bash.js';\nimport { createReadFileTool, ReadFileToolOptions } from './read-file.js';\nimport { createWriteFileTool, WriteFileToolOptions, WriteFileProgress } from './write-file.js';\nimport { createTodoTool, TodoToolOptions } from './todo.js';\nimport { createLoadSkillTool, LoadSkillToolOptions } from './load-skill.js';\nimport { createLinterTool, LinterToolOptions } from './linter.js';\nimport { createSearchTool, SearchToolOptions, SearchToolProgress } from './search.js';\nimport { createSemanticSearchTool, SemanticSearchToolOptions, SemanticSearchResult } from './semantic-search.js';\nimport { createCodeGraphTool, CodeGraphToolOptions } from './code-graph.js';\nimport { createCompleteTaskTool, createTaskFailedTool, TaskCompletionSignal, CreateTaskToolsOptions } from './task.js';\nimport { createUploadFileTool, 
UploadFileToolOptions } from './upload-file.js';\nimport { isVectorGatewayConfigured, checkIndexExists } from '../semantic/index.js';\nimport { isRemoteConfigured } from '../db/remote.js';\n\nexport interface CreateToolsOptions {\n sessionId: string;\n workingDirectory: string;\n skillsDirectories: string[];\n onBashOutput?: (output: string) => void;\n onBashProgress?: (progress: BashToolProgress) => void;\n /** Called when write_file has progress to report (for streaming content) */\n onWriteFileProgress?: (progress: WriteFileProgress) => void;\n /** Called when explore_agent tool has progress to report (subagent steps) */\n onSearchProgress?: (progress: SearchToolProgress) => void;\n /** Enable LSP diagnostics for file edits (default: true) */\n enableLSP?: boolean;\n /** Enable semantic search if configured (default: true) */\n enableSemanticSearch?: boolean;\n /** Task mode: include complete_task and task_failed tools */\n taskTools?: CreateTaskToolsOptions;\n}\n\n/**\n * Create all tools for an agent session\n * Note: This is now async to support checking semantic search availability\n */\nexport async function createTools(options: CreateToolsOptions): Promise<ToolSet> {\n const tools: ToolSet = {\n bash: createBashTool({\n workingDirectory: options.workingDirectory,\n sessionId: options.sessionId,\n onOutput: options.onBashOutput,\n onProgress: options.onBashProgress,\n }),\n\n read_file: createReadFileTool({\n workingDirectory: options.workingDirectory,\n }),\n\n write_file: createWriteFileTool({\n workingDirectory: options.workingDirectory,\n sessionId: options.sessionId,\n enableLSP: options.enableLSP ?? 
true,\n onProgress: options.onWriteFileProgress,\n }),\n\n todo: createTodoTool({\n sessionId: options.sessionId,\n }),\n\n load_skill: createLoadSkillTool({\n sessionId: options.sessionId,\n skillsDirectories: options.skillsDirectories,\n }),\n\n linter: createLinterTool({\n workingDirectory: options.workingDirectory,\n }),\n\n explore_agent: createSearchTool({\n sessionId: options.sessionId,\n workingDirectory: options.workingDirectory,\n onProgress: options.onSearchProgress,\n }),\n\n code_graph: createCodeGraphTool({\n workingDirectory: options.workingDirectory,\n }),\n };\n\n // Conditionally add upload_file if remote server with GCS is configured\n if (isRemoteConfigured()) {\n tools.upload_file = createUploadFileTool({\n workingDirectory: options.workingDirectory,\n sessionId: options.sessionId,\n });\n }\n\n // Conditionally add semantic_search if configured and index exists\n if (options.enableSemanticSearch !== false) {\n try {\n if (isVectorGatewayConfigured()) {\n const hasIndex = await checkIndexExists(options.workingDirectory);\n if (hasIndex) {\n tools.semantic_search = createSemanticSearchTool({\n workingDirectory: options.workingDirectory,\n });\n }\n }\n } catch {\n // Silently skip semantic search if there are any issues\n }\n }\n\n // Conditionally add task tools when in task mode\n if (options.taskTools) {\n tools.complete_task = createCompleteTaskTool(options.taskTools);\n tools.task_failed = createTaskFailedTool(options.taskTools);\n }\n\n return tools;\n}\n\n// Re-export individual tool creators for customization\nexport { createBashTool } from './bash.js';\nexport { createReadFileTool } from './read-file.js';\nexport { createWriteFileTool } from './write-file.js';\nexport { createTodoTool } from './todo.js';\nexport { createLoadSkillTool } from './load-skill.js';\nexport { createLinterTool } from './linter.js';\nexport { createSearchTool } from './search.js';\nexport { createSemanticSearchTool } from './semantic-search.js';\nexport { 
createCodeGraphTool } from './code-graph.js';\nexport { createCompleteTaskTool, createTaskFailedTool } from './task.js';\nexport { createUploadFileTool } from './upload-file.js';\n\n// Export types\nexport type { BashToolOptions, BashToolProgress } from './bash.js';\nexport type { ReadFileToolOptions } from './read-file.js';\nexport type { WriteFileToolOptions, WriteFileProgress } from './write-file.js';\nexport type { TodoToolOptions } from './todo.js';\nexport type { LoadSkillToolOptions } from './load-skill.js';\nexport type { LinterToolOptions } from './linter.js';\nexport type { SearchToolOptions, SearchToolProgress } from './search.js';\nexport type { SemanticSearchToolOptions, SemanticSearchResult } from './semantic-search.js';\nexport type { CodeGraphToolOptions } from './code-graph.js';\nexport type { TaskCompletionSignal, CreateTaskToolsOptions } from './task.js';\nexport type { UploadFileToolOptions } from './upload-file.js';","import { tool } from 'ai';\nimport { z } from 'zod';\nimport Ajv from 'ajv';\n\nconst ajv = new Ajv({ allErrors: true });\n\nexport interface TaskCompletionSignal {\n status: 'completed' | 'failed';\n result?: unknown;\n error?: string;\n}\n\nexport interface CreateTaskToolsOptions {\n outputSchema: Record<string, unknown>;\n onComplete: (signal: TaskCompletionSignal) => void;\n}\n\nexport function createCompleteTaskTool(options: CreateTaskToolsOptions) {\n const validate = ajv.compile(options.outputSchema);\n\n return tool({\n description:\n 'Call this tool when you have completed the task. Pass the result as a JSON object matching the required output schema. 
If the result does not match the schema, you will receive validation errors and should fix and retry.',\n inputSchema: z.object({\n result: z\n .record(z.string(), z.unknown())\n .describe('The task result as a JSON object matching the output schema'),\n }),\n execute: async (input) => {\n const valid = validate(input.result);\n if (!valid) {\n const errors = validate.errors?.map((e) => ({\n path: e.instancePath || '/',\n message: e.message,\n params: e.params,\n }));\n return {\n status: 'validation_error',\n message:\n 'The result does not match the required output schema. Fix the errors and call complete_task again.',\n errors,\n expectedSchema: options.outputSchema,\n };\n }\n\n options.onComplete({ status: 'completed', result: input.result });\n return {\n status: 'completed',\n message: 'Task completed successfully.',\n };\n },\n });\n}\n\nexport function createTaskFailedTool(options: Pick<CreateTaskToolsOptions, 'onComplete'>) {\n return tool({\n description:\n 'Call this tool if you are unable to complete the task. 
Provide a clear reason explaining why the task cannot be completed.',\n inputSchema: z.object({\n reason: z.string().describe('Explanation of why the task cannot be completed'),\n }),\n execute: async (input) => {\n options.onComplete({ status: 'failed', error: input.reason });\n return {\n status: 'failed',\n message: `Task marked as failed: ${input.reason}`,\n };\n },\n });\n}\n","import { tool } from 'ai';\nimport { z } from 'zod';\nimport { readFile, stat } from 'node:fs/promises';\nimport { join, basename, extname } from 'node:path';\n\nexport interface UploadFileToolOptions {\n workingDirectory: string;\n sessionId: string;\n}\n\nconst MIME_TYPES: Record<string, string> = {\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.html': 'text/html',\n '.css': 'text/css',\n '.js': 'application/javascript',\n '.ts': 'application/typescript',\n '.json': 'application/json',\n '.csv': 'text/csv',\n '.xml': 'application/xml',\n '.pdf': 'application/pdf',\n '.png': 'image/png',\n '.jpg': 'image/jpeg',\n '.jpeg': 'image/jpeg',\n '.gif': 'image/gif',\n '.webp': 'image/webp',\n '.svg': 'image/svg+xml',\n '.mp4': 'video/mp4',\n '.webm': 'video/webm',\n '.mp3': 'audio/mpeg',\n '.wav': 'audio/wav',\n '.zip': 'application/zip',\n '.tar': 'application/x-tar',\n '.gz': 'application/gzip',\n};\n\nexport function createUploadFileTool(options: UploadFileToolOptions) {\n return tool({\n description: `Upload a file to cloud storage and get back a shareable download URL. Use this when the user needs a hosted link to a file (e.g. a generated report, image, export, or any artifact they want to download or share). 
The file must already exist on disk.`,\n inputSchema: z.object({\n path: z.string().describe('Path to the file to upload (relative to working directory or absolute)'),\n name: z.string().optional().describe('Display name for the file (defaults to the filename)'),\n }),\n execute: async (input) => {\n try {\n const { isRemoteConfigured, storageQueries } = await import('../db/remote.js');\n if (!isRemoteConfigured()) {\n return {\n success: false,\n error: 'File upload is not available — remote server with GCS is not configured.',\n };\n }\n\n const fullPath = input.path.startsWith('/')\n ? input.path\n : join(options.workingDirectory, input.path);\n\n // Verify file exists\n try {\n await stat(fullPath);\n } catch {\n return {\n success: false,\n error: `File not found: ${input.path}`,\n };\n }\n\n const fileName = input.name || basename(fullPath);\n const ext = extname(fullPath).toLowerCase();\n const contentType = MIME_TYPES[ext] || 'application/octet-stream';\n\n // Get presigned upload URL\n const uploadInfo = await storageQueries.getUploadUrl(\n options.sessionId,\n fileName,\n contentType,\n 'general'\n );\n\n // Read and upload\n const fileData = await readFile(fullPath);\n const putRes = await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': contentType },\n body: fileData,\n });\n\n if (!putRes.ok) {\n return {\n success: false,\n error: `Upload failed: ${putRes.status} ${putRes.statusText}`,\n };\n }\n\n // Update file size\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: fileData.length });\n\n // Get download URL\n const downloadInfo = await storageQueries.getDownloadUrl(uploadInfo.fileId);\n\n return {\n success: true,\n fileId: uploadInfo.fileId,\n fileName,\n sizeBytes: fileData.length,\n contentType,\n downloadUrl: downloadInfo.downloadUrl,\n expiresAt: downloadInfo.expiresAt,\n };\n } catch (err: any) {\n return {\n success: false,\n error: `Upload failed: ${err.message}`,\n };\n }\n },\n });\n}\n","import 
{ generateText, type ModelMessage as AIModelMessage } from 'ai';\nimport { resolveModel } from './model.js';\nimport { messageQueries, ModelMessage } from '../db/index.js';\nimport { calculateContextSize } from '../utils/truncate.js';\nimport { estimateTokens, estimateMessageTokens } from '../utils/tokens.js';\nimport { createSummaryPrompt } from './prompts.js';\nimport { getConfig } from '../config/index.js';\nimport { sanitizeModelMessages } from '../utils/sanitize-messages.js';\nimport {\n getModelLimits,\n SUMMARIZATION_MODEL,\n SUMMARY_CHUNK_TOKENS,\n SUMMARY_BUDGET_RATIO,\n} from './model-limits.js';\n\nexport interface ContextManagerOptions {\n sessionId: string;\n modelId: string;\n maxContextChars: number;\n keepRecentMessages: number;\n autoSummarize: boolean;\n}\n\nconst TOOL_OUTPUT_TRIM_CHARS = 400;\n\nconst COMPACTABLE_TOOLS = new Set([\n 'read_file',\n 'bash',\n 'explore_agent',\n 'code_graph',\n]);\n\n/**\n * Manages conversation context with a three-phase rolling window:\n *\n * Phase 1 – Compact: strip todo tool calls, trim large tool outputs,\n * and remove thinking blocks from older messages (no LLM cost).\n * Phase 2 – Chunk-summarize: when compacted context still exceeds the\n * model's rolling target, summarize the oldest ~30 K-token chunk\n * via a cheap/fast model (gemini-3-flash-preview).\n * Phase 3 – Roll summaries: when accumulated summaries exceed their budget\n * (~15 % of rolling target), re-summarize them into one.\n *\n * A final repair pass ensures all tool-call / tool-result pairs are matched\n * before the messages leave the context manager.\n */\nexport class ContextManager {\n private sessionId: string;\n private modelId: string;\n private maxContextChars: number;\n private keepRecentMessages: number;\n private autoSummarize: boolean;\n private summaries: string[] = [];\n\n constructor(options: ContextManagerOptions) {\n this.sessionId = options.sessionId;\n this.modelId = options.modelId;\n this.maxContextChars = 
options.maxContextChars;\n this.keepRecentMessages = options.keepRecentMessages;\n this.autoSummarize = options.autoSummarize;\n }\n\n /**\n * Get messages for the current context, applying the three-phase pipeline.\n */\n async getMessages(): Promise<AIModelMessage[]> {\n let messages = (await messageQueries.getModelMessages(this.sessionId)) as AIModelMessage[];\n messages = sanitizeModelMessages(messages) as AIModelMessage[];\n\n // Phase 1: compact older messages (always, free)\n messages = this.compactOlderMessages(messages, this.keepRecentMessages);\n\n if (this.autoSummarize) {\n const { rollingTarget } = getModelLimits(this.modelId);\n const summaryBudget = Math.floor(rollingTarget * SUMMARY_BUDGET_RATIO);\n\n // Phase 2: chunk-summarize until we're under the rolling target\n messages = await this.chunkSummarize(messages, rollingTarget);\n\n // Phase 3: roll summaries if they've grown too large\n await this.rollSummaries(summaryBudget);\n }\n\n // Prepend accumulated summaries\n if (this.summaries.length > 0) {\n const summaryContent = this.summaries.join('\\n\\n---\\n\\n');\n messages = [\n {\n role: 'system' as const,\n content: `[Previous conversation summary]\\n${summaryContent}`,\n },\n ...messages,\n ];\n }\n\n // Final safety net: repair any broken tool-call / tool-result pairing\n messages = repairToolPairing(messages);\n\n return messages;\n }\n\n // ---------------------------------------------------------------------------\n // Phase 1 – Compact\n // ---------------------------------------------------------------------------\n\n /**\n * Strip non-essential content from messages older than the most recent\n * `recentCount`. 
Operates in-memory only — does not touch the DB.\n *\n * Tracks removed tool-call IDs so matching tool-results are also removed,\n * preventing orphaned tool_result blocks that providers reject.\n */\n compactOlderMessages(\n messages: AIModelMessage[],\n recentCount: number,\n ): AIModelMessage[] {\n if (messages.length <= recentCount) return messages;\n\n const boundary = messages.length - recentCount;\n const olderMessages = messages.slice(0, boundary);\n const recentMessages = messages.slice(boundary);\n\n // First pass: compact and collect removed tool-call IDs\n const removedToolCallIds = new Set<string>();\n const compacted: AIModelMessage[] = [];\n\n for (const msg of olderMessages) {\n const processed = this.compactMessage(msg, removedToolCallIds);\n if (processed) compacted.push(processed);\n }\n\n // Second pass: strip orphaned tool-results whose tool-calls were removed\n if (removedToolCallIds.size > 0) {\n const cleaned: AIModelMessage[] = [];\n for (const msg of compacted) {\n const result = stripOrphanedToolResults(msg, removedToolCallIds);\n if (result) cleaned.push(result);\n }\n return [...cleaned, ...recentMessages];\n }\n\n return [...compacted, ...recentMessages];\n }\n\n private compactMessage(\n msg: AIModelMessage,\n removedToolCallIds: Set<string>,\n ): AIModelMessage | null {\n if (!Array.isArray(msg.content)) return msg;\n\n const parts: any[] = [];\n for (const part of msg.content as any[]) {\n // Remove todo tool calls and track their IDs\n if (part.type === 'tool-call' && part.toolName === 'todo') {\n if (part.toolCallId) removedToolCallIds.add(part.toolCallId);\n continue;\n }\n\n // Remove todo tool results and track their IDs\n if (part.type === 'tool-result' && part.toolName === 'todo') {\n if (part.toolCallId) removedToolCallIds.add(part.toolCallId);\n continue;\n }\n\n // Remove thinking/reasoning blocks\n if (part.type === 'reasoning' || part.type === 'thinking') continue;\n\n // Trim large tool-result outputs for known heavy 
tools\n if (part.type === 'tool-result' && COMPACTABLE_TOOLS.has(part.toolName)) {\n parts.push(this.trimToolResult(part));\n continue;\n }\n\n parts.push(part);\n }\n\n if (parts.length === 0) return null;\n return { ...msg, content: parts } as AIModelMessage;\n }\n\n private trimToolResult(part: any): any {\n const results = Array.isArray(part.result) ? part.result : [part.result];\n const trimmedResults = results.map((r: any) => {\n if (typeof r === 'string' && r.length > TOOL_OUTPUT_TRIM_CHARS) {\n const half = Math.floor(TOOL_OUTPUT_TRIM_CHARS / 2);\n return (\n r.slice(0, half) +\n `\\n...[trimmed ${r.length - TOOL_OUTPUT_TRIM_CHARS} chars]...\\n` +\n r.slice(-half)\n );\n }\n if (r && typeof r === 'object' && typeof r.text === 'string' && r.text.length > TOOL_OUTPUT_TRIM_CHARS) {\n const half = Math.floor(TOOL_OUTPUT_TRIM_CHARS / 2);\n return {\n ...r,\n text:\n r.text.slice(0, half) +\n `\\n...[trimmed ${r.text.length - TOOL_OUTPUT_TRIM_CHARS} chars]...\\n` +\n r.text.slice(-half),\n };\n }\n return r;\n });\n\n return {\n ...part,\n result: Array.isArray(part.result) ? 
trimmedResults : trimmedResults[0],\n };\n }\n\n // ---------------------------------------------------------------------------\n // Phase 2 – Chunk-summarize\n // ---------------------------------------------------------------------------\n\n /**\n * While estimated tokens exceed `rollingTarget`, peel off the oldest\n * ~SUMMARY_CHUNK_TOKENS worth of messages, summarize them via the cheap\n * model, and prepend the summary.\n */\n private async chunkSummarize(\n messages: AIModelMessage[],\n rollingTarget: number,\n ): Promise<AIModelMessage[]> {\n let totalTokens = estimateMessageTokens(messages);\n\n while (totalTokens > rollingTarget && messages.length > this.keepRecentMessages) {\n // Collect the oldest chunk of ~SUMMARY_CHUNK_TOKENS\n let chunkTokens = 0;\n let chunkEnd = 0;\n const maxChunkable = messages.length - this.keepRecentMessages;\n\n for (let i = 0; i < maxChunkable; i++) {\n const msgTokens = this.messageTokens(messages[i]);\n chunkTokens += msgTokens;\n chunkEnd = i + 1;\n if (chunkTokens >= SUMMARY_CHUNK_TOKENS) break;\n }\n\n if (chunkEnd === 0) break;\n\n const chunk = messages.slice(0, chunkEnd);\n const remaining = messages.slice(chunkEnd);\n\n const summary = await this.summarizeChunk(chunk);\n if (summary) {\n this.summaries.push(summary);\n console.log(\n `[Context] Summarized ${chunk.length} messages (~${chunkTokens} tokens) into ${estimateTokens(summary)} tokens`,\n );\n }\n\n messages = remaining;\n totalTokens = estimateMessageTokens(messages);\n }\n\n return messages;\n }\n\n private async summarizeChunk(chunk: AIModelMessage[]): Promise<string | null> {\n const historyText = chunk\n .map((msg) => {\n const content = typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content);\n return `[${msg.role}]: ${content}`;\n })\n .join('\\n\\n');\n\n try {\n const result = await generateText({\n model: resolveModel(SUMMARIZATION_MODEL) as any,\n prompt: createSummaryPrompt(historyText),\n });\n return result.text;\n } catch (error) {\n console.error('[Context] Chunk summarization failed:', error);\n return null;\n }\n }\n\n // ---------------------------------------------------------------------------\n // Phase 3 – Roll summaries\n // ---------------------------------------------------------------------------\n\n /**\n * If accumulated summaries exceed `budget` tokens, re-summarize them\n * into a single condensed summary.\n */\n private async rollSummaries(budget: number): Promise<void> {\n if (this.summaries.length <= 1) return;\n\n const totalSummaryTokens = this.summaries.reduce(\n (t, s) => t + estimateTokens(s),\n 0,\n );\n\n if (totalSummaryTokens <= budget) return;\n\n const combined = this.summaries.join('\\n\\n---\\n\\n');\n\n try {\n const result = await generateText({\n model: resolveModel(SUMMARIZATION_MODEL) as any,\n prompt: createSummaryPrompt(combined),\n });\n\n console.log(\n `[Context] Rolled ${this.summaries.length} summaries (${totalSummaryTokens} tokens) into ${estimateTokens(result.text)} tokens`,\n );\n\n this.summaries = [result.text];\n } catch (error) {\n console.error('[Context] Summary rolling failed:', error);\n }\n }\n\n // ---------------------------------------------------------------------------\n // Helpers\n // ---------------------------------------------------------------------------\n\n private messageTokens(msg: AIModelMessage): number {\n const content = typeof msg.content === 'string'\n ? 
msg.content\n : JSON.stringify(msg.content);\n return estimateTokens(content) + 4;\n }\n\n // ---------------------------------------------------------------------------\n // Public API (unchanged)\n // ---------------------------------------------------------------------------\n\n async addUserMessage(\n content:\n | string\n | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string }>,\n ): Promise<void> {\n const userMessage: ModelMessage = {\n role: 'user',\n content: content as any,\n };\n await messageQueries.create(this.sessionId, userMessage);\n }\n\n async addResponseMessages(messages: AIModelMessage[]): Promise<void> {\n await messageQueries.addMany(this.sessionId, messages as ModelMessage[]);\n }\n\n async getStats(): Promise<{\n messageCount: number;\n contextChars: number;\n estimatedTokens: number;\n hasSummary: boolean;\n summaryCount: number;\n }> {\n const messages = (await messageQueries.getModelMessages(this.sessionId)) as AIModelMessage[];\n\n return {\n messageCount: messages.length,\n contextChars: calculateContextSize(messages),\n estimatedTokens: estimateMessageTokens(messages),\n hasSummary: this.summaries.length > 0,\n summaryCount: this.summaries.length,\n };\n }\n\n async clear(): Promise<void> {\n await messageQueries.deleteBySession(this.sessionId);\n this.summaries = [];\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool pairing repair (used by both ContextManager and remote server)\n// ---------------------------------------------------------------------------\n\n/**\n * Remove tool-result parts whose matching tool-call was removed.\n */\nfunction stripOrphanedToolResults(\n msg: AIModelMessage,\n removedIds: Set<string>,\n): AIModelMessage | null {\n if (!Array.isArray(msg.content)) return msg;\n\n const parts = (msg.content as any[]).filter((part) => {\n if (part.type === 'tool-result' && removedIds.has(part.toolCallId)) return false;\n if (part.type === 
'tool-call' && removedIds.has(part.toolCallId)) return false;\n return true;\n });\n\n if (parts.length === 0) return null;\n return { ...msg, content: parts } as AIModelMessage;\n}\n\n/**\n * Ensure every tool-call has a matching tool-result and vice versa.\n * Removes orphans from either side so the prompt is always structurally valid.\n *\n * Exported so the remote server can use it as a last-resort safety net.\n */\nexport function repairToolPairing(messages: AIModelMessage[]): AIModelMessage[] {\n // Collect all tool-call IDs and tool-result IDs\n const toolCallIds = new Set<string>();\n const toolResultIds = new Set<string>();\n\n for (const msg of messages) {\n if (!Array.isArray(msg.content)) continue;\n for (const part of msg.content as any[]) {\n if (part.type === 'tool-call' && part.toolCallId) toolCallIds.add(part.toolCallId);\n if (part.type === 'tool-result' && part.toolCallId) toolResultIds.add(part.toolCallId);\n }\n }\n\n // IDs that exist on one side but not the other\n const orphanedCalls = new Set([...toolCallIds].filter((id) => !toolResultIds.has(id)));\n const orphanedResults = new Set([...toolResultIds].filter((id) => !toolCallIds.has(id)));\n\n if (orphanedCalls.size === 0 && orphanedResults.size === 0) return messages;\n\n if (orphanedCalls.size > 0) {\n console.warn(`[tool-repair] Removing ${orphanedCalls.size} orphaned tool-call(s) with no matching result`);\n }\n if (orphanedResults.size > 0) {\n console.warn(`[tool-repair] Removing ${orphanedResults.size} orphaned tool-result(s) with no matching call`);\n }\n\n const repaired: AIModelMessage[] = [];\n\n for (const msg of messages) {\n if (!Array.isArray(msg.content)) {\n repaired.push(msg);\n continue;\n }\n\n const parts = (msg.content as any[]).filter((part) => {\n if (part.type === 'tool-call' && orphanedCalls.has(part.toolCallId)) return false;\n if (part.type === 'tool-result' && orphanedResults.has(part.toolCallId)) return false;\n return true;\n });\n\n if (parts.length === 0) 
continue;\n repaired.push({ ...msg, content: parts } as AIModelMessage);\n }\n\n return repaired;\n}\n","import os from 'node:os';\nimport {\n loadAllSkillsFromDiscovered,\n getGlobMatchedSkills,\n loadAgentsMd,\n formatSkillsForContext,\n formatAlwaysLoadedSkills,\n formatGlobMatchedSkills,\n formatAgentsMdContent,\n} from '../skills/index.js';\nimport { todoQueries, TodoItem } from '../db/index.js';\nimport { DiscoveredSkills } from '../config/types.js';\n\n/**\n * Get platform-specific search instructions\n */\nfunction getSearchInstructions(): string {\n const platform = process.platform;\n \n const common = `- **Prefer \\`read_file\\` over shell commands** for reading files - don't use \\`cat\\`, \\`head\\`, or \\`tail\\` when \\`read_file\\` is available\n- **Avoid unbounded searches** - always scope searches with glob patterns and directory paths to prevent overwhelming output\n- **Search strategically**: Start with specific patterns and directories, then broaden only if needed`;\n\n if (platform === 'win32') {\n return `${common}\n- **Find files**: \\`dir /s /b *.ts\\` or PowerShell: \\`Get-ChildItem -Recurse -Filter *.ts\\`\n- **Search content**: \\`findstr /s /n \"pattern\" *.ts\\` or PowerShell: \\`Select-String -Pattern \"pattern\" -Path *.ts -Recurse\\`\n- **If ripgrep (\\`rg\\`) is installed**: \\`rg \"pattern\" -t ts src/\\` - faster and respects .gitignore`;\n }\n \n // Unix-like (darwin, linux, etc.)\n return `${common}\n- **Find files**: \\`find . 
-name \"*.ts\"\\` or \\`find src/ -type f -name \"*.tsx\"\\`\n- **Search content**: \\`grep -rn \"pattern\" --include=\"*.ts\" src/\\` - use \\`-l\\` for filenames only, \\`-c\\` for counts\n- **If ripgrep (\\`rg\\`) is installed**: \\`rg \"pattern\" -t ts src/\\` - faster and respects .gitignore`;\n}\n\n/**\n * Build the system prompt for the coding agent\n */\nexport async function buildSystemPrompt(options: {\n workingDirectory: string;\n skillsDirectories: string[];\n sessionId: string;\n discoveredSkills?: DiscoveredSkills;\n activeFiles?: string[];\n customInstructions?: string;\n}): Promise<string> {\n const {\n workingDirectory,\n skillsDirectories,\n sessionId,\n discoveredSkills,\n activeFiles = [],\n customInstructions,\n } = options;\n\n // Load skills using the enhanced system if discoveredSkills is provided\n let alwaysLoadedContent = '';\n let globMatchedContent = '';\n let agentsMdContent = '';\n let onDemandSkillsContext = '';\n\n if (discoveredSkills) {\n // Use the new enhanced skill loading\n const { always, onDemand, all } = await loadAllSkillsFromDiscovered(discoveredSkills);\n\n // Format always-loaded skills\n alwaysLoadedContent = formatAlwaysLoadedSkills(always);\n\n // Format on-demand skills for context\n onDemandSkillsContext = formatSkillsForContext(onDemand);\n\n // Load AGENTS.md if present\n const agentsMd = await loadAgentsMd(discoveredSkills.agentsMdPath);\n agentsMdContent = formatAgentsMdContent(agentsMd);\n\n // Load glob-matched skills based on active files\n if (activeFiles.length > 0) {\n const globMatched = await getGlobMatchedSkills(all, activeFiles, workingDirectory);\n globMatchedContent = formatGlobMatchedSkills(globMatched);\n }\n } else {\n // Legacy fallback: just load skills from directories\n const { loadAllSkills } = await import('../skills/index.js');\n const skills = await loadAllSkills(skillsDirectories);\n onDemandSkillsContext = formatSkillsForContext(skills);\n }\n\n // Load current todos\n const todos = 
await todoQueries.getBySession(sessionId);\n const todosContext = formatTodosForContext(todos);\n\n // Get environment info\n const platform = process.platform === 'win32' ? 'Windows' : process.platform === 'darwin' ? 'macOS' : 'Linux';\n const currentDate = new Date().toLocaleDateString('en-US', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });\n const searchInstructions = getSearchInstructions();\n\n const systemPrompt = `You are SparkECoder, an expert AI coding assistant. You help developers write, debug, and improve code.\n\n## Environment\n- **Platform**: ${platform} (${os.release()})\n- **Date**: ${currentDate}\n- **Working Directory**: ${workingDirectory}\n\n## Core Capabilities\nYou have access to powerful tools for:\n- **bash**: Execute commands in the terminal (see below for details)\n- **read_file**: Read file contents to understand code and context\n- **write_file**: Create new files or edit existing ones (supports targeted string replacement)\n- **linter**: Check files for type errors and lint issues (TypeScript, JavaScript, TSX, JSX)\n- **todo**: Manage your task list to track progress on complex operations\n- **load_skill**: Load specialized knowledge documents for specific tasks\n- **explore_agent**: Explore agent for semantic discovery - for exploratory questions and finding code by meaning\n- **code_graph**: Inspect a symbol's type hierarchy and usage graph via the TypeScript language server\n- **upload_file**: Upload a file to cloud storage and get a shareable download URL (available when remote storage is configured)\n\n\nIMPORTANT: If you have zero context of where you are working, always explore it first to understand the structure before doing things for the user.\n\nUse the TODO tool to manage your task list to track progress on complex operations. Always ask the user what they want to do specifically before doing it, and make a plan. 
\nStep 1 of the plan should be researching files and understanding the components/structure of what you're working on (if you don't already have context), then after u have done that, plan out the rest of the tasks u need to do. \nYou can clear the todo and restart it, and do multiple things inside of one session.\n\n### bash Tool\nThe bash tool runs commands in the terminal. Every command runs in its own session with logs saved to disk.\n\n**Run a command (default - waits for completion):**\n\\`\\`\\`\nbash({ command: \"npm install\" })\nbash({ command: \"git status\" })\n\\`\\`\\`\n\n**Run in background (for dev servers, watchers):**\n\\`\\`\\`\nbash({ command: \"npm run dev\", background: true })\n→ Returns { id: \"abc123\" } - save this ID to check logs or stop it later\n\\`\\`\\`\n\n**Check on a background process:**\n\\`\\`\\`\nbash({ id: \"abc123\" }) // get full output\nbash({ id: \"abc123\", tail: 50 }) // last 50 lines only\n\\`\\`\\`\n\n**Stop a background process:**\n\\`\\`\\`\nbash({ id: \"abc123\", kill: true })\n\\`\\`\\`\n\n**Respond to interactive prompts (for yes/no questions, etc.):**\n\\`\\`\\`\nbash({ id: \"abc123\", key: \"y\" }) // send 'y' for yes\nbash({ id: \"abc123\", key: \"n\" }) // send 'n' for no \nbash({ id: \"abc123\", key: \"Enter\" }) // press Enter\nbash({ id: \"abc123\", input: \"my text\" }) // send text input\n\\`\\`\\`\n\n**IMPORTANT - Handling Interactive Commands:**\n- ALWAYS prefer non-interactive flags when available:\n - \\`npm init --yes\\` or \\`npm install --yes\\`\n - \\`npx create-next-app --yes\\` (accepts all defaults)\n - \\`npx create-react-app --yes\\`\n - \\`git commit --no-edit\\`\n - \\`apt-get install -y\\`\n- If a command might prompt for input, run it in background mode first\n- Check the output to see if it's waiting for input\n- Use \\`key: \"y\"\\` or \\`key: \"n\"\\` for yes/no prompts\n- Use \\`input: \"text\"\\` for text input prompts\n\nTerminal output is stored in the global SparkECoder data 
directory. Use the \\`tail\\` option to read recent output.\n\n## Guidelines\n\n### Code Quality\n- Write clean, maintainable, well-documented code\n- Follow existing code style and conventions in the project\n- Use meaningful variable and function names\n- Add comments for complex logic\n\n### Problem Solving\n- Before making changes, understand the existing code structure\n- Break complex tasks into smaller, manageable steps using the todo tool\n- Test changes when possible using the bash tool\n- Handle errors gracefully and provide helpful error messages\n\n### File Operations\n- Use \\`read_file\\` to understand code before modifying\n- Use \\`write_file\\` with mode \"str_replace\" for targeted edits to existing files\n- Use \\`write_file\\` with mode \"full\" only for new files or complete rewrites\n- After making changes, use the \\`linter\\` tool to check for type errors and lint issues\n- The \\`write_file\\` tool automatically shows lint errors in its output for TypeScript/JavaScript files\n- If the user asks to write/create a file, always use \\`write_file\\` rather than printing the full contents\n- If the user requests a file but does not provide a path, choose a sensible default (e.g. 
\\`index.html\\`) and proceed\n- For large content (hundreds of lines), avoid placing it in chat output; write to a file instead\n\n### Linter Tool\nThe linter tool uses Language Server Protocol (LSP) to detect type errors and lint issues:\n\\`\\`\\`\nlinter({}) // Check all recently edited files\nlinter({ paths: [\"src/app.ts\"] }) // Check specific files\nlinter({ paths: [\"src/\"] }) // Check all files in a directory\n\\`\\`\\`\nUse this proactively after making code changes to catch errors early.\n\n### Code Graph Tool\nThe code_graph tool uses the TypeScript language server to inspect a symbol's type hierarchy and usage graph:\n\\`\\`\\`\ncode_graph({ symbol: \"UserCard\" }) // Search workspace for symbol\ncode_graph({ symbol: \"UserCard\", filePath: \"src/components.tsx\" }) // Look up in a specific file\ncode_graph({ symbol: \"formatUser\", filePath: \"utils.ts\", depth: 2 }) // Traverse 2 levels up the reference tree\n\\`\\`\\`\n\n**What it returns:**\n- The symbol's full type signature (parameters, return type)\n- All files/functions/components that reference it (grouped into pages vs components)\n- Which Next.js pages/routes contain it in their component tree\n- Level-2 transitive usages (who uses the things that use this symbol)\n- The file's symbol structure for surrounding context\n\n**When to use code_graph:**\n- **To locate a component/function by name** when you don't have the file path — e.g. a user mentions a component from devtools but the path is missing or mangled. 
Just pass the symbol name and it will find the definition.\n- **Before making changes** to a function/component — understand what depends on it and what will break\n- **To understand component hierarchies** — what renders what, which pages are affected across the *entire* codebase (not just the current page)\n- **To get type signatures** (props, params, return types) without reading entire files\n- **After a devtools selection** when the task involves refactoring, changing props, or anything that could impact other consumers\n\n**When NOT to use code_graph:**\n- For exploratory \"how does X work?\" questions — use \\`explore_agent\\` instead\n- For exact string searches — use grep/rg directly\n- For non-TypeScript/JavaScript files — code_graph only supports TS/JS/TSX/JSX\n\n### Searching and Exploration\n\n**Choose the right search approach:**\n\n0. **Use paths to your advantage — skip searching if you already have what you need.**\n - If the user selected a component via devtools and you can see the component name, file path, and/or line number, you ALREADY know where the code is. Just use \\`read_file\\` to read that file directly — do NOT call \\`explore_agent\\` to \"find\" something you already have the location of.\n - If you received a **page path** (e.g. \\`/dashboard\\`, \\`/settings/profile\\`), map it to the corresponding file in the project structure. In Next.js this means \\`app/dashboard/page.tsx\\`, \\`app/settings/profile/page.tsx\\`, etc. In other frameworks, check the routing convention (e.g. \\`pages/\\`, \\`src/routes/\\`). Use \\`read_file\\` on the mapped path directly.\n - **If the file path is missing, truncated, or doesn't exist** (common with devtools — webpack paths can be mangled), use \\`code_graph({ symbol: \"ComponentName\" })\\` to locate the component. 
This searches the workspace for the symbol definition AND returns its type info, references, and page locations in one call — much better than raw grep for components.\n - **After reading a devtools-selected component**, if the task involves changes that could affect other consumers (refactoring, changing props, renaming), use \\`code_graph\\` to see ALL files and pages that depend on it — the devtools component stack only shows the current page's hierarchy, not the full picture.\n - Read up and down component trees when you have the file path or page path to find what you're looking for.\n1. **Use the \\`explore_agent\\` tool (Explore agent)** for:\n - Semantic/exploratory questions: \"How does authentication work?\", \"Where is user data processed?\"\n - Finding code by meaning or concept, not exact text\n - Understanding how features are implemented across multiple files\n - Exploring unfamiliar parts of the codebase\n - Questions like \"where\", \"how\", \"what does X do\"\n \n The Explore agent is a mini-agent that intelligently explores the codebase, reads relevant files, and returns a summary of what it found. It's best for understanding and discovery.\n\n **MANDATORY — The Explore agent has ZERO context.** It cannot see the conversation, the user's messages, devtools data, or anything else. The \\`query\\` and \\`context\\` fields are the ONLY information it receives. 
Every time you call \\`explore_agent\\`, you MUST populate the \\`context\\` field with ALL relevant information:\n - Copy the ENTIRE \\`<devtools-context>\\` block (page URL, path, viewport) if one exists in the user's message.\n - Copy ALL selected component info (component name, file path, line number, HTML snippet, component stack) if the user selected something via devtools.\n - Include any file paths, component names, or page names the user mentioned.\n - If you skip the \\`context\\` field, the explore agent is searching completely blind and will waste time guessing.\n - NEVER call \\`explore_agent\\` with only a \\`query\\` and no \\`context\\` when the user's message contains devtools or component information.\n\n2. **Use the \\`code_graph\\` tool** for:\n - Understanding what depends on a specific symbol before changing it\n - Tracing component/function usage up to page-level routes\n - Getting type signatures (params, return types) without reading full files\n - Finding exact components usages in the codebase\n - Answering \"what will break if I change this?\" or \"which pages use this component?\"\n\n3. **Use direct commands (grep/rg, find)** for:\n - Exact string matches: \\`rg \"functionName\"\\`, \\`rg \"class MyClass\"\\`\n - Finding files by name: \\`find . -name \"*.config.ts\"\\`\n - Simple pattern matching when you know exactly what you're looking for\n - Counting occurrences or listing all matches\n\n**Examples:**\n- User selected \\`<LandingButton>\\` at \\`src/components/LandingButton.tsx:12\\` → Just \\`read_file(\"src/components/LandingButton.tsx\")\\`. 
Do NOT call explore_agent.\n- User selected \\`<PricingCard>\\` but no file path in the component stack → Use \\`code_graph({ symbol: \"PricingCard\" })\\` to find its definition, type info, and all usages at once.\n- User selected \\`<UserCard>\\` and says \"refactor the props\" → First \\`read_file\\` the component, then \\`code_graph({ symbol: \"UserCard\" })\\` to see every file/page that depends on it before changing the interface.\n- \"Where is the API authentication handled?\" (no file path given) → Use \\`explore_agent\\` tool\n- \"What pages use the UserCard component?\" → Use \\`code_graph({ symbol: \"UserCard\" })\\`\n- \"What's the type signature of formatUser?\" → Use \\`code_graph({ symbol: \"formatUser\", filePath: \"utils.ts\" })\\`\n- \"Find all usages of getUserById\" → Use \\`rg \"getUserById\"\\`\n- \"How does the payment flow work?\" → Use \\`explore_agent\\` tool\n- \"Find files named config\" → Use \\`find . -name \"*config*\"\\`\n\n${searchInstructions}\n\n###Follow these principles when designing and implementing software:\n\n1. **Modularity** — Write simple parts connected by clean interfaces\n2. **Clarity** — Clarity is better than cleverness\n3. **Composition** — Design programs to be connected to other programs\n4. **Separation** — Separate policy from mechanism; separate interfaces from engines\n5. **Simplicity** — Design for simplicity; add complexity only where you must\n6. **Parsimony** — Write a big program only when it is clear by demonstration that nothing else will do\n7. **Transparency** — Design for visibility to make inspection and debugging easier\n8. **Robustness** — Robustness is the child of transparency and simplicity\n9. **Representation** — Fold knowledge into data so program logic can be stupid and robust\n10. **Least Surprise** — In interface design, always do the least surprising thing\n11. **Silence** — When a program has nothing surprising to say, it should say nothing\n12. 
**Repair** — When you must fail, fail noisily and as soon as possible\n13. **Economy** — Programmer time is expensive; conserve it in preference to machine time\n14. **Generation** — Avoid hand-hacking; write programs to write programs when you can\n15. **Optimization** — Prototype before polishing. Get it working before you optimize it\n16. **Diversity** — Distrust all claims for \"one true way\"\n17. **Extensibility** — Design for the future, because it will be here sooner than you think\n\n### Follow these design rules for any user interfaces or experiences you write (DESIGN LIKE APPLE):\n\n1. **Simplicity** — Simplicity is the ultimate sophistication. Remove everything unnecessary.\n2. **Focus** — Say no to 1,000 things to say yes to the few that matter most.\n3. **Clarity** — Make the interface so clear that it becomes invisible.\n4. **Delight** — Sweat the details. The smallest touches create the biggest impact.\n5. **Intuitive** — It should just work. No manual required.\n6. **Human-Centered** — Design for people, not technology. Technology should adapt to humans, not the other way around.\n7. **Consistency** — Familiar patterns reduce cognitive load. Be predictable where it matters.\n8. **Feedback** — Every action deserves a response. Make interactions feel alive.\n9. **Forgiveness** — Make it easy to undo. Never punish exploration.\n10. **Beauty** — Aesthetics are not superficial. Beautiful things work better because people care about them.\n\n### Follow these rules to be a good agent for the user:\n\n1. Understand first - Read relevant files before making any changes. Use the \\`explore_agent\\` tool for exploratory questions about how things work, and direct searches (grep/rg) for finding exact strings or file names.\n2. Plan for complexity - If the task involves 3+ steps or has meaningful trade-offs, create a todo list to track progress before implementing.\n3. 
Use the right tools - Have specialized tools for reading files, editing code, semantic search via subagents, and running terminal commands. Prefer these over raw shell commands.\n4. Work efficiently - When need to do multiple independent things (like reading several files), do them in parallel rather than one at a time.\n5. Be direct - Focus on technical accuracy rather than validation. If see issues with an approach or need clarification, say so.\n6. Verify my work - After making changes, check for linter errors and fix any introduced.\n7. Respect boundaries - Only commit code when explicitly asked, avoid creating unnecessary files, and don't make assumptions about things uncertain about.\n\n\n### Communication\n- Explain your reasoning and approach\n- Be concise but thorough\n- Ask clarifying questions when requirements are ambiguous\n- Report progress on multi-step tasks\n\n${agentsMdContent}\n\n${alwaysLoadedContent}\n\n${globMatchedContent}\n\n## On-Demand Skills\n${onDemandSkillsContext}\n\n## Current Task List\n${todosContext}\n\n${customInstructions ? `## Custom Instructions\\n${customInstructions}` : ''}\n\nRemember: You are a helpful, capable coding assistant. Take initiative, be thorough, and deliver high-quality results.`;\n\n return systemPrompt;\n}\n\n/**\n * Format todos for system prompt context\n */\nfunction formatTodosForContext(todos: TodoItem[]): string {\n if (todos.length === 0) {\n return 'No active tasks. 
Use the todo tool to create a plan for complex operations.';\n }\n\n const statusEmoji: Record<string, string> = {\n pending: '⬜',\n in_progress: '🔄',\n completed: '✅',\n cancelled: '❌',\n };\n\n const lines = ['Current tasks:'];\n for (const todo of todos) {\n const emoji = statusEmoji[todo.status] || '•';\n lines.push(`${emoji} [${todo.id}] ${todo.content}`);\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Build an addendum for the system prompt when running in task mode.\n * Describes the output schema and the two completion tools the agent must use.\n */\nexport function buildTaskPromptAddendum(outputSchema: Record<string, unknown>): string {\n return `\n## Task Mode\n\nYou are running in **task mode**. You have been given a specific task to complete autonomously.\nYou have access to ALL the same tools as a normal session — bash, read_file, write_file, linter, todo, load_skill, explore_agent, code_graph, upload_file, and more. Use them all. This is not a limited session.\nIf you need to give the user a downloadable file (report, image, export, etc.), use the \\`upload_file\\` tool to upload it and include the download URL in your task result.\n\n### Rules\n1. Work independently — no human will approve tool calls. All tools run without approval.\n2. Keep working until the task is fully complete — and then VERIFY it is complete before finishing.\n3. When done, call the \\`complete_task\\` tool with a JSON result matching the output schema below.\n4. If you determine the task is impossible or encounter an unrecoverable error, call the \\`task_failed\\` tool with a clear reason.\n5. Do NOT stop without calling one of these two tools.\n\n### Verification — BE EXTREMELY THOROUGH\nBefore calling \\`complete_task\\`, you MUST verify your work completely. Do not just assume it worked. Actually check.\n\n**After making code changes:**\n- Run the **linter** on every file you touched to catch type errors and lint issues. 
Fix any you introduced.\n- **Read back the files you edited** to confirm the changes are correct and complete — don't rely on memory.\n- If there are **tests**, run them (\\`npm test\\`, \\`pytest\\`, etc.) and ensure they pass.\n- If you created new files, verify they exist and contain what you expect.\n\n**For UI / web changes:**\n- Start the dev server if it isn't already running (it might be so double check ur context)\n- **Open the browser** to verify the changes visually: using your agent-browser tool read the skill\n- Check the dev server logs for errors or warnings.\n- If the app crashes or shows errors, fix them before completing.\n\n**For backend / API changes:**\n- Test the endpoint with curl or a quick script to confirm it works as expected.\n- Check server logs for errors.\n\n**For search and exploration tasks:**\n- Actually search in the RIGHT directories — don't just search the root if the relevant code is in \\`src/\\`, \\`app/\\`, \\`lib/\\`, etc.\n- Use \\`explore_agent\\` for semantic/conceptual questions and \\`grep\\`/\\`code_graph\\` for exact lookups.\n- Cross-reference findings — if you find something in one place, verify related files are consistent.\n- Don't stop at the first match — make sure you've found ALL relevant occurrences.\n\n**General verification checklist:**\n- Re-read the original task prompt and confirm every requirement has been addressed.\n- If the task asked for multiple things, verify EACH one individually.\n- If something doesn't look right, fix it — don't complete with known issues.\n\n**Screenshot your completed work:**\n- After completing a task, take a screenshot of the result when it makes sense (UI changes browser pages, etc.). 
You can use the browser skill to do this.\n- Save screenshots and generated artifacts under \\`.sparkecode-artifacts/\\` (git-ignored) instead of the repo root.\n- Ensure \\`.sparkecode-artifacts/\\` is listed in \\`.gitignore\\`; if it is missing, add it before continuing.\n- Use \\`upload_file\\` to upload the screenshot and include the download URL in your task result so the user can see proof of what was done.\n- In task results, never return local filesystem paths for screenshots/reports; return uploaded \\`downloadUrl\\` links only.\n- This is especially valuable for UI/visual changes, successful test runs, and browser verification — show, don't just tell.\n\n### Use All Available Tools\n- **load_skill**: Load specialized skills/knowledge relevant to the task. Check what skills are available and use them.\n- **explore_agent**: Use for codebase exploration and understanding before making changes.\n- **code_graph**: Use to understand type hierarchies, references, and impact before refactoring.\n- **todo**: Track your progress on multi-step tasks so you don't miss steps.\n- **bash**: Full shell access — run builds, tests, dev servers, open browsers, curl endpoints, anything.\n- **upload_file**: Upload files (screenshots, reports, exports) to cloud storage. Use this to include screenshots of completed work in your task result — visual proof is very helpful.\n\n### Output Schema\nThe \\`complete_task\\` tool expects a \\`result\\` object matching this JSON Schema:\n\\`\\`\\`json\n${JSON.stringify(outputSchema, null, 2)}\n\\`\\`\\`\n\n### Completion Tools\n- **\\`complete_task({ result: ... })\\`** — Call ONLY after thorough verification. The result is validated against the schema above. 
If validation fails you will get errors back — fix and retry.\n- **\\`task_failed({ reason: \"...\" })\\`** — Call only if the task truly cannot be completed.\n`;\n}\n\n/**\n * Create a summary prompt for context compression\n */\nexport function createSummaryPrompt(conversationHistory: string): string {\n return `Please provide a concise summary of the following conversation history. Focus on:\n1. The main task or goal being worked on\n2. Key decisions made\n3. Important code changes or file operations performed\n4. Current state and any pending actions\n\nKeep the summary under 2000 characters while preserving essential context for continuing the work.\n\nConversation to summarize:\n${conversationHistory}\n\nSummary:`;\n}\n","/**\n * Message sanitization for AI SDK ModelMessage compatibility.\n * \n * Ensures messages retrieved from the database conform to the AI SDK's\n * ModelMessage[] schema before being passed to streamText()/generateText().\n * \n * Handles two classes of issues:\n * \n * 1. SCHEMA CORRUPTION: The remote database client's `parseDates()` function\n * recursively converts ISO date strings (like `createdAt`) inside tool result\n * outputs to JavaScript Date objects. The AI SDK's jsonValueSchema only accepts\n * JSON primitives — Date objects are rejected, causing AI_InvalidPromptError.\n * \n * 2. CONSECUTIVE SAME-ROLE MESSAGES: If multiple user messages are saved to the\n * database without an assistant response between them (e.g., user sends two\n * messages quickly, or the previous stream errored before producing a response),\n * the Anthropic API rejects consecutive same-role messages. 
This module merges\n * them into a single message.\n * \n * This module provides a safety net that catches and repairs any corrupted\n * messages so the agent can self-heal even if the database layer returns\n * unexpected data.\n */\n\nimport { modelMessageSchema, type ModelMessage } from 'ai';\n\n/**\n * Recursively convert Date objects to ISO strings within a value.\n * This reverses any accidental Date conversions from parseDates.\n */\nfunction convertDatesToStrings(value: unknown): unknown {\n if (value === null || value === undefined) return value;\n if (value instanceof Date) return value.toISOString();\n if (Array.isArray(value)) return value.map(convertDatesToStrings);\n if (typeof value === 'object') {\n const result: Record<string, unknown> = {};\n for (const [k, v] of Object.entries(value as Record<string, unknown>)) {\n result[k] = convertDatesToStrings(v);\n }\n return result;\n }\n return value;\n}\n\n/**\n * Sanitize a single ModelMessage so it passes AI SDK schema validation.\n * \n * Fixes known corruption patterns:\n * - Date objects inside tool result output (from parseDates)\n * - Date objects inside tool call input\n * - Date objects inside user/assistant content parts\n */\nfunction sanitizeMessage(msg: unknown): unknown {\n if (msg === null || msg === undefined || typeof msg !== 'object') return msg;\n \n const message = msg as Record<string, unknown>;\n \n // Only process messages with a recognized role\n if (!message.role || typeof message.role !== 'string') return msg;\n \n // Deep-convert any Date objects back to ISO strings\n return convertDatesToStrings(message);\n}\n\n/**\n * Merge the content of two user messages into a single content value.\n * \n * Rules:\n * - string + string → joined with \"\\n\\n\"\n * - array + array → concatenated (deduplicating text parts)\n * - string + array → string converted to text part, then concatenated\n * - array + string → string converted to text part, then appended\n */\nfunction 
mergeUserContent(\n a: unknown,\n b: unknown,\n): string | Array<Record<string, unknown>> {\n const aIsString = typeof a === 'string';\n const bIsString = typeof b === 'string';\n\n // Both strings: simple join\n if (aIsString && bIsString) {\n return `${a}\\n\\n${b}`;\n }\n\n // Normalise both to arrays\n const aParts: Array<Record<string, unknown>> = aIsString\n ? [{ type: 'text', text: a }]\n : Array.isArray(a)\n ? (a as Array<Record<string, unknown>>)\n : [];\n\n const bParts: Array<Record<string, unknown>> = bIsString\n ? [{ type: 'text', text: b }]\n : Array.isArray(b)\n ? (b as Array<Record<string, unknown>>)\n : [];\n\n return [...aParts, ...bParts];\n}\n\n/**\n * Merge consecutive same-role messages in the array.\n * \n * The Anthropic API (and some other providers) reject message arrays where\n * two consecutive messages share the same role. This can happen when:\n * - The user sends two messages before the agent responds\n * - The previous agent stream errored/aborted before producing a response\n * - Network issues cause duplicate message saves\n * \n * For user messages: merges content (text joined with newlines, parts concatenated)\n * For assistant messages: merges content parts into a single array\n * For tool messages: concatenates content arrays\n */\nfunction mergeConsecutiveSameRole(messages: ModelMessage[]): ModelMessage[] {\n if (messages.length <= 1) return messages;\n\n const merged: ModelMessage[] = [];\n\n for (const msg of messages) {\n const prev = merged[merged.length - 1];\n\n if (!prev || (prev as any).role !== (msg as any).role) {\n // Different role or first message — keep as-is\n merged.push(msg);\n continue;\n }\n\n // Same role as previous — merge\n const role = (msg as any).role as string;\n\n if (role === 'user') {\n const mergedContent = mergeUserContent((prev as any).content, (msg as any).content);\n merged[merged.length - 1] = { role: 'user', content: mergedContent } as any as ModelMessage;\n 
console.warn('[sanitize-messages] Merged consecutive user messages');\n } else if (role === 'assistant') {\n // Normalise both to arrays and concatenate\n const prevParts = typeof (prev as any).content === 'string'\n ? [{ type: 'text', text: (prev as any).content }]\n : Array.isArray((prev as any).content)\n ? (prev as any).content\n : [];\n const curParts = typeof (msg as any).content === 'string'\n ? [{ type: 'text', text: (msg as any).content }]\n : Array.isArray((msg as any).content)\n ? (msg as any).content\n : [];\n merged[merged.length - 1] = { role: 'assistant', content: [...prevParts, ...curParts] } as any as ModelMessage;\n console.warn('[sanitize-messages] Merged consecutive assistant messages');\n } else if (role === 'tool') {\n // Tool messages always have array content — concatenate\n const prevContent = Array.isArray((prev as any).content) ? (prev as any).content : [];\n const curContent = Array.isArray((msg as any).content) ? (msg as any).content : [];\n merged[merged.length - 1] = { role: 'tool', content: [...prevContent, ...curContent] } as any as ModelMessage;\n console.warn('[sanitize-messages] Merged consecutive tool messages');\n } else {\n // Unknown role — just push, don't try to merge\n merged.push(msg);\n }\n }\n\n return merged;\n}\n\n/**\n * Validate and sanitize an array of ModelMessage objects.\n * \n * Performs two passes:\n * 1. Schema repair — fixes individual messages that fail AI SDK validation\n * (Date objects, missing type wrappers, etc.)\n * 2. Sequence repair — merges consecutive same-role messages that providers\n * like Anthropic would reject\n * \n * Returns the original messages if they're all valid and properly sequenced,\n * or repaired copies if any had issues. 
Logs warnings for any messages that\n * needed repair so the issue is visible in server logs.\n */\nexport function sanitizeModelMessages(messages: ModelMessage[]): ModelMessage[] {\n // === Pass 1: Schema repair ===\n // Fast path: try validating the whole array first\n let allValid = true;\n for (const msg of messages) {\n try {\n modelMessageSchema.parse(msg);\n } catch {\n allValid = false;\n break;\n }\n }\n \n let result: ModelMessage[];\n \n if (allValid) {\n result = messages;\n } else {\n // Slow path: sanitize each message individually\n console.warn('[sanitize-messages] Detected invalid messages, attempting self-repair...');\n \n const sanitized: ModelMessage[] = [];\n let repairCount = 0;\n \n for (let i = 0; i < messages.length; i++) {\n const msg = messages[i];\n \n // Check if this specific message is valid\n try {\n modelMessageSchema.parse(msg);\n sanitized.push(msg);\n continue;\n } catch {\n // Needs repair\n }\n \n // Strategy 1: Convert Date objects to ISO strings\n const fixed = sanitizeMessage(msg) as ModelMessage;\n try {\n modelMessageSchema.parse(fixed);\n sanitized.push(fixed);\n repairCount++;\n console.warn(`[sanitize-messages] Repaired message ${i} (role=${(msg as any).role}) - converted Date objects to strings`);\n continue;\n } catch {\n // Strategy 1 failed\n }\n \n // Strategy 2: For tool messages, try wrapping raw output in { type: 'json', value: ... 
}\n // This handles cases where tool output was stored in legacy format\n if ((msg as any).role === 'tool' && Array.isArray((msg as any).content)) {\n const fixedContent = ((msg as any).content as any[]).map((part: any) => {\n if (part.type === 'tool-result' && part.output !== undefined) {\n const output = convertDatesToStrings(part.output);\n // If output doesn't have a recognized type discriminator, wrap it\n if (output && typeof output === 'object' && !(output as any).type) {\n return { ...part, output: { type: 'json', value: output } };\n }\n // If output has a type but it's not a recognized discriminator, wrap it\n const knownTypes = ['text', 'json', 'execution-denied', 'error-text', 'error-json', 'content'];\n if (output && typeof output === 'object' && !knownTypes.includes((output as any).type)) {\n return { ...part, output: { type: 'json', value: output } };\n }\n return { ...part, output };\n }\n return convertDatesToStrings(part);\n });\n \n const wrappedMsg = { ...(msg as any), content: fixedContent } as ModelMessage;\n try {\n modelMessageSchema.parse(wrappedMsg);\n sanitized.push(wrappedMsg);\n repairCount++;\n console.warn(`[sanitize-messages] Repaired message ${i} (role=tool) - wrapped raw output in json type`);\n continue;\n } catch {\n // Strategy 2 failed\n }\n }\n \n // Strategy 3: Last resort - include the message as-is and let it fail\n // downstream with a better error context. This is better than silently\n // dropping messages which could corrupt conversation state.\n console.error(\n `[sanitize-messages] Could not repair message ${i} (role=${(msg as any).role}). ` +\n `Message will be included as-is. 
Content keys: ${JSON.stringify(Object.keys(msg as any))}`,\n );\n sanitized.push(msg);\n }\n \n if (repairCount > 0) {\n console.warn(`[sanitize-messages] Self-repair complete: fixed ${repairCount}/${messages.length} messages`);\n }\n \n result = sanitized;\n }\n\n // === Pass 2: Sequence repair ===\n // Merge consecutive same-role messages (Anthropic rejects these)\n result = mergeConsecutiveSameRole(result);\n\n return result;\n}\n","export interface ModelLimits {\n contextWindow: number;\n rollingTarget: number;\n}\n\nconst MODEL_LIMITS: Record<string, ModelLimits> = {\n 'anthropic/claude-opus-4-6': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'anthropic/claude-sonnet-4': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'anthropic/claude-3.5-sonnet': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'anthropic/claude-3-haiku': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'google/gemini-3-flash-preview': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'google/gemini-2.5-pro': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'google/gemini-2.5-flash': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'openai/gpt-4o': { contextWindow: 128_000, rollingTarget: 78_000 },\n 'openai/gpt-4.1': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'openai/o3': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'xai/grok-3': { contextWindow: 131_072, rollingTarget: 80_000 },\n};\n\nconst DEFAULT_LIMITS: ModelLimits = { contextWindow: 200_000, rollingTarget: 150_000 };\n\nconst PREFIX_DEFAULTS: Record<string, ModelLimits> = {\n 'anthropic/': { contextWindow: 200_000, rollingTarget: 150_000 },\n 'google/': { contextWindow: 1_000_000, rollingTarget: 150_000 },\n 'openai/': { contextWindow: 128_000, rollingTarget: 78_000 },\n 'xai/': { contextWindow: 131_072, rollingTarget: 80_000 },\n};\n\n/**\n * Look up context window limits for a model.\n * Tries exact match, then prefix match, then falls back to conservative defaults.\n 
*/\nexport function getModelLimits(modelId: string): ModelLimits {\n const normalized = modelId.trim().toLowerCase();\n\n const exact = MODEL_LIMITS[normalized];\n if (exact) return exact;\n\n for (const [prefix, limits] of Object.entries(PREFIX_DEFAULTS)) {\n if (normalized.startsWith(prefix)) return limits;\n }\n\n return DEFAULT_LIMITS;\n}\n\nexport const SUMMARIZATION_MODEL = 'google/gemini-3-flash-preview';\n\n/** Summarize in chunks of roughly this many tokens. */\nexport const SUMMARY_CHUNK_TOKENS = 30_000;\n\n/**\n * Fraction of the rolling target that accumulated summaries may consume\n * before they themselves get re-summarized.\n */\nexport const SUMMARY_BUDGET_RATIO = 0.15;\n","/**\n * Shared in-memory store for devtools context (current page user is viewing).\n * Extracted to a shared module so both server routes and the agent can access it.\n */\n\nexport interface DevtoolsContext {\n url: string;\n path: string;\n pageName: string;\n screenWidth?: number;\n screenHeight?: number;\n devicePixelRatio?: number;\n lastHeartbeat: Date;\n}\n\nconst devtoolsContextStore = new Map<string, DevtoolsContext>();\n\n/**\n * Set/update devtools context for a session\n */\nexport function setDevtoolsContext(sessionId: string, context: DevtoolsContext): void {\n devtoolsContextStore.set(sessionId, context);\n}\n\n/**\n * Get devtools context for a session (returns null if not connected or stale)\n */\nexport function getDevtoolsContext(sessionId: string): DevtoolsContext | null {\n cleanupStaleContexts();\n return devtoolsContextStore.get(sessionId) || null;\n}\n\n/**\n * Clean up stale devtools contexts (older than 30 seconds = disconnected)\n */\nexport function cleanupStaleContexts(): void {\n const now = Date.now();\n for (const [sessionId, ctx] of devtoolsContextStore) {\n if (now - ctx.lastHeartbeat.getTime() > 30 * 1000) {\n devtoolsContextStore.delete(sessionId);\n }\n }\n}\n","import { Hono } from 'hono';\nimport { zValidator } from 
'@hono/zod-validator';\nimport { z } from 'zod';\nimport { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { sessionQueries, toolExecutionQueries, activeStreamQueries, messageQueries } from '../../db/index.js';\nimport { Agent } from '../../agent/index.js';\nimport { getConfig, getAppDataDirectory } from '../../config/index.js';\nimport { streamContext } from '../resumable-stream.js';\nimport { nanoid } from 'nanoid';\nimport { createCheckpoint, getCheckpointManager } from '../../checkpoints/index.js';\nimport { getDevtoolsContext } from '../devtools-store.js';\nimport { getOrCreateProxy, getProxy, destroyProxy, type BrowserFrame, type BrowserStatus, type BrowserInputEvent } from '../../browser/stream-proxy.js';\nimport { FrameRecorder } from '../../browser/recorder.js';\nimport { isRemoteConfigured } from '../../db/remote.js';\nimport { resizeImageIfNeeded } from '../../utils/resize-image.js';\n\n// Active recorders per session\nconst sessionRecorders = new Map<string, FrameRecorder>();\n\nconst MAX_TOOL_INPUT_LENGTH = 8 * 1024;\nconst MAX_TOOL_INPUT_PREVIEW = 2 * 1024;\nconst MAX_TOOL_ARGS_CHUNK = 2 * 1024;\n\nfunction sanitizeToolInput(toolName: string, input: unknown): unknown {\n if (toolName !== 'write_file' || !input || typeof input !== 'object') {\n return input;\n }\n\n const data = input as Record<string, unknown>;\n let changed = false;\n const next: Record<string, unknown> = { ...data };\n\n const content = typeof data.content === 'string' ? data.content : undefined;\n if (content && content.length > MAX_TOOL_INPUT_LENGTH) {\n next.content = `${content.slice(0, MAX_TOOL_INPUT_PREVIEW)}\\n... (truncated)`;\n next.contentLength = content.length;\n next.contentTruncated = true;\n changed = true;\n }\n\n const oldString = typeof data.old_string === 'string' ? 
data.old_string : undefined;\n if (oldString && oldString.length > MAX_TOOL_INPUT_LENGTH) {\n next.old_string = `${oldString.slice(0, MAX_TOOL_INPUT_PREVIEW)}\\n... (truncated)`;\n next.oldStringLength = oldString.length;\n next.oldStringTruncated = true;\n changed = true;\n }\n\n const newString = typeof data.new_string === 'string' ? data.new_string : undefined;\n if (newString && newString.length > MAX_TOOL_INPUT_LENGTH) {\n next.new_string = `${newString.slice(0, MAX_TOOL_INPUT_PREVIEW)}\\n... (truncated)`;\n next.newStringLength = newString.length;\n next.newStringTruncated = true;\n changed = true;\n }\n\n if (changed) {\n console.log('[TOOL-INPUT] Truncated write_file input for streaming payload size');\n }\n\n return changed ? next : input;\n}\n\nfunction buildToolArgsText(input: unknown): string {\n try {\n return JSON.stringify(input ?? {});\n } catch {\n return '{}';\n }\n}\n\nasync function emitSyntheticToolStreaming(\n writeSSE: (data: string) => Promise<void>,\n toolCallStarts: Set<string>,\n toolCallId: string,\n toolName: string,\n input: unknown\n): Promise<void> {\n if (toolCallStarts.has(toolCallId)) return;\n\n toolCallStarts.add(toolCallId);\n await writeSSE(JSON.stringify({\n type: 'tool-input-start',\n toolCallId,\n toolName,\n }));\n\n if (toolName !== 'write_file') return;\n\n const argsText = buildToolArgsText(input);\n for (let i = 0; i < argsText.length; i += MAX_TOOL_ARGS_CHUNK) {\n const chunk = argsText.slice(i, i + MAX_TOOL_ARGS_CHUNK);\n await writeSSE(JSON.stringify({\n type: 'tool-input-delta',\n toolCallId,\n argsTextDelta: chunk,\n }));\n await new Promise((resolve) => setTimeout(resolve, 0));\n }\n}\n\n/**\n * Build a devtools context XML string to attach to user messages.\n * Wrapped in <devtools-context> tags so the frontend can regex it out for display.\n * The LLM still sees it as part of the user message for full context.\n */\nfunction buildDevtoolsContextXml(sessionId: string): string {\n const ctx = 
getDevtoolsContext(sessionId);\n if (!ctx) return '';\n\n const parts: string[] = [];\n parts.push(`<devtools-context>`);\n parts.push(` <page url=\"${ctx.url}\" path=\"${ctx.path}\" name=\"${ctx.pageName}\" />`);\n if (ctx.screenWidth && ctx.screenHeight) {\n parts.push(` <viewport width=\"${ctx.screenWidth}\" height=\"${ctx.screenHeight}\"${ctx.devicePixelRatio ? ` dpr=\"${ctx.devicePixelRatio}\"` : ''} />`);\n }\n parts.push(`</devtools-context>`);\n return parts.join('\\n');\n}\n\n/**\n * Prepend devtools context XML to a user prompt string.\n * The XML block comes first so the LLM always has screen context.\n */\nfunction enrichPromptWithDevtoolsContext(sessionId: string, prompt: string): string {\n const xml = buildDevtoolsContextXml(sessionId);\n if (!xml) return prompt;\n return `${xml}\\n\\n${prompt}`;\n}\n\n/**\n * Encode browser recording frames to MP4 and upload to GCS via remote server.\n * Runs in background -- errors are logged but not propagated.\n */\nasync function encodAndUploadRecording(sessionId: string, recorder: FrameRecorder): Promise<void> {\n if (!isRemoteConfigured()) {\n console.log('[RECORDING] Remote server not configured, skipping upload');\n recorder.clear();\n return;\n }\n\n console.log(`[RECORDING] Encoding ${recorder.frameCount} frames for session ${sessionId}...`);\n const result = await recorder.encode();\n recorder.clear();\n\n if (!result) {\n console.log('[RECORDING] Encoding failed or produced no output');\n return;\n }\n\n try {\n const { storageQueries } = await import('../../db/remote.js');\n const { readFile, unlink } = await import('node:fs/promises');\n\n // Get presigned upload URL\n const uploadInfo = await storageQueries.getUploadUrl(\n sessionId,\n `browser-recording-${Date.now()}.mp4`,\n 'video/mp4',\n 'browser-recording'\n );\n\n // Upload to GCS\n const fileData = await readFile(result.path);\n await fetch(uploadInfo.uploadUrl, {\n method: 'PUT',\n headers: { 'Content-Type': 'video/mp4' },\n body: fileData,\n 
});\n\n // Update file size\n await storageQueries.updateFile(uploadInfo.fileId, { sizeBytes: result.sizeBytes });\n\n console.log(`[RECORDING] Uploaded recording for session ${sessionId} (${result.sizeBytes} bytes)`);\n\n // Clean up temp file\n await unlink(result.path).catch(() => {});\n } catch (err: any) {\n console.error('[RECORDING] Upload failed:', err.message);\n }\n}\n\nconst agents = new Hono();\n\n// Schemas\nconst attachmentSchema = z.object({\n type: z.enum(['image', 'file']),\n data: z.string(), // base64 data URL or raw base64\n mediaType: z.string().optional(),\n filename: z.string().optional(),\n});\n\nconst runPromptSchema = z.object({\n prompt: z.string(), // Can be empty if attachments are provided\n attachments: z.array(attachmentSchema).optional(),\n}).refine(\n (data) => data.prompt.trim().length > 0 || (data.attachments && data.attachments.length > 0),\n { message: 'Either prompt or attachments must be provided' }\n);\n\nconst quickStartSchema = z.object({\n prompt: z.string().min(1),\n name: z.string().optional(),\n workingDirectory: z.string().optional(),\n model: z.string().optional(),\n toolApprovals: z.record(z.string(), z.boolean()).optional(),\n});\n\nconst rejectSchema = z.object({\n reason: z.string().optional(),\n}).optional();\n\n// Store for abort controllers - keyed by streamId\nconst streamAbortControllers = new Map<string, AbortController>();\n\n// Attachment type for the stream producer\ninterface StreamAttachment {\n type: 'image' | 'file';\n data: string;\n mediaType?: string;\n filename?: string;\n /** Path where the attachment was saved on disk */\n savedPath?: string;\n}\n\n/**\n * Get the attachments directory for a session\n */\nfunction getAttachmentsDirectory(sessionId: string): string {\n const appDataDir = getAppDataDirectory();\n return join(appDataDir, 'attachments', sessionId);\n}\n\n/**\n * Save an attachment to disk and return the file path.\n * Images are automatically resized if they exceed provider 
dimension limits.\n */\nasync function saveAttachmentToDisk(\n sessionId: string, \n attachment: { type: 'image' | 'file'; data: string; mediaType?: string; filename?: string },\n index: number\n): Promise<string> {\n const attachmentsDir = getAttachmentsDirectory(sessionId);\n \n if (!existsSync(attachmentsDir)) {\n mkdirSync(attachmentsDir, { recursive: true });\n }\n \n let filename = attachment.filename;\n if (!filename) {\n const ext = getExtensionFromMediaType(attachment.mediaType, attachment.type);\n filename = `attachment_${index + 1}${ext}`;\n }\n \n let base64Data = attachment.data;\n if (base64Data.includes(',')) {\n base64Data = base64Data.split(',')[1];\n }\n \n let buffer: Buffer = Buffer.from(base64Data, 'base64');\n\n if (attachment.type === 'image') {\n const resized = await resizeImageIfNeeded(buffer, attachment.mediaType);\n buffer = resized.buffer;\n attachment.mediaType = resized.mediaType;\n attachment.data = buffer.toString('base64');\n }\n\n const filePath = join(attachmentsDir, filename);\n writeFileSync(filePath, buffer);\n \n return filePath;\n}\n\n/**\n * Get file extension from media type\n */\n/**\n * Strip the `data:...;base64,` prefix from a data URL, returning raw base64.\n * Some runtimes (Modal, Vercel edge) don't support `fetch('data:...')`,\n * and the AI SDK's downloadAssets will try to fetch any string that\n * parses as a URL. Passing raw base64 avoids the download path entirely.\n */\nfunction stripDataUrlPrefix(data: string): string {\n const commaIdx = data.indexOf(',');\n if (commaIdx !== -1 && data.startsWith('data:')) {\n return data.slice(commaIdx + 1);\n }\n return data;\n}\n\nfunction getExtensionFromMediaType(mediaType?: string, type?: 'image' | 'file'): string {\n if (!mediaType) {\n return type === 'image' ? 
'.png' : '.bin';\n }\n \n const mimeToExt: Record<string, string> = {\n 'image/png': '.png',\n 'image/jpeg': '.jpg',\n 'image/jpg': '.jpg',\n 'image/gif': '.gif',\n 'image/webp': '.webp',\n 'image/svg+xml': '.svg',\n 'application/pdf': '.pdf',\n 'text/plain': '.txt',\n 'text/markdown': '.md',\n 'application/json': '.json',\n 'application/javascript': '.js',\n 'text/javascript': '.js',\n 'text/typescript': '.ts',\n 'text/html': '.html',\n 'text/css': '.css',\n };\n \n return mimeToExt[mediaType] || '.bin';\n}\n\n/**\n * Creates a stream producer function for the agent.\n * This function returns a ReadableStream that emits SSE-formatted events.\n * \n * IMPORTANT: The agent runs independently of client connections. Client disconnection\n * does NOT abort the agent - only explicit abort requests do. This enables:\n * - Session switching without stopping the agent\n * - Browser tab closing without losing work\n * - Multiple clients watching the same stream\n * - Reconnection to ongoing streams\n */\nfunction createAgentStreamProducer(\n sessionId: string,\n prompt: string,\n streamId: string,\n attachments?: StreamAttachment[]\n): () => ReadableStream<string> {\n return () => {\n // Create a TransformStream to convert events to SSE format\n const { readable, writable } = new TransformStream<string, string>();\n const writer = writable.getWriter();\n let writerClosed = false;\n const toolCallStarts = new Set<string>();\n\n // Create an abort controller for this specific stream (for explicit stops only)\n const abortController = new AbortController();\n streamAbortControllers.set(streamId, abortController);\n\n let sseEventCount = 0;\n let sseBrowserFrameCount = 0;\n let sseWriteErrors = 0;\n\n // Helper to write SSE event - handles client disconnection gracefully\n const writeSSE = async (data: string) => {\n if (writerClosed) return;\n try {\n sseEventCount++;\n await writer.write(`data: ${data}\\n\\n`);\n } catch (err: any) {\n sseWriteErrors++;\n if (sseWriteErrors 
=== 1) {\n console.log(`[SSE:${streamId}] Writer closed (client disconnected). Total events sent: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}`);\n }\n writerClosed = true;\n }\n };\n \n // Safe close helper\n const safeClose = async () => {\n if (writerClosed) return;\n try {\n console.log(`[SSE:${streamId}] Stream closing. Total events: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}, write errors: ${sseWriteErrors}`);\n writerClosed = true;\n await writer.close();\n } catch {\n // Already closed, ignore\n }\n };\n \n // Cleanup abort controller\n const cleanupAbortController = () => {\n streamAbortControllers.delete(streamId);\n };\n\n // Run the agent in the background\n (async () => {\n let isAborted = false;\n \n try {\n const agent = await Agent.create({ sessionId });\n\n // Send stream ID for clients to use for reconnection\n await writeSSE(JSON.stringify({ type: 'data-stream-id', streamId }));\n\n // Build user message content for broadcast (includes attachments)\n // Note: Attachments are already saved to disk before this point (in the route handler)\n // so they already have savedPath set\n let broadcastContent: string | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }>;\n if (attachments && attachments.length > 0) {\n const contentParts: Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }> = [];\n \n // IMPORTANT: Put file location info FIRST so the model knows where files are saved\n const attachmentDescriptions = attachments\n .map((a, i) => {\n const name = a.filename || `attachment_${i + 1}`;\n const typeLabel = a.type === 'image' ? 'Image' : 'File';\n const location = a.savedPath || '(path unknown)';\n return `${i + 1}. 
${typeLabel}: \"${name}\" saved at: ${location}`;\n })\n .join('\\n');\n contentParts.push({ \n type: 'text', \n text: `[FILE ATTACHMENTS - The user has attached the following files which are saved on disk]\\n${attachmentDescriptions}\\n\\nYou can reference these files by their paths above. The file contents are also shown inline below.` \n });\n \n // Add user's text prompt\n if (prompt) {\n contentParts.push({ type: 'text', text: `\\n[USER MESSAGE]\\n${prompt}` });\n }\n \n // Add file/image parts with filename and path metadata\n for (const attachment of attachments) {\n if (attachment.type === 'image') {\n contentParts.push({\n type: 'image',\n image: stripDataUrlPrefix(attachment.data),\n mediaType: attachment.mediaType,\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n } else {\n contentParts.push({\n type: 'file',\n data: stripDataUrlPrefix(attachment.data),\n mediaType: attachment.mediaType || 'application/octet-stream',\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n }\n }\n broadcastContent = contentParts;\n } else {\n broadcastContent = prompt;\n }\n\n // Send user message so other clients can see it (includes attachments)\n await writeSSE(JSON.stringify({\n type: 'data-user-message',\n data: { id: `user_${Date.now()}`, content: broadcastContent },\n }));\n\n // Send message start\n const messageId = `msg_${Date.now()}`;\n await writeSSE(JSON.stringify({ type: 'start', messageId }));\n\n let textId = `text_${Date.now()}`;\n let textStarted = false;\n\n const result = await agent.stream({\n prompt,\n attachments, // Pass attachments to agent\n abortSignal: abortController.signal, // Use our managed abort controller, NOT client signal\n skipSaveUserMessage: true, // User message is saved in the route before streaming\n // Note: tool-input-start/available events are sent from the stream loop\n // when we see tool-call-streaming-start and tool-call events.\n // We only use onToolCall/onToolResult for 
non-streaming scenarios or\n // tools that need special handling (like approval requests).\n onToolCall: async () => {\n // Events sent from stream loop\n },\n onToolResult: async () => {\n // Events sent from stream loop\n },\n onApprovalRequired: async (execution) => {\n await writeSSE(JSON.stringify({\n type: 'data-approval-required',\n data: {\n id: execution.id,\n toolCallId: execution.toolCallId,\n toolName: execution.toolName,\n input: execution.input,\n },\n }));\n },\n onToolProgress: async (progress) => {\n const status = (progress.data as any)?.status || 'no-status';\n const contentLength = typeof (progress.data as any)?.content === 'string'\n ? (progress.data as any).content.length\n : undefined;\n const chunkIndex = (progress.data as any)?.chunkIndex;\n const chunkCount = (progress.data as any)?.chunkCount;\n console.log(\n '[TOOL-PROGRESS] Sending:',\n progress.toolName,\n status,\n contentLength !== undefined ? `contentLength=${contentLength}` : '',\n chunkIndex !== undefined || chunkCount !== undefined ? 
`chunk=${chunkIndex}/${chunkCount}` : ''\n );\n await writeSSE(JSON.stringify({\n type: 'tool-progress',\n toolName: progress.toolName,\n data: progress.data,\n }));\n if (progress.toolName === 'write_file' && status === 'content') {\n await writeSSE(JSON.stringify({\n type: 'debug',\n label: 'write-file-progress',\n contentLength,\n chunkIndex,\n chunkCount,\n }));\n await new Promise((resolve) => setTimeout(resolve, 0));\n }\n\n // Browser stream: connect proxy on any agent-browser command, destroy on close\n const browserPort = (progress.data as any)?.browserStreamPort;\n const browserClosed = (progress.data as any)?.browserClosed;\n\n if (progress.toolName === 'bash' && browserClosed) {\n console.log(`[BROWSER-STREAM:${streamId}] agent-browser close detected, destroying proxy for session ${sessionId}`);\n destroyProxy(sessionId);\n } else if (progress.toolName === 'bash' && browserPort) {\n console.log(`[BROWSER-STREAM:${streamId}] agent-browser command detected, ensuring proxy on port ${browserPort} for session ${sessionId}`);\n const proxy = getOrCreateProxy(sessionId, browserPort);\n console.log(`[BROWSER-STREAM:${streamId}] Proxy state: connected=${proxy.connected}, frameListeners=${proxy.listenerCount('frame')}, statusListeners=${proxy.listenerCount('status')}`);\n\n if (!sessionRecorders.has(sessionId)) {\n const recorder = new FrameRecorder(sessionId);\n recorder.start();\n sessionRecorders.set(sessionId, recorder);\n }\n\n // Always replace listeners so they reference the CURRENT stream's writeSSE.\n // Old listeners from a previous stream would silently drop frames because\n // their writeSSE closure has writerClosed=true.\n const oldFrameListeners = proxy.listenerCount('frame');\n if (oldFrameListeners > 0) {\n console.log(`[BROWSER-STREAM:${streamId}] Replacing ${oldFrameListeners} stale frame listener(s) from previous stream`);\n proxy.removeAllListeners('frame');\n proxy.removeAllListeners('status');\n }\n console.log(`[BROWSER-STREAM:${streamId}] 
Attaching frame+status listeners to proxy`);\n proxy.on('frame', (frame: BrowserFrame) => {\n sseBrowserFrameCount++;\n if (sseBrowserFrameCount === 1) {\n console.log(`[BROWSER-STREAM:${streamId}] First browser frame received! dataSize=${frame.data?.length ?? 0} writerClosed=${writerClosed}`);\n } else if (sseBrowserFrameCount % 50 === 0) {\n console.log(`[BROWSER-STREAM:${streamId}] Browser frame #${sseBrowserFrameCount} (writerClosed=${writerClosed})`);\n }\n const rec = sessionRecorders.get(sessionId);\n rec?.addFrame(frame);\n writeSSE(JSON.stringify({\n type: 'browser-frame',\n data: frame.data,\n metadata: frame.metadata,\n })).catch((err) => {\n console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-frame via SSE:`, err);\n });\n });\n proxy.on('status', (s: BrowserStatus) => {\n console.log(`[BROWSER-STREAM:${streamId}] Browser status event: connected=${s.connected} screencasting=${s.screencasting} viewport=${s.viewportWidth}x${s.viewportHeight}`);\n writeSSE(JSON.stringify({\n type: 'browser-status',\n ...s,\n })).catch((err) => {\n console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-status via SSE:`, err);\n });\n });\n // Request fresh status so the new listeners get the current state\n proxy.requestStatus();\n }\n },\n onStepFinish: async () => {\n await writeSSE(JSON.stringify({ type: 'finish-step' }));\n if (textStarted) {\n await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));\n textStarted = false;\n textId = `text_${Date.now()}`;\n }\n },\n onAbort: async ({ steps }) => {\n isAborted = true;\n console.log(`Stream aborted after ${steps.length} steps`);\n },\n });\n\n // Consume the stream\n let reasoningId = `reasoning_${Date.now()}`;\n let reasoningStarted = false;\n\n for await (const part of result.stream.fullStream) {\n if (part.type === 'text-delta') {\n if (!textStarted) {\n await writeSSE(JSON.stringify({ type: 'text-start', id: textId }));\n textStarted = true;\n }\n await writeSSE(JSON.stringify({ type: 
'text-delta', id: textId, delta: part.text }));\n } else if (part.type === 'reasoning-start') {\n await writeSSE(JSON.stringify({ type: 'reasoning-start', id: reasoningId }));\n reasoningStarted = true;\n } else if (part.type === 'reasoning-delta') {\n await writeSSE(JSON.stringify({ type: 'reasoning-delta', id: reasoningId, delta: part.text }));\n } else if (part.type === 'reasoning-end') {\n if (reasoningStarted) {\n await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n reasoningStarted = false;\n reasoningId = `reasoning_${Date.now()}`;\n }\n } else if ((part as any).type === 'tool-call-streaming-start') {\n // Tool call starting to stream - send the tool name\n const p = part as any;\n await writeSSE(JSON.stringify({\n type: 'tool-input-start',\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n }));\n toolCallStarts.add(p.toolCallId);\n } else if ((part as any).type === 'tool-call-delta') {\n // Streaming tool args delta\n const p = part as any;\n await writeSSE(JSON.stringify({\n type: 'tool-input-delta',\n toolCallId: p.toolCallId,\n argsTextDelta: p.argsTextDelta,\n }));\n } else if (part.type === 'tool-call') {\n await emitSyntheticToolStreaming(\n writeSSE,\n toolCallStarts,\n part.toolCallId,\n part.toolName,\n part.input\n );\n await writeSSE(JSON.stringify({\n type: 'tool-input-available',\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n input: sanitizeToolInput(part.toolName, part.input),\n }));\n await writeSSE(JSON.stringify({\n type: 'debug',\n label: 'tool-input-available',\n toolName: part.toolName,\n }));\n } else if (part.type === 'tool-result') {\n await writeSSE(JSON.stringify({\n type: 'tool-output-available',\n toolCallId: part.toolCallId,\n output: part.output,\n }));\n } else if (part.type === 'error') {\n console.error('Stream error:', part.error);\n await writeSSE(JSON.stringify({ type: 'error', errorText: String(part.error) }));\n }\n }\n\n // End text if started\n if (textStarted) {\n await 
writeSSE(JSON.stringify({ type: 'text-end', id: textId }));\n }\n\n // End reasoning if still open\n if (reasoningStarted) {\n await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n }\n\n // Save response messages\n if (!isAborted) {\n await result.saveResponseMessages();\n }\n\n // Send finish or abort message\n if (isAborted) {\n await writeSSE(JSON.stringify({ type: 'abort' }));\n } else {\n await writeSSE(JSON.stringify({ type: 'finish' }));\n }\n\n // Mark stream as finished\n try {\n await activeStreamQueries.finish(streamId);\n } catch {\n // Database may be closed during shutdown - ignore\n }\n } catch (error: any) {\n if (error.name === 'AbortError' || error.message?.includes('aborted')) {\n // User explicitly stopped the stream\n await writeSSE(JSON.stringify({ type: 'abort' }));\n } else {\n // Actual error in agent processing\n console.error('Agent error:', error);\n await writeSSE(JSON.stringify({ type: 'error', errorText: error.message }));\n try {\n await activeStreamQueries.markError(streamId);\n } catch {\n // Database may be closed during shutdown - ignore\n }\n }\n } finally {\n cleanupAbortController();\n // NOTE: Do NOT destroy the browser proxy here -- the browser may still\n // be open across turns. 
It gets cleaned up on agent-browser close or session delete.\n\n await writeSSE('[DONE]');\n await safeClose();\n }\n })();\n\n return readable;\n };\n}\n\n// Run agent with resumable streaming response\nagents.post(\n '/:id/run',\n zValidator('json', runPromptSchema),\n async (c) => {\n const id = c.req.param('id');\n const { prompt: rawPrompt, attachments } = c.req.valid('json');\n\n // Enrich user prompt with devtools context (page URL, viewport, etc.)\n const prompt = enrichPromptWithDevtoolsContext(id, rawPrompt);\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Get the next message sequence number (this will be the user message's sequence)\n const nextSequence = await messageQueries.getNextSequence(id);\n\n // Create a checkpoint BEFORE processing this user message\n // This allows reverting to the state before this message was sent\n await createCheckpoint(id, session.workingDirectory, nextSequence);\n\n // Build user message content - can be string or array with file parts\n let userMessageContent: string | Array<{ type: string; text?: string; image?: string; data?: string; mediaType?: string; filename?: string; savedPath?: string }>;\n \n // Cast attachments to StreamAttachment[] so we can add savedPath\n const streamAttachments: StreamAttachment[] | undefined = attachments as StreamAttachment[] | undefined;\n \n if (streamAttachments && streamAttachments.length > 0) {\n // Save attachments to disk FIRST so we have paths for the database\n for (let i = 0; i < streamAttachments.length; i++) {\n const attachment = streamAttachments[i];\n try {\n const savedPath = await saveAttachmentToDisk(id, attachment, i);\n attachment.savedPath = savedPath;\n } catch (err) {\n console.error(`Failed to save attachment ${i}:`, err);\n }\n }\n \n // Build content array with text and file parts\n const contentParts: Array<{ type: string; text?: string; image?: string; data?: string; 
mediaType?: string; filename?: string; savedPath?: string }> = [];\n \n // IMPORTANT: Put file location info FIRST so the model knows where files are saved\n const attachmentDescriptions = streamAttachments\n .map((a, i) => {\n const name = a.filename || `attachment_${i + 1}`;\n const typeLabel = a.type === 'image' ? 'Image' : 'File';\n const location = a.savedPath || '(path unknown)';\n return `${i + 1}. ${typeLabel}: \"${name}\" saved at: ${location}`;\n })\n .join('\\n');\n contentParts.push({ \n type: 'text', \n text: `[FILE ATTACHMENTS - The user has attached the following files which are saved on disk]\\n${attachmentDescriptions}\\n\\nYou can reference these files by their paths above. The file contents are also shown inline below.` \n });\n \n // Add user's actual message\n if (prompt) {\n contentParts.push({ type: 'text', text: `\\n[USER MESSAGE]\\n${prompt}` });\n }\n \n // Add file/image parts with all metadata\n for (const attachment of streamAttachments) {\n if (attachment.type === 'image') {\n contentParts.push({\n type: 'image',\n image: stripDataUrlPrefix(attachment.data),\n mediaType: attachment.mediaType,\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n } else {\n contentParts.push({\n type: 'file',\n data: stripDataUrlPrefix(attachment.data),\n mediaType: attachment.mediaType || 'application/octet-stream',\n filename: attachment.filename,\n savedPath: attachment.savedPath,\n });\n }\n }\n \n userMessageContent = contentParts;\n } else {\n userMessageContent = prompt;\n }\n\n // Save user message to database FIRST, before creating the active stream\n // This ensures other clients can see the user message when they detect the new stream\n await messageQueries.create(id, { role: 'user', content: userMessageContent as any });\n\n // Generate unique stream ID for this run\n const streamId = `stream_${id}_${nanoid(10)}`;\n console.log(`[STREAM] Creating stream ${streamId} for session ${id}`);\n\n // Record the active stream\n 
await activeStreamQueries.create(id, streamId);\n\n // Create the resumable stream\n // Note: We don't pass c.req.raw.signal - the agent runs independently of client connections\n const stream = await streamContext.resumableStream(\n streamId,\n createAgentStreamProducer(id, prompt, streamId, streamAttachments),\n );\n\n if (!stream) {\n console.error(`[STREAM] Failed to create resumable stream ${streamId}`);\n return c.json({ error: 'Failed to create stream' }, 500);\n }\n console.log(`[STREAM] Stream ${streamId} created successfully`);\n\n // Return SSE response - pipe through TextEncoder for proper streaming\n const encodedStream = stream.pipeThrough(new TextEncoderStream());\n \n return new Response(encodedStream as unknown as ReadableStream<Uint8Array>, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'x-vercel-ai-ui-message-stream': 'v1',\n 'x-stream-id': streamId,\n },\n });\n }\n);\n\n// Watch/subscribe to an existing stream (for additional clients)\nagents.get('/:id/watch', async (c) => {\n const sessionId = c.req.param('id');\n const resumeAt = c.req.query('resumeAt');\n const explicitStreamId = c.req.query('streamId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Find the active stream for this session\n let streamId: string | undefined = explicitStreamId;\n if (!streamId) {\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n if (!activeStream) {\n return c.json({ error: 'No active stream for this session', hint: 'Start a new run with POST /agents/:id/run' }, 404);\n }\n streamId = activeStream.streamId;\n }\n\n console.log(`[STREAM] Watch request for session ${sessionId}, streamId=${streamId}, resumeAt=${resumeAt || 'none'}`);\n\n // Try to resume/join the existing stream\n const stream = await streamContext.resumeExistingStream(\n streamId!,\n resumeAt ? 
parseInt(resumeAt, 10) : undefined,\n );\n\n if (!stream) {\n console.log(`[STREAM] Watch failed — stream ${streamId} is no longer active`);\n return c.json({ \n error: 'Stream is no longer active', \n streamId,\n hint: 'The stream may have finished. Check /agents/:id/approvals or start a new run.' \n }, 422);\n }\n console.log(`[STREAM] Client watching stream ${streamId}`);\n\n const encodedStream = stream.pipeThrough(new TextEncoderStream());\n \n return new Response(encodedStream as unknown as ReadableStream<Uint8Array>, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'x-vercel-ai-ui-message-stream': 'v1',\n 'x-stream-id': streamId ?? '',\n },\n });\n});\n\n// Get active stream info for a session\nagents.get('/:id/stream', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n\n return c.json({\n sessionId,\n hasActiveStream: !!activeStream,\n stream: activeStream ? 
{\n id: activeStream.id,\n streamId: activeStream.streamId,\n status: activeStream.status,\n createdAt: activeStream.createdAt.toISOString(),\n } : null,\n });\n});\n\n// Run agent without streaming (for simple integrations)\nagents.post(\n '/:id/generate',\n zValidator('json', runPromptSchema),\n async (c) => {\n const id = c.req.param('id');\n const { prompt: rawPrompt } = c.req.valid('json');\n\n // Enrich user prompt with devtools context (page URL, viewport, etc.)\n const prompt = enrichPromptWithDevtoolsContext(id, rawPrompt);\n\n const session = await sessionQueries.getById(id);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n try {\n const agent = await Agent.create({ sessionId: id });\n const result = await agent.run({ prompt });\n\n return c.json({\n sessionId: id,\n text: result.text,\n stepCount: result.steps.length,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 500);\n }\n }\n);\n\n// Approve a tool execution\nagents.post('/:id/approve/:toolCallId', async (c) => {\n const sessionId = c.req.param('id');\n const toolCallId = c.req.param('toolCallId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n try {\n const agent = await Agent.create({ sessionId });\n const result = await agent.approve(toolCallId);\n\n return c.json({\n success: true,\n toolCallId,\n result,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n});\n\n// Reject a tool execution\nagents.post(\n '/:id/reject/:toolCallId',\n zValidator('json', rejectSchema),\n async (c) => {\n const sessionId = c.req.param('id');\n const toolCallId = c.req.param('toolCallId');\n const body = c.req.valid('json');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n try {\n const agent = await Agent.create({ sessionId });\n 
agent.reject(toolCallId, body?.reason);\n\n return c.json({\n success: true,\n toolCallId,\n rejected: true,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n }\n);\n\n// Get pending approvals for a session\nagents.get('/:id/approvals', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const pendingApprovals = await toolExecutionQueries.getPendingApprovals(sessionId);\n\n return c.json({\n sessionId,\n pendingApprovals: pendingApprovals.map((p) => ({\n id: p.id,\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n input: p.input,\n startedAt: p.startedAt.toISOString(),\n })),\n count: pendingApprovals.length,\n });\n});\n\n// Abort/stop an active stream for a session\n// This is used when the user explicitly clicks the Stop button\nagents.post('/:id/abort', async (c) => {\n const sessionId = c.req.param('id');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Find the active stream for this session\n const activeStream = await activeStreamQueries.getBySessionId(sessionId);\n if (!activeStream) {\n return c.json({ error: 'No active stream for this session' }, 404);\n }\n\n // Get the abort controller for this stream\n const abortController = streamAbortControllers.get(activeStream.streamId);\n if (abortController) {\n abortController.abort();\n streamAbortControllers.delete(activeStream.streamId);\n return c.json({ success: true, streamId: activeStream.streamId, aborted: true });\n }\n\n // Stream might have already finished\n return c.json({ \n success: false, \n streamId: activeStream.streamId, \n message: 'Stream may have already finished or was not found' \n });\n});\n\n// Quick start: create session and run in one request (also resumable)\nagents.post(\n '/quick',\n zValidator('json', 
quickStartSchema),\n async (c) => {\n const body = c.req.valid('json');\n const config = getConfig();\n\n // Create new session\n const agent = await Agent.create({\n name: body.name,\n workingDirectory: body.workingDirectory || config.resolvedWorkingDirectory,\n model: body.model || config.defaultModel,\n sessionConfig: body.toolApprovals ? { toolApprovals: body.toolApprovals } : undefined,\n });\n\n const session = agent.getSession();\n\n // Enrich user prompt with devtools context (page URL, viewport, etc.)\n const enrichedPrompt = enrichPromptWithDevtoolsContext(session.id, body.prompt);\n\n const streamId = `stream_${session.id}_${nanoid(10)}`;\n\n // Create a checkpoint BEFORE processing the first user message\n // For new sessions, the first user message will be at sequence 0\n await createCheckpoint(session.id, session.workingDirectory, 0);\n\n // Record the active stream\n await activeStreamQueries.create(session.id, streamId);\n\n // Create a custom stream producer that includes session info\n // The agent runs independently of client connections for resumable streams\n const createQuickStreamProducer = (): ReadableStream<string> => {\n const { readable, writable } = new TransformStream<string, string>();\n const writer = writable.getWriter();\n let writerClosed = false;\n const toolCallStarts = new Set<string>();\n\n // Create an abort controller for this specific stream (for explicit stops only)\n const abortController = new AbortController();\n streamAbortControllers.set(streamId, abortController);\n\n let sseEventCount = 0;\n let sseBrowserFrameCount = 0;\n let sseWriteErrors = 0;\n\n // Helper to write SSE event - handles client disconnection gracefully\n const writeSSE = async (data: string) => {\n if (writerClosed) return;\n try {\n sseEventCount++;\n await writer.write(`data: ${data}\\n\\n`);\n } catch (err: any) {\n sseWriteErrors++;\n if (sseWriteErrors === 1) {\n console.log(`[SSE:${streamId}] Writer closed (client disconnected). 
Total events sent: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}`);\n }\n writerClosed = true;\n }\n };\n \n const safeClose = async () => {\n if (writerClosed) return;\n try {\n console.log(`[SSE:${streamId}] Stream closing. Total events: ${sseEventCount}, browser frames: ${sseBrowserFrameCount}, write errors: ${sseWriteErrors}`);\n writerClosed = true;\n await writer.close();\n } catch {\n // Already closed\n }\n };\n \n // Cleanup abort controller\n const cleanupAbortController = () => {\n streamAbortControllers.delete(streamId);\n };\n\n (async () => {\n let isAborted = false;\n\n try {\n // Send stream ID and session info\n await writeSSE(JSON.stringify({ type: 'data-stream-id', streamId }));\n await writeSSE(JSON.stringify({\n type: 'data-session',\n data: {\n id: session.id,\n name: session.name,\n workingDirectory: session.workingDirectory,\n model: session.model,\n },\n }));\n\n const messageId = `msg_${Date.now()}`;\n await writeSSE(JSON.stringify({ type: 'start', messageId }));\n\n let textId = `text_${Date.now()}`;\n let textStarted = false;\n\n const result = await agent.stream({\n prompt: enrichedPrompt,\n abortSignal: abortController.signal, // Use our managed abort controller, NOT client signal\n onToolProgress: async (progress) => {\n const status = (progress.data as any)?.status || 'no-status';\n const contentLength = typeof (progress.data as any)?.content === 'string'\n ? (progress.data as any).content.length\n : undefined;\n const chunkIndex = (progress.data as any)?.chunkIndex;\n const chunkCount = (progress.data as any)?.chunkCount;\n console.log(\n '[TOOL-PROGRESS] Sending:',\n progress.toolName,\n status,\n contentLength !== undefined ? `contentLength=${contentLength}` : '',\n chunkIndex !== undefined || chunkCount !== undefined ? 
`chunk=${chunkIndex}/${chunkCount}` : ''\n );\n await writeSSE(JSON.stringify({\n type: 'tool-progress',\n toolName: progress.toolName,\n data: progress.data,\n }));\n if (progress.toolName === 'write_file' && status === 'content') {\n await writeSSE(JSON.stringify({\n type: 'debug',\n label: 'write-file-progress',\n contentLength,\n chunkIndex,\n chunkCount,\n }));\n await new Promise((resolve) => setTimeout(resolve, 0));\n }\n\n const browserPort = (progress.data as any)?.browserStreamPort;\n const browserClosed = (progress.data as any)?.browserClosed;\n\n if (progress.toolName === 'bash' && browserClosed) {\n console.log(`[BROWSER-STREAM:${streamId}] agent-browser close detected, destroying proxy for session ${session.id}`);\n destroyProxy(session.id);\n } else if (progress.toolName === 'bash' && browserPort) {\n console.log(`[BROWSER-STREAM:${streamId}] agent-browser command detected, port ${browserPort} for session ${session.id}`);\n const proxy = getOrCreateProxy(session.id, browserPort);\n console.log(`[BROWSER-STREAM:${streamId}] Proxy state: connected=${proxy.connected}, frameListeners=${proxy.listenerCount('frame')}, statusListeners=${proxy.listenerCount('status')}`);\n\n if (!sessionRecorders.has(session.id)) {\n const recorder = new FrameRecorder(session.id);\n recorder.start();\n sessionRecorders.set(session.id, recorder);\n }\n\n // Always replace listeners so they reference the CURRENT stream's writeSSE.\n const oldFrameListeners = proxy.listenerCount('frame');\n if (oldFrameListeners > 0) {\n console.log(`[BROWSER-STREAM:${streamId}] Replacing ${oldFrameListeners} stale frame listener(s) from previous stream`);\n proxy.removeAllListeners('frame');\n proxy.removeAllListeners('status');\n }\n console.log(`[BROWSER-STREAM:${streamId}] Attaching frame+status listeners to proxy`);\n proxy.on('frame', (frame: BrowserFrame) => {\n sseBrowserFrameCount++;\n if (sseBrowserFrameCount === 1) {\n console.log(`[BROWSER-STREAM:${streamId}] First browser frame 
received! dataSize=${frame.data?.length ?? 0} writerClosed=${writerClosed}`);\n } else if (sseBrowserFrameCount % 50 === 0) {\n console.log(`[BROWSER-STREAM:${streamId}] Browser frame #${sseBrowserFrameCount} (writerClosed=${writerClosed})`);\n }\n const rec = sessionRecorders.get(session.id);\n rec?.addFrame(frame);\n writeSSE(JSON.stringify({\n type: 'browser-frame',\n data: frame.data,\n metadata: frame.metadata,\n })).catch((err) => {\n console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-frame via SSE:`, err);\n });\n });\n proxy.on('status', (s: BrowserStatus) => {\n console.log(`[BROWSER-STREAM:${streamId}] Browser status event: connected=${s.connected} screencasting=${s.screencasting} viewport=${s.viewportWidth}x${s.viewportHeight}`);\n writeSSE(JSON.stringify({\n type: 'browser-status',\n ...s,\n })).catch((err) => {\n console.warn(`[BROWSER-STREAM:${streamId}] Failed to send browser-status via SSE:`, err);\n });\n });\n proxy.requestStatus();\n }\n },\n onStepFinish: async () => {\n await writeSSE(JSON.stringify({ type: 'finish-step' }));\n if (textStarted) {\n await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));\n textStarted = false;\n textId = `text_${Date.now()}`;\n }\n },\n onAbort: async ({ steps }) => {\n isAborted = true;\n console.log(`Stream aborted after ${steps.length} steps`);\n },\n });\n\n let reasoningId = `reasoning_${Date.now()}`;\n let reasoningStarted = false;\n\n for await (const part of result.stream.fullStream) {\n if (part.type === 'text-delta') {\n if (!textStarted) {\n await writeSSE(JSON.stringify({ type: 'text-start', id: textId }));\n textStarted = true;\n }\n await writeSSE(JSON.stringify({ type: 'text-delta', id: textId, delta: part.text }));\n } else if (part.type === 'reasoning-start') {\n await writeSSE(JSON.stringify({ type: 'reasoning-start', id: reasoningId }));\n reasoningStarted = true;\n } else if (part.type === 'reasoning-delta') {\n await writeSSE(JSON.stringify({ type: 'reasoning-delta', 
id: reasoningId, delta: part.text }));\n } else if (part.type === 'reasoning-end') {\n if (reasoningStarted) {\n await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n reasoningStarted = false;\n reasoningId = `reasoning_${Date.now()}`;\n }\n } else if ((part as any).type === 'tool-call-streaming-start') {\n // Tool call starting to stream\n const p = part as any;\n await writeSSE(JSON.stringify({\n type: 'tool-input-start',\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n }));\n toolCallStarts.add(p.toolCallId);\n } else if ((part as any).type === 'tool-call-delta') {\n // Streaming tool args delta\n const p = part as any;\n await writeSSE(JSON.stringify({\n type: 'tool-input-delta',\n toolCallId: p.toolCallId,\n argsTextDelta: p.argsTextDelta,\n }));\n } else if (part.type === 'tool-call') {\n // Tool call complete\n await emitSyntheticToolStreaming(\n writeSSE,\n toolCallStarts,\n part.toolCallId,\n part.toolName,\n part.input\n );\n await writeSSE(JSON.stringify({\n type: 'tool-input-available',\n toolCallId: part.toolCallId,\n toolName: part.toolName,\n input: sanitizeToolInput(part.toolName, part.input),\n }));\n await writeSSE(JSON.stringify({\n type: 'debug',\n label: 'tool-input-available',\n toolName: part.toolName,\n }));\n } else if (part.type === 'tool-result') {\n await writeSSE(JSON.stringify({\n type: 'tool-output-available',\n toolCallId: part.toolCallId,\n output: part.output,\n }));\n } else if (part.type === 'error') {\n console.error('Stream error:', part.error);\n await writeSSE(JSON.stringify({ type: 'error', errorText: String(part.error) }));\n }\n }\n\n if (textStarted) {\n await writeSSE(JSON.stringify({ type: 'text-end', id: textId }));\n }\n\n if (reasoningStarted) {\n await writeSSE(JSON.stringify({ type: 'reasoning-end', id: reasoningId }));\n }\n\n if (!isAborted) {\n await result.saveResponseMessages();\n }\n\n if (isAborted) {\n await writeSSE(JSON.stringify({ type: 'abort' }));\n } else {\n await 
writeSSE(JSON.stringify({ type: 'finish' }));\n }\n\n await activeStreamQueries.finish(streamId);\n } catch (error: any) {\n if (error.name === 'AbortError' || error.message?.includes('aborted')) {\n // User explicitly stopped the stream\n await writeSSE(JSON.stringify({ type: 'abort' }));\n } else {\n // Actual error in agent processing\n console.error('Agent error:', error);\n await writeSSE(JSON.stringify({ type: 'error', errorText: error.message }));\n await activeStreamQueries.markError(streamId);\n }\n } finally {\n cleanupAbortController();\n\n await writeSSE('[DONE]');\n await safeClose();\n }\n })();\n\n return readable;\n };\n\n const stream = await streamContext.resumableStream(\n streamId,\n createQuickStreamProducer,\n );\n\n if (!stream) {\n return c.json({ error: 'Failed to create stream' }, 500);\n }\n\n const encodedStream = stream.pipeThrough(new TextEncoderStream());\n \n return new Response(encodedStream as unknown as ReadableStream<Uint8Array>, {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n 'x-vercel-ai-ui-message-stream': 'v1',\n 'x-stream-id': streamId,\n 'x-session-id': session.id,\n },\n });\n }\n);\n\n// Browser input injection for pair-browsing\nconst browserInputSchema = z.object({\n type: z.enum(['input_mouse', 'input_keyboard', 'input_touch']),\n eventType: z.string(),\n x: z.number().optional(),\n y: z.number().optional(),\n button: z.string().optional(),\n clickCount: z.number().optional(),\n deltaX: z.number().optional(),\n deltaY: z.number().optional(),\n key: z.string().optional(),\n code: z.string().optional(),\n text: z.string().optional(),\n modifiers: z.number().optional(),\n touchPoints: z.array(z.object({\n x: z.number(),\n y: z.number(),\n id: z.number().optional(),\n })).optional(),\n});\n\nagents.post(\n '/:id/browser-input',\n zValidator('json', browserInputSchema),\n async (c) => {\n const sessionId = c.req.param('id');\n const event = 
c.req.valid('json') as BrowserInputEvent;\n\n const proxy = getProxy(sessionId);\n if (!proxy || !proxy.connected) {\n return c.json({ error: 'No active browser stream for this session' }, 404);\n }\n\n proxy.injectInput(event);\n return c.json({ success: true });\n }\n);\n\n// Browser stream status — check if a browser is active and get its stream port\nagents.get('/:id/browser-stream', async (c) => {\n const sessionId = c.req.param('id');\n const proxy = getProxy(sessionId);\n\n return c.json({\n sessionId,\n active: !!proxy?.connected,\n hasProxy: !!proxy,\n latestFrame: proxy?.latestFrame ? {\n metadata: proxy.latestFrame.metadata,\n timestamp: proxy.latestFrame.timestamp,\n } : null,\n });\n});\n\nexport { agents };\n","/**\n * In-memory pub/sub adapter for resumable-stream library.\n * This enables multiple clients to follow the same stream without Redis.\n * \n * Limitations:\n * - State lost on server restart\n * - Only works for single-server deployment (no load balancing)\n * \n * For production with multiple servers, use Redis.\n */\n\nimport { createResumableStreamContext } from 'resumable-stream/generic';\nimport type { Publisher, Subscriber } from 'resumable-stream/generic';\n\n// In-memory key-value store\nconst store = new Map<string, { value: string; expiresAt?: number }>();\n\n// In-memory pub/sub channels\nconst channels = new Map<string, Set<(message: string) => void>>();\n\n// Cleanup expired keys periodically\n// Use .unref() so this doesn't keep the process alive when it's the only thing running\nconst cleanupInterval = setInterval(() => {\n const now = Date.now();\n for (const [key, data] of store.entries()) {\n if (data.expiresAt && data.expiresAt < now) {\n store.delete(key);\n }\n }\n}, 60000); // Clean up every minute\ncleanupInterval.unref();\n\nlet publishCount = 0;\nlet lastPublishLog = 0;\n\n/**\n * In-memory Publisher implementation\n */\nconst publisher: Publisher = {\n connect: async () => {\n // No-op for in-memory\n },\n\n 
publish: async (channel: string, message: string) => {\n const subscribers = channels.get(channel);\n publishCount++;\n const now = Date.now();\n if (now - lastPublishLog > 10000) {\n console.log(`[ResumableStream] Publish stats: total=${publishCount}, channels=${channels.size}, store=${store.size}`);\n lastPublishLog = now;\n }\n if (subscribers) {\n for (const callback of subscribers) {\n setImmediate(() => callback(message));\n }\n }\n },\n\n set: async (key: string, value: string, options?: { EX?: number }) => {\n const expiresAt = options?.EX ? Date.now() + options.EX * 1000 : undefined;\n store.set(key, { value, expiresAt });\n if (options?.EX) {\n setTimeout(() => store.delete(key), options.EX * 1000);\n }\n },\n\n get: async (key: string) => {\n const data = store.get(key);\n if (!data) return null;\n \n if (data.expiresAt && data.expiresAt < Date.now()) {\n store.delete(key);\n return null;\n }\n \n return data.value;\n },\n\n incr: async (key: string) => {\n const data = store.get(key);\n const current = data ? parseInt(data.value, 10) : 0;\n const next = (isNaN(current) ? 0 : current) + 1;\n store.set(key, { value: String(next), expiresAt: data?.expiresAt });\n return next;\n },\n};\n\n/**\n * In-memory Subscriber implementation\n */\nconst subscriber: Subscriber = {\n connect: async () => {\n // No-op for in-memory\n },\n\n subscribe: async (channel: string, callback: (message: string) => void) => {\n if (!channels.has(channel)) {\n channels.set(channel, new Set());\n }\n channels.get(channel)!.add(callback);\n console.log(`[ResumableStream] Subscribe to channel \"${channel}\" (total subscribers: ${channels.get(channel)!.size})`);\n },\n\n unsubscribe: async (channel: string) => {\n const count = channels.get(channel)?.size ?? 
0;\n channels.delete(channel);\n console.log(`[ResumableStream] Unsubscribe from channel \"${channel}\" (removed ${count} subscribers)`);\n },\n};\n\n/**\n * Create the resumable stream context with in-memory pub/sub\n */\nexport const streamContext = createResumableStreamContext({\n // Background task handler - just let promises run and log errors\n waitUntil: (promise: Promise<unknown>) => {\n promise.catch((err) => {\n console.error('[ResumableStream] Background task error:', err);\n });\n },\n publisher,\n subscriber,\n});\n\n/**\n * Get statistics about the in-memory store (for debugging)\n */\nexport function getStreamStats() {\n return {\n storeSize: store.size,\n activeChannels: channels.size,\n channelSubscribers: Array.from(channels.entries()).map(([channel, subs]) => ({\n channel,\n subscriberCount: subs.size,\n })),\n };\n}\n\n/**\n * Clear all in-memory state (useful for testing)\n */\nexport function clearStreamState() {\n store.clear();\n channels.clear();\n}\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport { readFileSync } from 'node:fs';\nimport { fileURLToPath } from 'node:url';\nimport { dirname, join } from 'node:path';\nimport { getConfig, getApiKeyStatus, setApiKey, removeApiKey, SUPPORTED_PROVIDERS } from '../../config/index.js';\n\n// Read package.json to get the current version\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\n// Try multiple possible paths to package.json:\n// When bundled by tsup, all code ends up in dist/server/index.js\n// The import.meta.url will reflect the bundled file location\nconst possiblePaths = [\n join(__dirname, '../package.json'), // From dist/server -> dist/../package.json\n join(__dirname, '../../package.json'), // From dist/server (if nested differently)\n join(__dirname, '../../../package.json'), // From src/server/routes (development)\n join(process.cwd(), 'package.json'), // From current 
working directory\n];\n\nlet currentVersion = '0.0.0';\nlet packageName = 'sparkecoder';\n\nfor (const packageJsonPath of possiblePaths) {\n try {\n const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));\n // Match sparkecoder package (but not web/ or remote-server/ subpackages)\n if (packageJson.name === 'sparkecoder') {\n currentVersion = packageJson.version || '0.0.0';\n packageName = packageJson.name || 'sparkecoder';\n break;\n }\n } catch {\n // Try next path\n }\n}\n\nconst health = new Hono();\n\nhealth.get('/', async (c) => {\n const config = getConfig();\n const apiKeyStatus = getApiKeyStatus();\n \n // Check if the AI Gateway key is configured (required for the default model)\n const gatewayKey = apiKeyStatus.find(s => s.provider === 'ai-gateway');\n const hasApiKey = gatewayKey?.configured ?? false;\n\n return c.json({\n status: 'ok',\n version: currentVersion,\n uptime: process.uptime(),\n apiKeyConfigured: hasApiKey,\n config: {\n workingDirectory: config.resolvedWorkingDirectory,\n defaultModel: config.defaultModel,\n defaultToolApprovals: config.toolApprovals || {},\n port: config.server.port,\n },\n timestamp: new Date().toISOString(),\n });\n});\n\n// Check for updates - compare current version with npm registry\nhealth.get('/version', async (c) => {\n let latestVersion = currentVersion;\n let updateAvailable = false;\n let error: string | undefined;\n \n try {\n // Fetch latest version from npm registry\n const npmResponse = await fetch(`https://registry.npmjs.org/${packageName}/latest`, {\n headers: { 'Accept': 'application/json' },\n signal: AbortSignal.timeout(5000), // 5 second timeout\n });\n \n if (npmResponse.ok) {\n const npmData = await npmResponse.json() as { version?: string };\n latestVersion = npmData.version || currentVersion;\n \n // Compare versions (simple semver comparison)\n const parseVersion = (v: string) => {\n const parts = v.replace(/^v/, '').split('.').map(Number);\n return { major: parts[0] || 0, minor: 
parts[1] || 0, patch: parts[2] || 0 };\n };\n \n const current = parseVersion(currentVersion);\n const latest = parseVersion(latestVersion);\n \n updateAvailable = \n latest.major > current.major ||\n (latest.major === current.major && latest.minor > current.minor) ||\n (latest.major === current.major && latest.minor === current.minor && latest.patch > current.patch);\n } else {\n error = `npm registry returned ${npmResponse.status}`;\n }\n } catch (err) {\n error = err instanceof Error ? err.message : 'Failed to check for updates';\n }\n \n return c.json({\n packageName,\n currentVersion,\n latestVersion,\n updateAvailable,\n updateCommand: updateAvailable ? `npm install -g ${packageName}@latest` : null,\n error,\n timestamp: new Date().toISOString(),\n });\n});\n\nhealth.get('/ready', async (c) => {\n try {\n // Check if config is loaded\n getConfig();\n\n return c.json({\n status: 'ready',\n timestamp: new Date().toISOString(),\n });\n } catch (error: any) {\n return c.json(\n {\n status: 'not_ready',\n error: error.message,\n timestamp: new Date().toISOString(),\n },\n 503\n );\n }\n});\n\n// ============================================\n// API Key Management Endpoints\n// ============================================\n\n// Get status of all API keys (masked)\nhealth.get('/api-keys', async (c) => {\n const status = getApiKeyStatus();\n \n return c.json({\n providers: status,\n supportedProviders: SUPPORTED_PROVIDERS,\n });\n});\n\n// Set an API key\nconst setApiKeySchema = z.object({\n provider: z.string(),\n apiKey: z.string().min(1),\n});\n\nhealth.post(\n '/api-keys',\n zValidator('json', setApiKeySchema),\n async (c) => {\n const { provider, apiKey } = c.req.valid('json');\n \n try {\n setApiKey(provider, apiKey);\n const status = getApiKeyStatus();\n const providerStatus = status.find(s => s.provider === provider.toLowerCase());\n \n return c.json({\n success: true,\n provider: provider.toLowerCase(),\n maskedKey: providerStatus?.maskedKey,\n message: `API 
key for ${provider} saved successfully`,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n }\n);\n\n// Remove an API key\nhealth.delete('/api-keys/:provider', async (c) => {\n const provider = c.req.param('provider');\n \n try {\n removeApiKey(provider);\n \n return c.json({\n success: true,\n provider: provider.toLowerCase(),\n message: `API key for ${provider} removed`,\n });\n } catch (error: any) {\n return c.json({ error: error.message }, 400);\n }\n});\n\nexport { health };\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport * as tmux from '../../terminal/tmux.js';\nimport { sessionQueries } from '../../db/index.js';\n\nexport const terminals = new Hono();\n\n// Spawn a new terminal\nconst spawnSchema = z.object({\n command: z.string(),\n cwd: z.string().optional(),\n name: z.string().optional(),\n});\n\nterminals.post(\n '/:sessionId/terminals',\n zValidator('json', spawnSchema),\n async (c) => {\n const sessionId = c.req.param('sessionId');\n const body = c.req.valid('json');\n\n // Verify session exists\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // Check if tmux is available\n const hasTmux = await tmux.isTmuxAvailable();\n if (!hasTmux) {\n return c.json({ error: 'tmux is not installed. Background terminals require tmux.' 
}, 400);\n }\n\n const workingDirectory = body.cwd || session.workingDirectory;\n const result = await tmux.runBackground(body.command, workingDirectory, { \n sessionId, \n name: body.name \n });\n\n return c.json({\n id: result.id,\n name: body.name || null,\n command: body.command,\n cwd: workingDirectory,\n status: result.status,\n pid: null, // tmux doesn't expose PID directly\n }, 201);\n }\n);\n\n// List terminals for a session\nterminals.get('/:sessionId/terminals', async (c) => {\n const sessionId = c.req.param('sessionId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // List all terminals for this session from the file system\n const sessionTerminals = await tmux.listSessionTerminals(sessionId, session.workingDirectory);\n \n // Get running status for each terminal\n const terminalList = await Promise.all(\n sessionTerminals.map(async (meta) => {\n const running = await tmux.isRunning(meta.id);\n return {\n id: meta.id,\n name: meta.name || null,\n command: meta.command,\n cwd: meta.cwd,\n status: running ? 
'running' : 'stopped',\n createdAt: meta.createdAt,\n };\n })\n );\n\n return c.json({\n sessionId,\n terminals: terminalList,\n count: terminalList.length,\n running: terminalList.filter(t => t.status === 'running').length,\n });\n});\n\n// Get terminal status\nterminals.get('/:sessionId/terminals/:terminalId', async (c) => {\n const sessionId = c.req.param('sessionId');\n const terminalId = c.req.param('terminalId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const meta = await tmux.getMeta(terminalId, session.workingDirectory, sessionId);\n if (!meta) {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n const running = await tmux.isRunning(terminalId);\n\n return c.json({\n id: terminalId,\n command: meta.command,\n cwd: meta.cwd,\n status: running ? 'running' : 'stopped',\n createdAt: meta.createdAt,\n exitCode: running ? null : 0, // We don't track exit codes in tmux mode\n });\n});\n\n// Get terminal logs\nconst logsQuerySchema = z.object({\n tail: z.string().optional().transform(v => v ? 
parseInt(v, 10) : undefined),\n});\n\nterminals.get(\n '/:sessionId/terminals/:terminalId/logs',\n zValidator('query', logsQuerySchema),\n async (c) => {\n const sessionId = c.req.param('sessionId');\n const terminalId = c.req.param('terminalId');\n const query = c.req.valid('query');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n const result = await tmux.getLogs(terminalId, session.workingDirectory, { tail: query.tail, sessionId });\n\n if (result.status === 'unknown') {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n return c.json({\n terminalId,\n logs: result.output,\n lineCount: result.output.split('\\n').length,\n });\n }\n);\n\n// Kill a terminal\nconst killSchema = z.object({\n signal: z.enum(['SIGTERM', 'SIGKILL']).optional(),\n});\n\nterminals.post(\n '/:sessionId/terminals/:terminalId/kill',\n zValidator('json', killSchema.optional()),\n async (c) => {\n const terminalId = c.req.param('terminalId');\n\n const success = await tmux.killTerminal(terminalId);\n\n if (!success) {\n return c.json({ error: 'Failed to kill terminal (may already be stopped)' }, 400);\n }\n\n return c.json({ success: true, message: 'Terminal killed' });\n }\n);\n\n// Write to terminal stdin\nconst writeSchema = z.object({\n input: z.string(),\n});\n\nterminals.post(\n '/:sessionId/terminals/:terminalId/write',\n zValidator('json', writeSchema),\n async (c) => {\n const terminalId = c.req.param('terminalId');\n const body = c.req.valid('json');\n\n // Check if terminal is running\n const isRunning = await tmux.isRunning(terminalId);\n if (!isRunning) {\n return c.json({ error: 'Terminal is not running' }, 400);\n }\n\n // Use sendInput to write to the terminal (uses tmux send-keys internally)\n // Don't press Enter automatically - the input should include newlines if needed\n const success = await tmux.sendInput(terminalId, body.input, { pressEnter: false });\n \n if 
(!success) {\n return c.json({ error: 'Failed to write to terminal' }, 500);\n }\n\n return c.json({ success: true, written: body.input.length });\n }\n);\n\n// Kill all terminals for a session (cleanup endpoint)\nterminals.post('/:sessionId/terminals/kill-all', async (c) => {\n const sessionId = c.req.param('sessionId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not found' }, 404);\n }\n\n // List all terminals for this session and kill them\n const sessionTerminals = await tmux.listSessionTerminals(sessionId, session.workingDirectory);\n let killed = 0;\n\n for (const terminal of sessionTerminals) {\n const isRunning = await tmux.isRunning(terminal.id);\n if (isRunning) {\n const success = await tmux.killTerminal(terminal.id);\n if (success) killed++;\n }\n }\n\n return c.json({ success: true, killed });\n});\n\n// Direct terminal stream - simpler endpoint that just needs terminal ID\n// Used by web UI to subscribe to terminal output without needing session context\nterminals.get('/stream/:terminalId', async (c) => {\n const terminalId = c.req.param('terminalId');\n\n // We need to find the terminal - search all sessions\n const sessions = await sessionQueries.list();\n let terminalMeta: Awaited<ReturnType<typeof tmux.getMeta>> = null;\n let workingDirectory = process.cwd();\n let foundSessionId: string | undefined;\n\n // Search in session-scoped directories first (new format)\n for (const session of sessions) {\n terminalMeta = await tmux.getMeta(terminalId, session.workingDirectory, session.id);\n if (terminalMeta) {\n workingDirectory = session.workingDirectory;\n foundSessionId = session.id;\n break;\n }\n }\n\n // Fallback: search in legacy format (no session scope)\n if (!terminalMeta) {\n for (const session of sessions) {\n terminalMeta = await tmux.getMeta(terminalId, session.workingDirectory);\n if (terminalMeta) {\n workingDirectory = session.workingDirectory;\n foundSessionId = 
terminalMeta.sessionId;\n break;\n }\n }\n }\n\n // Check if terminal session exists in tmux (even if no meta file)\n const isActive = await tmux.isRunning(terminalId);\n if (!terminalMeta && !isActive) {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n // Set up SSE with polling\n return new Response(\n new ReadableStream({\n async start(controller) {\n const encoder = new TextEncoder();\n let lastOutput = '';\n let isRunning = true;\n let pollCount = 0;\n const maxPolls = 600; // 5 minutes at 200ms intervals\n\n // Send initial status\n controller.enqueue(\n encoder.encode(`event: status\\ndata: ${JSON.stringify({ terminalId, status: 'connected' })}\\n\\n`)\n );\n\n while (isRunning && pollCount < maxPolls) {\n try {\n const result = await tmux.getLogs(terminalId, workingDirectory, { sessionId: foundSessionId });\n \n // Send new output only\n if (result.output !== lastOutput) {\n const newContent = result.output.slice(lastOutput.length);\n if (newContent) {\n controller.enqueue(\n encoder.encode(`event: stdout\\ndata: ${JSON.stringify({ data: newContent })}\\n\\n`)\n );\n }\n lastOutput = result.output;\n }\n\n isRunning = result.status === 'running';\n \n if (!isRunning) {\n controller.enqueue(\n encoder.encode(`event: exit\\ndata: ${JSON.stringify({ status: 'stopped' })}\\n\\n`)\n );\n break;\n }\n\n // Poll every 200ms for more responsive output\n await new Promise(r => setTimeout(r, 200));\n pollCount++;\n } catch {\n break;\n }\n }\n\n controller.close();\n },\n }),\n {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n },\n }\n );\n});\n\n// Stream terminal logs (SSE) - version with session context\nterminals.get('/:sessionId/terminals/:terminalId/stream', async (c) => {\n const sessionId = c.req.param('sessionId');\n const terminalId = c.req.param('terminalId');\n\n const session = await sessionQueries.getById(sessionId);\n if (!session) {\n return c.json({ error: 'Session not 
found' }, 404);\n }\n\n const meta = await tmux.getMeta(terminalId, session.workingDirectory, sessionId);\n if (!meta) {\n return c.json({ error: 'Terminal not found' }, 404);\n }\n\n // Set up SSE with polling (simpler than event-based with tmux)\n return new Response(\n new ReadableStream({\n async start(controller) {\n const encoder = new TextEncoder();\n let lastOutput = '';\n let isRunning = true;\n\n while (isRunning) {\n try {\n const result = await tmux.getLogs(terminalId, session.workingDirectory, { sessionId });\n \n // Send new output only\n if (result.output !== lastOutput) {\n const newContent = result.output.slice(lastOutput.length);\n if (newContent) {\n controller.enqueue(\n encoder.encode(`event: stdout\\ndata: ${JSON.stringify({ data: newContent })}\\n\\n`)\n );\n }\n lastOutput = result.output;\n }\n\n isRunning = result.status === 'running';\n \n if (!isRunning) {\n controller.enqueue(\n encoder.encode(`event: exit\\ndata: ${JSON.stringify({ status: 'stopped' })}\\n\\n`)\n );\n break;\n }\n\n // Poll every 500ms\n await new Promise(r => setTimeout(r, 500));\n } catch {\n break;\n }\n }\n\n controller.close();\n },\n }),\n {\n headers: {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n 'Connection': 'keep-alive',\n },\n }\n );\n});\n","import { Hono } from 'hono';\nimport { zValidator } from '@hono/zod-validator';\nimport { z } from 'zod';\nimport { nanoid } from 'nanoid';\nimport { sessionQueries, activeStreamQueries, type TaskConfig } from '../../db/index.js';\nimport { Agent } from '../../agent/index.js';\nimport { getConfig } from '../../config/index.js';\nimport { streamContext } from '../resumable-stream.js';\n\nconst tasks = new Hono();\n\n// Store abort controllers for running tasks, keyed by session/task ID\nconst taskAbortControllers = new Map<string, AbortController>();\n\nconst createTaskSchema = z.object({\n prompt: z.string().min(1),\n outputSchema: z.record(z.string(), z.unknown()),\n webhookUrl: 
z.string().url().optional(),\n model: z.string().optional(),\n workingDirectory: z.string().optional(),\n name: z.string().optional(),\n maxIterations: z.number().int().min(1).max(500).optional(),\n});\n\n// POST /tasks — create and start a task (returns immediately)\ntasks.post(\n '/',\n zValidator('json', createTaskSchema),\n async (c) => {\n const body = c.req.valid('json');\n const config = getConfig();\n\n const taskConfig: TaskConfig = {\n enabled: true,\n outputSchema: body.outputSchema,\n webhookUrl: body.webhookUrl,\n maxIterations: body.maxIterations ?? 50,\n status: 'running',\n };\n\n // Create a session with task metadata and all tools auto-approved\n const agent = await Agent.create({\n name: body.name || 'Task',\n workingDirectory: body.workingDirectory || config.resolvedWorkingDirectory,\n model: body.model || config.defaultModel,\n sessionConfig: {\n toolApprovals: { bash: false, write_file: false, read_file: false },\n task: taskConfig,\n },\n });\n\n const taskId = agent.sessionId;\n const abortController = new AbortController();\n taskAbortControllers.set(taskId, abortController);\n\n // Set up a resumable stream so the dashboard can show live updates\n // (tasks are just sessions — they use the same stream infrastructure)\n const streamId = `stream_${taskId}_${nanoid(10)}`;\n await activeStreamQueries.create(taskId, streamId);\n\n const taskStreamProducer = () => {\n const { readable, writable } = new TransformStream<string, string>();\n const writer = writable.getWriter();\n let writerClosed = false;\n\n const writeSSE = async (data: string) => {\n if (writerClosed) return;\n try {\n await writer.write(`data: ${data}\\n\\n`);\n } catch {\n writerClosed = true;\n }\n };\n\n (async () => {\n await writeSSE(JSON.stringify({ type: 'data-stream-id', streamId }));\n try {\n await agent.runTask({\n prompt: body.prompt,\n taskConfig,\n abortSignal: abortController.signal,\n writeSSE,\n });\n await writeSSE(JSON.stringify({ type: 'finish' }));\n } 
catch (err: any) {\n if (err.name === 'AbortError' || abortController.signal.aborted) {\n console.log(`[TASK] Task ${taskId} was cancelled`);\n await writeSSE(JSON.stringify({ type: 'abort' }));\n } else {\n console.error(`[TASK] Error in task ${taskId}:`, err.message);\n const errorMsg = err.message || 'Unknown error';\n await writeSSE(JSON.stringify({ type: 'error', errorText: errorMsg }));\n const failedTask: TaskConfig = {\n ...taskConfig,\n status: 'failed',\n error: errorMsg,\n };\n await sessionQueries.update(taskId, {\n config: {\n toolApprovals: { bash: false, write_file: false, read_file: false },\n task: failedTask,\n },\n });\n if (taskConfig.webhookUrl) {\n const { sendWebhook } = await import('../../utils/webhook.js');\n sendWebhook(taskConfig.webhookUrl, {\n type: 'task.failed',\n taskId,\n sessionId: taskId,\n timestamp: new Date().toISOString(),\n data: { status: 'failed', error: errorMsg },\n });\n }\n }\n } finally {\n await writeSSE('[DONE]');\n writer.close().catch(() => {});\n await activeStreamQueries.finish(streamId).catch(() => {});\n taskAbortControllers.delete(taskId);\n }\n })();\n\n return readable;\n };\n\n await streamContext.resumableStream(streamId, taskStreamProducer);\n\n return c.json({ taskId, status: 'running' }, 201);\n }\n);\n\n// GET /tasks/:id — get task status and result\ntasks.get('/:id', async (c) => {\n const id = c.req.param('id');\n const session = await sessionQueries.getById(id);\n\n if (!session) {\n return c.json({ error: 'Task not found' }, 404);\n }\n\n const task = session.config?.task;\n if (!task?.enabled) {\n return c.json({ error: 'Session is not a task' }, 400);\n }\n\n // Fetch browser recordings for this task session\n let browserRecordings: Array<{ fileName: string; downloadUrl: string | null; sizeBytes: number | null }> = [];\n try {\n const { isRemoteConfigured, storageQueries } = await import('../../db/remote.js');\n if (isRemoteConfigured()) {\n const files = await 
storageQueries.getSessionFiles(id);\n browserRecordings = files\n .filter((f) => f.category === 'browser-recording')\n .map((f) => ({ fileName: f.fileName, downloadUrl: f.downloadUrl, sizeBytes: f.sizeBytes }));\n }\n } catch {}\n\n return c.json({\n taskId: id,\n status: task.status,\n result: task.result,\n error: task.error,\n iterations: task.iterations,\n model: session.model,\n name: session.name,\n createdAt: session.createdAt.toISOString(),\n updatedAt: session.updatedAt.toISOString(),\n browserRecordings: browserRecordings.length > 0 ? browserRecordings : undefined,\n });\n});\n\n// POST /tasks/:id/cancel — cancel a running task\ntasks.post('/:id/cancel', async (c) => {\n const id = c.req.param('id');\n const session = await sessionQueries.getById(id);\n\n if (!session) {\n return c.json({ error: 'Task not found' }, 404);\n }\n\n const task = session.config?.task;\n if (!task?.enabled) {\n return c.json({ error: 'Session is not a task' }, 400);\n }\n\n if (task.status !== 'running') {\n return c.json({ error: `Task is already ${task.status}` }, 400);\n }\n\n const abortController = taskAbortControllers.get(id);\n if (abortController) {\n abortController.abort();\n taskAbortControllers.delete(id);\n }\n\n const cancelledTask: TaskConfig = {\n ...task,\n status: 'failed',\n error: 'Task cancelled by user',\n };\n await sessionQueries.update(id, {\n config: { ...session.config, task: cancelledTask },\n });\n\n if (task.webhookUrl) {\n const { sendWebhook } = await import('../../utils/webhook.js');\n sendWebhook(task.webhookUrl, {\n type: 'task.failed',\n taskId: id,\n sessionId: id,\n timestamp: new Date().toISOString(),\n data: { status: 'failed', error: 'Task cancelled by user' },\n });\n }\n\n return c.json({ taskId: id, status: 'failed', error: 'Task cancelled by user' });\n});\n\nexport default tasks;\n","/**\n * System dependency checks for SparkECoder\n * \n * Checks for required external tools (tmux, agent-browser) and provides\n * installation 
instructions or auto-installs when possible.\n */\n\nimport { exec } from 'node:child_process';\nimport { promisify } from 'node:util';\nimport { platform } from 'node:os';\n\nconst execAsync = promisify(exec);\n\nexport interface DependencyCheckResult {\n available: boolean;\n version?: string;\n error?: string;\n installInstructions?: string;\n}\n\n/**\n * Detect the current platform and package manager\n */\nfunction getInstallInstructions(): string {\n const os = platform();\n \n if (os === 'darwin') {\n return `\nInstall tmux on macOS:\n brew install tmux\n\nIf you don't have Homebrew, install it first:\n /bin/bash -c \"$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)\"\n`.trim();\n }\n \n if (os === 'linux') {\n return `\nInstall tmux on Linux:\n # Ubuntu/Debian\n sudo apt-get update && sudo apt-get install -y tmux\n\n # Fedora/RHEL\n sudo dnf install -y tmux\n\n # Arch Linux\n sudo pacman -S tmux\n`.trim();\n }\n \n // Fallback for other platforms\n return `\nInstall tmux:\n Please install tmux for your operating system.\n Visit: https://github.com/tmux/tmux/wiki/Installing\n`.trim();\n}\n\n/**\n * Check if tmux is installed and get its version\n */\nexport async function checkTmux(): Promise<DependencyCheckResult> {\n try {\n const { stdout } = await execAsync('tmux -V', { timeout: 5000 });\n const version = stdout.trim(); // e.g., \"tmux 3.4\"\n \n return {\n available: true,\n version,\n };\n } catch (error: any) {\n return {\n available: false,\n error: 'tmux is not installed or not in PATH',\n installInstructions: getInstallInstructions(),\n };\n }\n}\n\n/**\n * Check all required dependencies\n * Returns true if all dependencies are available, false otherwise\n */\nexport async function checkDependencies(options: {\n quiet?: boolean;\n exitOnFailure?: boolean;\n} = {}): Promise<boolean> {\n const { quiet = false, exitOnFailure = true } = options;\n \n const tmuxCheck = await checkTmux();\n \n if (!tmuxCheck.available) 
{\n if (!quiet) {\n console.error('\\n❌ Missing required dependency: tmux');\n console.error('');\n console.error('SparkECoder requires tmux for terminal session management.');\n console.error('');\n if (tmuxCheck.installInstructions) {\n console.error(tmuxCheck.installInstructions);\n }\n console.error('');\n console.error('After installing tmux, run sparkecoder again.');\n console.error('');\n }\n \n if (exitOnFailure) {\n process.exit(1);\n }\n \n return false;\n }\n \n if (!quiet) {\n // Only show version in verbose mode or when explicitly requested\n }\n \n return true;\n}\n\n/**\n * Check if agent-browser is installed globally and available in PATH\n */\nexport async function checkAgentBrowser(): Promise<DependencyCheckResult> {\n try {\n const { stdout } = await execAsync('agent-browser --version', { timeout: 10000 });\n const version = stdout.trim();\n return { available: true, version };\n } catch {\n return {\n available: false,\n error: 'agent-browser is not installed globally',\n installInstructions: 'Install agent-browser globally:\\n npm install -g agent-browser\\n agent-browser install',\n };\n }\n}\n\n/**\n * Install agent-browser globally via npm.\n * Returns true if installation succeeded.\n */\nexport async function tryInstallAgentBrowser(options: { quiet?: boolean } = {}): Promise<boolean> {\n try {\n if (!options.quiet) {\n console.log('📦 Installing agent-browser globally...');\n }\n await execAsync('npm install -g agent-browser', { timeout: 120000 });\n\n // Install Chromium for playwright\n try {\n if (!options.quiet) {\n console.log('📦 Installing Chromium for browser automation...');\n }\n await execAsync('agent-browser install', { timeout: 120000 });\n } catch {\n // Non-fatal — Chromium can be installed later via `agent-browser install`\n }\n\n if (!options.quiet) {\n console.log('✅ agent-browser installed successfully');\n }\n return true;\n } catch (error: any) {\n if (!options.quiet) {\n console.error(`Failed to install agent-browser: 
${error.message}`);\n }\n return false;\n }\n}\n\n/**\n * Attempt to auto-install tmux (best effort, may require sudo)\n * Returns true if installation succeeded\n */\nexport async function tryAutoInstallTmux(): Promise<boolean> {\n const os = platform();\n \n try {\n if (os === 'darwin') {\n // Check if brew is available\n try {\n await execAsync('which brew', { timeout: 5000 });\n } catch {\n // Homebrew not installed, can't auto-install\n return false;\n }\n \n console.log('📦 Installing tmux via Homebrew...');\n await execAsync('brew install tmux', { timeout: 300000 }); // 5 min timeout\n console.log('✅ tmux installed successfully');\n return true;\n }\n \n if (os === 'linux') {\n // Try apt-get (Debian/Ubuntu)\n try {\n await execAsync('which apt-get', { timeout: 5000 });\n console.log('📦 Installing tmux via apt-get...');\n console.log(' (This may require sudo password)');\n await execAsync('sudo apt-get update && sudo apt-get install -y tmux', { \n timeout: 300000 \n });\n console.log('✅ tmux installed successfully');\n return true;\n } catch {\n // apt-get not available or failed\n }\n \n // Try dnf (Fedora/RHEL)\n try {\n await execAsync('which dnf', { timeout: 5000 });\n console.log('📦 Installing tmux via dnf...');\n await execAsync('sudo dnf install -y tmux', { timeout: 300000 });\n console.log('✅ tmux installed successfully');\n return true;\n } catch {\n // dnf not available or failed\n }\n }\n \n return false;\n } catch (error: any) {\n console.error(`Failed to auto-install tmux: ${error.message}`);\n return false;\n }\n}\n\n/**\n * Ensure dependencies are available, with optional auto-install attempt\n */\nexport async function ensureDependencies(options: {\n autoInstall?: boolean;\n quiet?: boolean;\n} = {}): Promise<void> {\n const { autoInstall = false, quiet = false } = options;\n \n const tmuxCheck = await checkTmux();\n \n if (!tmuxCheck.available) {\n if (autoInstall) {\n const installed = await tryAutoInstallTmux();\n if (!installed) {\n await 
checkDependencies({ quiet, exitOnFailure: true });\n }\n } else {\n await checkDependencies({ quiet, exitOnFailure: true });\n }\n }\n \n // Auto-install agent-browser globally if not found (non-blocking)\n const browserCheck = await checkAgentBrowser();\n if (!browserCheck.available) {\n await tryInstallAgentBrowser({ quiet });\n }\n}\n"],"mappings":";;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA8BO,SAAS,mBAAmB,WAAmB,KAAa;AACjE,oBAAkB,UAAU,QAAQ,OAAO,EAAE;AAC7C,YAAU;AACZ;AAKO,SAAS,sBAAsB;AACpC,oBAAkB;AAClB,YAAU;AACZ;AAKO,SAAS,qBAA8B;AAC5C,SAAO,CAAC,CAAC,mBAAmB,CAAC,CAAC;AAChC;AA2BA,SAAS,WAAW,KAAe;AACjC,MAAI,QAAQ,QAAQ,QAAQ,OAAW,QAAO;AAC9C,MAAI,MAAM,QAAQ,GAAG,EAAG,QAAO,IAAI,IAAI,UAAU;AACjD,MAAI,OAAO,QAAQ,YAAY,eAAe,KAAM,QAAO;AAE3D,QAAM,SAAS,EAAE,GAAG,IAAI;AACxB,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AAErC,QAAI,qBAAqB,SAAS,GAAG,GAAG;AACtC;AAAA,IACF;AACA,QAAI,YAAY,SAAS,GAAG,KAAK,OAAO,OAAO,GAAG,MAAM,UAAU;AAChE,aAAO,GAAG,IAAI,IAAI,KAAK,OAAO,GAAG,CAAC;AAAA,IACpC,WAAW,OAAO,OAAO,GAAG,MAAM,UAAU;AAC1C,aAAO,GAAG,IAAI,WAAW,OAAO,GAAG,CAAC;AAAA,IACtC;AAAA,EACF;AACA,SAAO;AACT;AAQA,eAAe,IACb,MACA,UAAyE,CAAC,GAC9D;AACZ,MAAI,CAAC,mBAAmB,CAAC,SAAS;AAChC,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,MAAM,GAAG,eAAe,MAAM,IAAI;AACxC,QAAM,OAAoB;AAAA,IACxB,QAAQ,QAAQ,UAAU;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB,UAAU,OAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,SAAK,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,EACzC;AAEA,QAAM,WAAW,MAAM,MAAM,KAAK,IAAI;AAEtC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,UAAM,IAAI,MAAM,MAAM,SAAS,QAAQ,SAAS,MAAM,EAAE;AAAA,EAC1D;AAEA,QAAM,OAAO,MAAM,SAAS,KAAK;AACjC,MAAI,CAAC,QAAQ,SAAS,QAAQ;AAC5B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,KAAK,MAAM,IAAI;AAG9B,MAAI,QAAQ,gBAAgB;AAC1B,WAAO;AAAA,EACT;AAGA,SAAO,WAAW,MAAM;AAC1B;AAkhBA,eAAe,WACb,MACA,UAA+C,CAAC,GACpC;AACZ,MAAI,CAAC,mBAAmB,CAAC,SAAS;AAChC,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,MAAM,GAAG,eAAe,WAAW,IAAI;AAC7C,QAAM,OAAoB;A
AAA,IACxB,QAAQ,QAAQ,UAAU;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB,UAAU,OAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,SAAK,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,EACzC;AAEA,QAAM,WAAW,MAAM,MAAM,KAAK,IAAI;AACtC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,eAAe;AACnE,UAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,KAAK,SAAS,EAAE;AAAA,EACtE;AACA,SAAO,SAAS,KAAK;AACvB;AA5rBA,IAwBI,iBACA,SA8BE,aASA,sBAqFO,sBAkCA,sBAuDA,4BA2DA,mBAgCA,oBAmBA,uBAwCA,2BA+BA,yBAuCA,yBAwCA,uBAiDA,2BAqEA,0BAsFA;AA9rBb;AAAA;AAAA;AAwBA,IAAI,kBAAiC;AACrC,IAAI,UAAyB;AA8B7B,IAAM,cAAc,CAAC,aAAa,aAAa,aAAa,eAAe,aAAa,cAAc,YAAY,aAAa,iBAAiB,sBAAsB;AAStK,IAAM,uBAAuB,CAAC,gBAAgB,eAAe;AAqFtD,IAAM,uBAAuB;AAAA,MAClC,OAAO,MAAkG;AACvG,eAAO,IAAa,aAAa,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACjE;AAAA,MAEA,QAAQ,IAA0C;AAChD,eAAO,IAAyB,aAAa,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC1E;AAAA,MAEA,KAAK,QAAQ,IAAI,SAAS,GAAuB;AAC/C,eAAO,IAAe,mBAAmB,KAAK,WAAW,MAAM,EAAE;AAAA,MACnE;AAAA,MAEA,aAAa,IAAY,QAAyD;AAChF,eAAO,IAAyB,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,OAAO,EAAE,CAAC;AAAA,MAC1F;AAAA,MAEA,YAAY,IAAY,OAA6C;AACnE,eAAO,IAAyB,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,MAAM,EAAE,CAAC;AAAA,MACzF;AAAA,MAEA,OAAO,IAAY,SAAwF;AACzG,eAAO,IAAyB,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,QAAQ,CAAC;AAAA,MACvF;AAAA,MAEA,OAAO,IAA8B;AACnC,eAAO,IAA0B,aAAa,EAAE,IAAI,EAAE,QAAQ,SAAS,CAAC,EAAE,KAAK,OAAK,GAAG,WAAW,KAAK;AAAA,MACzG;AAAA,IACF;AAMO,IAAM,uBAAuB;AAAA,MAClC,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAA8B,qBAAqB,SAAS,gBAAgB;AACjG,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,OAAO,WAAmB,cAA8C;AACtE,eAAO,IAAa,aAAa,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,aAAa,EAAE,CAAC;AAAA,MACxF;AAAA,MAEA,QAAQ,WAAmB,eAAmD;AAC5E,eAAO,IAAe,mBAAmB,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,cAAc,EAAE,CAAC;AAAA,MACjG;AAAA,MAEA,aAAa,WAAuC;AAClD,eAAO,IAAe,qBAAqB,SAAS,EAAE;AAAA,MACxD;AAAA,MAEA,iBAAiB,WAA4C;AAM3D,eAAO,IAAoB,qBAAqB,SAAS,mBAAmB,EAAE,gBAAgB,KAAK,CAAC;AAAA,MACtG;AAAA,MAEA,MAAM,mBAAmB,WAAmB,QAAQ,IAAwB;AAC1E,cAAM,WAAW,MAAM,IAAe,qBAAqB,SAAS,EAAE;AACtE,eAAO,SAAS,MAAM,CAAC,KAAK;AAAA,MAC9B;AAAA,MAEA,MA
AM,eAAe,WAAoC;AACvD,cAAM,SAAS,MAAM,IAAuB,qBAAqB,SAAS,QAAQ;AAClF,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,qBAAqB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACpG,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,mBAAmB,WAAmB,cAAuC;AACjF,cAAM,SAAS,MAAM;AAAA,UACnB,qBAAqB,SAAS,kBAAkB,YAAY;AAAA,UAC5D,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,6BAA6B;AAAA,MACxC,OAAO,MAQoB;AACzB,eAAO,IAAmB,oBAAoB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC9E;AAAA,MAEA,QAAQ,IAAgD;AACtD,eAAO,IAA+B,oBAAoB,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACvF;AAAA,MAEA,gBAAgB,YAAwD;AACtE,eAAO,IAA+B,oCAAoC,UAAU,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC/G;AAAA,MAEA,oBAAoB,WAA6C;AAC/D,eAAO,IAAqB,4BAA4B,SAAS,UAAU;AAAA,MAC7E;AAAA,MAEA,QAAQ,IAAgD;AACtD,eAAO,IAA+B,oBAAoB,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MACnH;AAAA,MAEA,OAAO,IAAgD;AACrD,eAAO,IAA+B,oBAAoB,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MACnH;AAAA,MAEA,SAAS,IAAY,QAAiB,OAAoD;AACxF,eAAO,IAA+B,oBAAoB,EAAE,IAAI;AAAA,UAC9D,QAAQ;AAAA,UACR,MAAM,EAAE,QAAQ,QAAQ,UAAU,aAAa,QAAQ,MAAM;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,MAEA,aAAa,WAA6C;AACxD,eAAO,IAAqB,4BAA4B,SAAS,EAAE;AAAA,MACrE;AAAA,MAEA,MAAM,gBAAgB,WAAmB,WAA2C;AAElF,cAAM,YAAY,qBAAqB,OAAO,UAAU,QAAQ,IAAI,IAAI,KAAK,SAAS,EAAE,QAAQ;AAChG,cAAM,SAAS,MAAM;AAAA,UACnB,4BAA4B,SAAS,UAAU,SAAS;AAAA,UACxD,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,oBAAoB;AAAA,MAC/B,OAAO,MAAiF;AACtF,eAAO,IAAc,UAAU,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC/D;AAAA,MAEA,WAAW,WAAmB,OAAwE;AACpG,eAAO,IAAgB,gBAAgB,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,MAAM,EAAE,CAAC;AAAA,MACvF;AAAA,MAEA,aAAa,WAAwC;AACnD,eAAO,IAAgB,kBAAkB,SAAS,EAAE;AAAA,MACtD;AAAA,MAEA,aAAa,IAAY,QAA2D;AAClF,eAAO,IAA0B,UAAU,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,OAAO,EAAE,CAAC;AAAA,MACxF;AAAA,MAEA,MAAM,OAAO,IAA8B;AACzC,cAAM,SAAS,MAAM,IAA0B,UAAU,EAAE,IAAI,EAAE,QAAQ,SAAS,CAAC;AACnF,eAAO,QAAQ,WAAW;AAAA,MAC5B;AAAA,MAEA,MAAM,aAAa,WAAoC;AACrD,cAAM,SAAS,MAAM,IAAyB,kBAAkB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACjG,eAAO,OAAO;AAA
A,MAChB;AAAA,IACF;AAMO,IAAM,qBAAqB;AAAA,MAChC,KAAK,WAAmB,WAAyC;AAC/D,eAAO,IAAiB,WAAW,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,UAAU,EAAE,CAAC;AAAA,MACvF;AAAA,MAEA,aAAa,WAA2C;AACtD,eAAO,IAAmB,mBAAmB,SAAS,EAAE;AAAA,MAC1D;AAAA,MAEA,MAAM,SAAS,WAAmB,WAAqC;AACrE,cAAM,SAAS,MAAM,IAA2B,mBAAmB,SAAS,cAAc,SAAS,EAAE;AACrG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,wBAAwB;AAAA,MACnC,OAAO,MAA6F;AAClG,eAAO,IAAc,cAAc,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACnE;AAAA,MAEA,QAAQ,IAA2C;AACjD,eAAO,IAA0B,cAAc,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC5E;AAAA,MAEA,aAAa,WAAwC;AACnD,eAAO,IAAgB,sBAAsB,SAAS,EAAE;AAAA,MAC1D;AAAA,MAEA,WAAW,WAAwC;AACjD,eAAO,IAAgB,sBAAsB,SAAS,UAAU;AAAA,MAClE;AAAA,MAEA,aAAa,IAAY,QAA4B,UAAmB,OAA+C;AACrH,eAAO,IAA0B,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,UAAU,MAAM,EAAE,CAAC;AAAA,MAC7G;AAAA,MAEA,UAAU,IAAY,KAA4C;AAChE,eAAO,IAA0B,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,IAAI,EAAE,CAAC;AAAA,MACzF;AAAA,MAEA,MAAM,OAAO,IAA8B;AACzC,cAAM,SAAS,MAAM,IAA0B,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,CAAC;AACvF,eAAO,QAAQ,WAAW;AAAA,MAC5B;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,sBAAsB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACrG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,4BAA4B;AAAA,MACvC,OAAO,WAAmB,UAAyC;AACjE,eAAO,IAAkB,YAAY,EAAE,QAAQ,QAAQ,MAAM,EAAE,WAAW,SAAS,EAAE,CAAC;AAAA,MACxF;AAAA,MAEA,eAAe,WAAsD;AACnE,eAAO,IAAyB,oBAAoB,SAAS,EAAE,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MAC3F;AAAA,MAEA,cAAc,UAAqD;AACjE,eAAO,IAA8B,yBAAyB,QAAQ,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACjG;AAAA,MAEA,OAAO,UAAqD;AAC1D,eAAO,IAA8B,yBAAyB,QAAQ,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,WAAW,EAAE,CAAC;AAAA,MAC7H;AAAA,MAEA,UAAU,UAAqD;AAC7D,eAAO,IAA8B,yBAAyB,QAAQ,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,QAAQ,EAAE,CAAC;AAAA,MAC1H;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,oBAAoB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACnG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,0BAA0B;AAAA,MACrC,OAAO,MAA6F;AAClG,eAAO,IAAgB,gBAAgB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACvE;AAAA,MAEA,QAAQ,IAA6C;AACnD,eAAO,IAA4B,gBAAgB,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAChF;AAAA,MAEA,aAAa,WAA0C;AA
CrD,eAAO,IAAkB,wBAAwB,SAAS,EAAE;AAAA,MAC9D;AAAA,MAEA,qBAAqB,WAAmB,iBAA0D;AAChG,eAAO,IAAuB,wBAAwB,SAAS,gBAAgB,eAAe,EAAE,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MAC5H;AAAA,MAEA,UAAU,WAAoD;AAC5D,eAAO,IAAuB,wBAAwB,SAAS,SAAS,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MACpG;AAAA,MAEA,MAAM,oBAAoB,WAAmB,iBAA0C;AACrF,cAAM,SAAS,MAAM;AAAA,UACnB,wBAAwB,SAAS,mBAAmB,eAAe;AAAA,UACnE,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,wBAAwB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACvG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,0BAA0B;AAAA,MACrC,OAAO,MAMiB;AACtB,eAAO,IAAgB,iBAAiB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MACxE;AAAA,MAEA,gBAAgB,cAA6C;AAC3D,eAAO,IAAkB,4BAA4B,YAAY,EAAE;AAAA,MACrE;AAAA,MAEA,aAAa,WAA0C;AACrD,eAAO,IAAkB,yBAAyB,SAAS,EAAE;AAAA,MAC/D;AAAA,MAEA,gBAAgB,WAAmB,iBAAgD;AACjF,eAAO,IAAkB,yBAAyB,SAAS,kBAAkB,eAAe,EAAE;AAAA,MAChG;AAAA,MAEA,MAAM,UAAU,cAAsB,UAAoC;AACxE,cAAM,SAAS,MAAM;AAAA,UACnB,4BAA4B,YAAY,eAAe,mBAAmB,QAAQ,CAAC;AAAA,QACrF;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,yBAAyB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACxG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,wBAAwB;AAAA,MACnC,OAAO,MAMwB;AAC7B,eAAO,IAAuB,cAAc,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC5E;AAAA,MAEA,QAAQ,IAAoD;AAC1D,eAAO,IAAmC,cAAc,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACrF;AAAA,MAEA,gBAAgB,YAA4D;AAC1E,eAAO,IAAmC,8BAA8B,UAAU,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MAC7G;AAAA,MAEA,aAAa,WAAiD;AAC5D,eAAO,IAAyB,sBAAsB,SAAS,EAAE;AAAA,MACnE;AAAA,MAEA,QAAQ,IAAY,MAA4D;AAC9E,eAAO,IAAmC,cAAc,EAAE,aAAa,EAAE,QAAQ,QAAQ,MAAM,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MAClI;AAAA,MAEA,SAAS,IAAY,QAAyD;AAC5E,eAAO,IAAmC,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,aAAa,OAAO,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MACjJ;AAAA,MAEA,UAAU,IAAY,OAAuD;AAC3E,eAAO,IAAmC,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,SAAS,MAAM,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MAC5I;AAAA,MAEA,OAAO,IAAoD;AACzD,eAAO,IAAmC,cAAc,EAAE,IAAI,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,YAAY,EAAE,CAAC,EAAE,MAAM,MAAM,MAAS;AAAA,MACzI;AAAA,M
AEA,MAAM,gBAAgB,WAAoC;AACxD,cAAM,SAAS,MAAM,IAAyB,sBAAsB,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AACrG,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,4BAA4B;AAAA,MACvC,OACE,KACA,MASuB;AACvB,eAAO,IAAkB,mBAAmB,EAAE,QAAQ,QAAQ,MAAM,KAAK,CAAC;AAAA,MAC5E;AAAA,MAEA,YACE,KACA,QAS+C;AAC/C,eAAO,IAA0C,yBAAyB;AAAA,UACxE,QAAQ;AAAA,UACR,MAAM,EAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,QAAQ,KAAU,IAA+C;AAC/D,eAAO,IAA8B,mBAAmB,EAAE,EAAE,EAAE,MAAM,MAAM,MAAS;AAAA,MACrF;AAAA,MAEA,eAAe,KAAU,WAA4C;AACnE,eAAO,IAAoB,6BAA6B,SAAS,EAAE;AAAA,MACrE;AAAA,MAEA,cAAc,KAAU,WAAmB,UAA2C;AACpF,eAAO,IAAoB,6BAA6B,SAAS,SAAS,mBAAmB,QAAQ,CAAC,EAAE;AAAA,MAC1G;AAAA,MAEA,MAAM,kBAAkB,KAAU,WAAoC;AACpE,cAAM,SAAS,MAAM,IAAyB,6BAA6B,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AAC5G,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,iBAAiB,KAAU,WAAmB,UAAmC;AACrF,cAAM,SAAS,MAAM;AAAA,UACnB,6BAA6B,SAAS,SAAS,mBAAmB,QAAQ,CAAC;AAAA,UAC3E,EAAE,QAAQ,SAAS;AAAA,QACrB;AACA,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,iBAAiB,KAAU,WAAoC;AACnE,cAAM,SAAS,MAAM,IAAuB,6BAA6B,SAAS,QAAQ;AAC1F,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAMO,IAAM,2BAA2B;AAAA,MACtC,OACE,KACA,MAO4B;AAC5B,eAAO,IAAuB,iBAAiB;AAAA,UAC7C,QAAQ;AAAA,UACR,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,eAAe,KAAK,eAAe,YAAY;AAAA,YAC/C,sBAAsB,KAAK,sBAAsB,YAAY;AAAA,UAC/D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,IAAI,KAAU,WAA2D;AACvE,eAAO,IAA8B,2BAA2B,SAAS,EAAE,EAAE,KAAK,OAAK,KAAK,MAAS;AAAA,MACvG;AAAA,MAEA,MAAM,OAAO,KAAU,WAAqC;AAC1D,cAAM,SAAS,MAAM,IAA0B,2BAA2B,SAAS,IAAI,EAAE,QAAQ,SAAS,CAAC;AAC3G,eAAO,QAAQ,WAAW;AAAA,MAC5B;AAAA,MAEA,KAAK,KAAwC;AAC3C,eAAO,IAAyB,eAAe;AAAA,MACjD;AAAA,IACF;AAqDO,IAAM,iBAAiB;AAAA,MAC5B,MAAM,aACJ,WACA,UACA,aACA,UAC4B;AAC5B,eAAO,WAA8B,eAAe;AAAA,UAClD,QAAQ;AAAA,UACR,MAAM,EAAE,WAAW,UAAU,aAAa,SAAS;AAAA,QACrD,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,gBAAgB,WAA2C;AAC/D,cAAM,SAAS,MAAM,WAAqC,UAAU,SAAS,EAAE;AAC/E,eAAO,OAAO;AAAA,MAChB;AAAA,MAEA,MAAM,eAAe,QAAqE;AACxF,eAAO,WAAuD,aAAa,MAAM,EAAE;AAAA,MACrF;AAAA,MAEA,MAAM,WAAW,QAA+B;AAC9C,cAAM,WAAW,UAAU,MAAM,IAAI,EAAE,QAAQ,SAAS,CAAC;AAAA,MAC3D;AAAA,MAEA,MAAM,WAAW,QAAgB,MAA6C;AAC5E,cAAM,WAAW,UAAU,MAAM,IAAI,E
AAE,QAAQ,SAAS,MAAM,KAAK,CAAC;AAAA,MACtE;AAAA,IACF;AAAA;AAAA;;;AC1pBO,SAAS,aAAa,QAA0C;AACrE,qBAAmB,OAAO,KAAK,OAAO,OAAO;AAC7C,gBAAc;AAChB;AAMO,SAAS,QAAQ;AACtB,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AAEA,SAAO,CAAC;AACV;AAYO,SAAS,gBAAgB;AAC9B,sBAAoB;AACpB,gBAAc;AAChB;AA/FA,IA2DI,aAuCS,gBACA,gBACA,sBACA,aACA,cAEA,qBACA,mBACA,mBACA,iBAEA;AA7Gb;AAAA;AAAA;AAMA;AAqDA,IAAI,cAAc;AAuCX,IAAM,iBAAiB;AACvB,IAAM,iBAAiB;AACvB,IAAM,uBAAuB;AAC7B,IAAM,cAAc;AACpB,IAAM,eAAe;AAErB,IAAM,sBAAsB;AAC5B,IAAM,oBAAoB;AAC1B,IAAM,oBAAoB;AAC1B,IAAM,kBAAkB;AAExB,IAAM,qBAAqB;AAAA;AAAA;;;AC7GlC,SAAS,SAAS;AAAlB,IAGa,0BASA,qBAaA,kBAYA,qBASA,2BAkDA,0BAWA;AA3Gb;AAAA;AAAA;AAGO,IAAM,2BAA2B,EAAE,OAAO;AAAA,MAC/C,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA,MACzC,YAAY,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,MAChD,WAAW,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,MAC/C,YAAY,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,MAChD,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA,IAC5C,CAAC;AAGM,IAAM,sBAAsB,EAAE,OAAO;AAAA,MAC1C,MAAM,EAAE,OAAO;AAAA,MACf,aAAa,EAAE,OAAO;AAAA;AAAA,MAEtB,aAAa,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK;AAAA;AAAA,MAEjD,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAC;AAAA,IAClD,CAAC;AAMM,IAAM,mBAAmB,EAAE,OAAO;AAAA,MACvC,SAAS,EAAE,QAAQ;AAAA,MACnB,cAAc,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,QAAQ,CAAC;AAAA,MAC9C,YAAY,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,MACtC,eAAe,EAAE,OAAO,EAAE,SAAS;AAAA,MACnC,QAAQ,EAAE,KAAK,CAAC,WAAW,aAAa,QAAQ,CAAC;AAAA,MACjD,QAAQ,EAAE,QAAQ,EAAE,SAAS;AAAA,MAC7B,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,MAC3B,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,IAClC,CAAC;AAGM,IAAM,sBAAsB,EAAE,OAAO;AAAA,MAC1C,eAAe,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,QAAQ,CAAC,EAAE,SAAS;AAAA,MAC1D,iBAAiB,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,MAC3C,iBAAiB,EAAE,OAAO,EAAE,SAAS;AAAA,MACrC,iBAAiB,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAO;AAAA,MACtD,MAAM,iBAAiB,SAAS;AAAA,IAClC,CAAC;AAGM,IAAM,4BAA4B,EACtC,OAAO;AAAA;AAAA,MAEN,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAE9B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAE7B,gBAAgB,EAAE,OAAO,EAAE,QAAQ,sBAAsB;AAAA;AAAA,MAEzD,WAAW
,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAE/B,SAAS,EACN,MAAM,EAAE,OAAO,CAAC,EAChB,SAAS,EACT,QAAQ;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA;AAAA,MAEH,SAAS,EACN,MAAM,EAAE,OAAO,CAAC,EAChB,SAAS,EACT,QAAQ;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACL,CAAC,EACA,SAAS;AAGL,IAAM,2BAA2B,EACrC,OAAO;AAAA;AAAA,MAEN,KAAK,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA;AAAA,MAG/B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA,IAC/B,CAAC,EACA,SAAS;AAGL,IAAM,yBAAyB,EAAE,OAAO;AAAA;AAAA,MAE7C,cAAc,EAAE,OAAO,EAAE,QAAQ,2BAA2B;AAAA;AAAA,MAG5D,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,MAGtC,eAAe,yBAAyB,SAAS,EAAE,QAAQ,CAAC,CAAC;AAAA;AAAA,MAG7D,iBAAiB,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,MAG3C,QAAQ,EACL,OAAO;AAAA;AAAA,QAEN,WAAW,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,UAAU;AAAA;AAAA,QAEnD,uBAAuB,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAC;AAAA,MAClE,CAAC,EACA,SAAS,EACT,QAAQ,CAAC,CAAC;AAAA;AAAA,MAGb,SAAS,EACN,OAAO;AAAA;AAAA,QAEN,UAAU,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAO;AAAA;AAAA,QAE/C,eAAe,EAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA;AAAA,QAElD,oBAAoB,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE;AAAA,MACtD,CAAC,EACA,SAAS,EACT,QAAQ,CAAC,CAAC;AAAA;AAAA,MAGb,QAAQ,EACL,OAAO;AAAA,QACN,MAAM,EAAE,OAAO,EAAE,QAAQ,IAAI;AAAA,QAC7B,MAAM,EAAE,OAAO,EAAE,QAAQ,WAAW;AAAA;AAAA;AAAA,QAGpC,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,MACvC,CAAC,EACA,QAAQ,EAAE,MAAM,MAAM,MAAM,YAAY,CAAC;AAAA;AAAA,MAG5C,cAAc,EAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,kBAAkB;AAAA;AAAA;AAAA,MAI9D,cAAc;AAAA;AAAA,MAGd,eAAe;AAAA,IACjB,CAAC;AAAA;AAAA;;;ACpKD,SAAS,YAAY,cAAc,WAAW,qBAAqB;AACnE,SAAS,SAAS,SAAS,YAAY;AACvC,SAAS,SAAS,gBAAgB;AA0B3B,SAAS,yBAAyB,YAAsC;AAC7E,QAAM,mBAA8D,CAAC;AACrE,QAAM,eAA0D,CAAC;AACjE,QAAM,iBAA2B,CAAC;AAClC,MAAI,eAA8B;AAGlC,QAAM,gBAAgB,KAAK,YAAY,gBAAgB,OAAO;AAC9D,MAAI,WAAW,aAAa,GAAG;AAC7B,qBAAiB,KAAK,EAAE,MAAM,eAAe,UAAU,EAAE,CAAC;AAC1D,mBAAe,KAAK,aAAa;AAAA,EACnC;AAGA,QAAM,iBAAiB,KAAK,YAAY,g
BAAgB,QAAQ;AAChE,MAAI,WAAW,cAAc,GAAG;AAC9B,iBAAa,KAAK,EAAE,MAAM,gBAAgB,UAAU,EAAE,CAAC;AACvD,mBAAe,KAAK,cAAc;AAAA,EACpC;AAGA,QAAM,iBAAiB,KAAK,YAAY,WAAW,OAAO;AAC1D,MAAI,WAAW,cAAc,GAAG;AAE9B,iBAAa,KAAK,EAAE,MAAM,gBAAgB,UAAU,EAAE,CAAC;AACvD,mBAAe,KAAK,cAAc;AAAA,EACpC;AAGA,QAAM,kBAAkB,KAAK,YAAY,WAAW,QAAQ;AAC5D,MAAI,WAAW,eAAe,GAAG;AAC/B,iBAAa,KAAK,EAAE,MAAM,iBAAiB,UAAU,EAAE,CAAC;AACxD,mBAAe,KAAK,eAAe;AAAA,EACrC;AAGA,QAAM,kBAAkB,KAAK,YAAY,QAAQ;AACjD,MAAI,WAAW,eAAe,GAAG;AAC/B,iBAAa,KAAK,EAAE,MAAM,iBAAiB,UAAU,EAAE,CAAC;AACxD,mBAAe,KAAK,eAAe;AAAA,EACrC;AAGA,QAAM,WAAW,KAAK,YAAY,WAAW;AAC7C,MAAI,WAAW,QAAQ,GAAG;AACxB,mBAAe;AAAA,EACjB;AAIA,QAAM,UAAU,QAAQ,YAAY,IAAI,QAAQ,WAAW,EAAE,CAAC;AAC9D,QAAM,oBAAoB;AAAA,IACxB,QAAQ,SAAS,mBAAmB;AAAA;AAAA,IACpC,QAAQ,SAAS,kBAAkB;AAAA;AAAA,EACrC;AACA,QAAM,mBAAmB,kBAAkB,KAAK,OAAK,WAAW,CAAC,CAAC;AAClE,MAAI,kBAAkB;AACpB,iBAAa,KAAK,EAAE,MAAM,kBAAkB,UAAU,IAAI,CAAC;AAC3D,mBAAe,KAAK,gBAAgB;AAAA,EACtC;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQO,SAAS,sBAA8B;AAC5C,QAAM,UAAU;AAEhB,UAAQ,SAAS,GAAG;AAAA,IAClB,KAAK;AACH,aAAO,KAAK,QAAQ,GAAG,WAAW,uBAAuB,OAAO;AAAA,IAClE,KAAK;AACH,aAAO,KAAK,QAAQ,IAAI,WAAW,KAAK,QAAQ,GAAG,WAAW,SAAS,GAAG,OAAO;AAAA,IACnF;AAEE,aAAO,KAAK,QAAQ,IAAI,iBAAiB,KAAK,QAAQ,GAAG,UAAU,OAAO,GAAG,OAAO;AAAA,EACxF;AACF;AAKO,SAAS,yBAAiC;AAC/C,QAAM,MAAM,oBAAoB;AAChC,MAAI,CAAC,WAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AASA,SAAS,eAAe,UAAiC;AAEvD,MAAI,aAAa;AAEjB,SAAO,eAAe,QAAQ,UAAU,GAAG;AACzC,eAAW,YAAY,mBAAmB;AACxC,YAAM,aAAa,QAAQ,YAAY,QAAQ;AAC/C,UAAI,WAAW,UAAU,GAAG;AAC1B,eAAO;AAAA,MACT;AAAA,IACF;AACA,iBAAa,QAAQ,UAAU;AAAA,EACjC;AAGA,QAAM,aAAa,oBAAoB;AACvC,aAAW,YAAY,mBAAmB;AACxC,UAAM,aAAa,KAAK,YAAY,QAAQ;AAC5C,QAAI,WAAW,UAAU,GAAG;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKO,SAAS,WACd,YACA,kBACgB;AAChB,QAAM,MAAM,oBAAoB,QAAQ,IAAI;AAG5C,MAAI,YAAuC,CAAC;AAC5C,MAAI,YAAY;AAEhB,MAAI,YAAY;AACd,QAAI,CAAC,WAAW,UAAU,GAAG;AAC3B,YAAM,IAAI,MAAM,0BAA0B,UAAU,EAAE;AAAA,IACxD;AACA,UAAM,UAAU,aAAa,YAAY,OAAO;AAChD,gBA
AY,KAAK,MAAM,OAAO;AAC9B,gBAAY,QAAQ,QAAQ,UAAU,CAAC;AAAA,EACzC,OAAO;AACL,UAAM,YAAY,eAAe,GAAG;AACpC,QAAI,WAAW;AACb,YAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,kBAAY,KAAK,MAAM,OAAO;AAC9B,kBAAY,QAAQ,SAAS;AAAA,IAC/B;AAAA,EACF;AAGA,MAAI,QAAQ,IAAI,mBAAmB;AACjC,cAAU,eAAe,QAAQ,IAAI;AAAA,EACvC;AACA,MAAI,QAAQ,IAAI,kBAAkB;AAChC,cAAU,SAAS;AAAA,MACjB,MAAM,SAAS,QAAQ,IAAI,kBAAkB,EAAE;AAAA,MAC/C,MAAM,UAAU,QAAQ,QAAQ;AAAA,IAClC;AAAA,EACF;AACA,MAAI,QAAQ,IAAI,eAAe;AAC7B,cAAU,eAAe,QAAQ,IAAI;AAAA,EACvC;AAGA,QAAM,SAAS,uBAAuB,MAAM,SAAS;AAMrD,MAAI;AACJ,MAAI,kBAAkB;AAEpB,+BAA2B;AAAA,EAC7B,WAAW,OAAO,oBAAoB,OAAO,qBAAqB,OAAO,OAAO,iBAAiB,WAAW,GAAG,GAAG;AAEhH,+BAA2B,OAAO;AAAA,EACpC,OAAO;AAEL,+BAA2B,QAAQ,IAAI;AAAA,EACzC;AAGA,QAAM,aAAa,yBAAyB,wBAAwB;AAGpE,QAAM,kBAAkB,OAAO,QAAQ,yBAAyB,CAAC,GAC9D,IAAI,CAAC,QAAQ,QAAQ,WAAW,GAAG,CAAC,EACpC,OAAO,CAAC,QAAQ,WAAW,GAAG,CAAC;AAElC,QAAM,4BAA4B;AAAA,IAChC,GAAG,WAAW;AAAA,IACd,GAAG;AAAA,EACL;AAGA,MAAI;AACJ,MAAI,OAAO,gBAAgB,OAAO,iBAAiB,oBAAoB;AAErE,2BAAuB,QAAQ,WAAW,OAAO,YAAY;AAAA,EAC/D,OAAO;AAEL,UAAM,aAAa,uBAAuB;AAC1C,2BAAuB,KAAK,YAAY,gBAAgB;AAAA,EAC1D;AAGA,QAAM,wBAAqD;AAAA,IACzD,UAAU,QAAQ,IAAI,uBAAuB,OAAO,eAAe,YAAY;AAAA,IAC/E,SAAS,QAAQ,IAAI,mBAAmB,OAAO,eAAe,WAAW;AAAA,IACzE,gBACE,QAAQ,IAAI,0BACZ,OAAO,eAAe,kBACtB;AAAA,IACF,WAAW,OAAO,eAAe,aAAa;AAAA,IAC9C,SAAS,OAAO,eAAe,WAAW;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS,OAAO,eAAe,WAAW;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAIA,QAAM,qBAAqB;AAC3B,QAAM,YAAY,QAAQ,IAAI,0BAA0B,OAAO,cAAc,OAAO;AACpF,QAAM,gBAAgB,QAAQ,IAAI,wBAAwB,OAAO,cAAc,WAAW,kBAAkB;AAE5G,QAAM,uBAAmD;AAAA,IACvD,KAAK;AAAA,IACL,SAAS;AAAA,IACT,cAAc,CAAC,CAAC,aAAa,CAAC,CAAC;AAAA,EACjC;AAEA,QAAM,WAA2B;AAAA,IAC/B,GAAG;AAAA,IACH,QAAQ;AAAA,MACN,MAAM,OAAO,OAAO;AAAA,MACpB,MAAM,OAAO,OAAO,QAAQ;AAAA,MAC5B,WAAW,OAAO,OAAO;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IACl
B;AAAA,IACA;AAAA,EACF;AAEA,iBAAe;AACf,SAAO;AACT;AAKO,SAAS,YAA4B;AAC1C,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AACA,SAAO;AACT;AAKO,SAAS,iBACd,UACA,eACS;AACT,QAAM,SAAS,UAAU;AAGzB,MAAI,eAAe,gBAAgB,GAAG,MAAM,QAAW;AACrD,WAAO,cAAc,cAAc,GAAG;AAAA,EACxC;AAGA,MAAI,eAAe,gBAAgB,QAAQ,MAAM,QAAW;AAC1D,WAAO,cAAc,cAAc,QAAQ;AAAA,EAC7C;AAGA,QAAM,kBAAkB,OAAO;AAC/B,MAAI,gBAAgB,QAAQ,MAAM,QAAW;AAC3C,WAAO,gBAAgB,QAAQ;AAAA,EACjC;AAGA,MAAI,aAAa,QAAQ;AACvB,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAgDA,SAAS,oBAAmC;AAC1C,QAAM,WAAW,KAAK,oBAAoB,GAAG,aAAa;AAC1D,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO;AAAA,EACT;AACA,MAAI;AACF,UAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,UAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,WAAO,KAAK,WAAW;AAAA,EACzB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,YAAYA,UAAiB,QAAuB;AAClE,QAAM,SAAS,uBAAuB;AACtC,QAAM,WAAW,KAAK,QAAQ,aAAa;AAC3C,QAAM,OAAsB;AAAA,IAC1B,SAAAA;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC;AAAA,EACF;AACA,gBAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,IAAM,CAAC;AACxE;AAqBA,eAAsB,yBACpB,WACA,MAC8C;AAC9C,QAAM,WAAW,MAAM,MAAM,GAAG,SAAS,kBAAkB;AAAA,IACzD,QAAQ;AAAA,IACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,IAC9C,MAAM,KAAK,UAAU,EAAE,MAAM,QAAQ,QAAO,oBAAI,KAAK,GAAE,YAAY,CAAC,GAAG,CAAC;AAAA,EAC1E,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,UAAM,IAAI,MAAM,MAAM,SAAS,4BAA4B,SAAS,MAAM,EAAE;AAAA,EAC9E;AAEA,QAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,cAAY,KAAK,SAAS,KAAK,MAAM;AAErC,SAAO;AACT;AAMA,eAAsB,oBAAoB,WAAoC;AAE5E,MAAI,QAAQ,IAAI,sBAAsB;AACpC,WAAO,QAAQ,IAAI;AAAA,EACrB;AAGA,QAAM,YAAY,kBAAkB;AACpC,MAAI,WAAW;AACb,WAAO;AAAA,EACT;AAGA,QAAM,EAAE,SAAAA,SAAQ,IAAI,MAAM,yBAAyB,SAAS;AAC5D,SAAOA;AACT;AA2BA,SAAS,iBAAyB;AAChC,QAAM,SAAS,uBAAuB;AACtC,SAAO,KAAK,QAAQ,aAAa;AACnC;AAKA,SAAS,oBAAmC;AAC1C,QAAM,WAAW,eAAe;AAChC,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO,CAAC;AAAA,EACV;AACA,MAAI;AACF,UAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,SAAS,kBAAkB,MAA2B;AACpD,QAAM,WAAW,eAAe;AAChC,gBAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,IAAM
,CAAC;AACxE;AAWO,SAAS,qBAA2B;AACzC,QAAM,aAAa,kBAAkB;AAErC,aAAW,CAAC,UAAU,MAAM,KAAK,OAAO,QAAQ,gBAAgB,GAAG;AACjE,QAAI,CAAC,QAAQ,IAAI,MAAM,KAAK,WAAW,QAAQ,GAAG;AAChD,cAAQ,IAAI,MAAM,IAAI,WAAW,QAAQ;AAAA,IAC3C;AAAA,EACF;AACF;AAmBO,SAAS,UAAU,UAAkB,QAAsB;AAChE,QAAM,qBAAqB,SAAS,YAAY;AAChD,QAAM,SAAS,iBAAiB,kBAAkB;AAElD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,qBAAqB,QAAQ,gBAAgB,oBAAoB,KAAK,IAAI,CAAC,EAAE;AAAA,EAC/F;AAGA,QAAM,aAAa,kBAAkB;AACrC,aAAW,kBAAkB,IAAI;AACjC,oBAAkB,UAAU;AAG5B,UAAQ,IAAI,MAAM,IAAI;AACxB;AAKO,SAAS,aAAa,UAAwB;AACnD,QAAM,qBAAqB,SAAS,YAAY;AAChD,QAAM,SAAS,iBAAiB,kBAAkB;AAElD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,qBAAqB,QAAQ,gBAAgB,oBAAoB,KAAK,IAAI,CAAC,EAAE;AAAA,EAC/F;AAGA,QAAM,aAAa,kBAAkB;AACrC,SAAO,WAAW,kBAAkB;AACpC,oBAAkB,UAAU;AAI9B;AAMO,SAAS,kBAMb;AACD,QAAM,aAAa,kBAAkB;AAErC,SAAO,oBAAoB,IAAI,CAAC,aAAa;AAC3C,UAAM,SAAS,iBAAiB,QAAQ;AACxC,UAAM,WAAW,QAAQ,IAAI,MAAM;AACnC,UAAM,cAAc,WAAW,QAAQ;AAEvC,QAAI,SAAqC;AACzC,QAAI;AAEJ,QAAI,UAAU;AAEZ,UAAI,eAAe,aAAa,aAAa;AAC3C,iBAAS;AAAA,MACX,OAAO;AACL,iBAAS;AAAA,MACX;AACA,cAAQ;AAAA,IACV,WAAW,aAAa;AACtB,eAAS;AACT,cAAQ;AAAA,IACV;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,YAAY,CAAC,CAAC;AAAA,MACd;AAAA,MACA,WAAW,QAAQ,WAAW,KAAK,IAAI;AAAA,IACzC;AAAA,EACF,CAAC;AACH;AAKA,SAAS,WAAW,KAAqB;AACvC,MAAI,IAAI,UAAU,IAAI;AACpB,WAAO,SAAS,IAAI,MAAM,EAAE;AAAA,EAC9B;AACA,SAAO,IAAI,MAAM,GAAG,CAAC,IAAI,QAAQ,IAAI,MAAM,EAAE;AAC/C;AApqBA,IAYM,mBAoHF,cA2QE,eA0GA,eAGA,kBASO;AAjgBb;AAAA;AAAA;AAGA;AAmqBA;AA1pBA,IAAM,oBAAoB;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAgHA,IAAI,eAAsC;AA2Q1C,IAAM,gBAAgB;AA0GtB,IAAM,gBAAgB;AAGtB,IAAM,mBAA2C;AAAA,MAC/C,WAAW;AAAA,MACX,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,KAAK;AAAA,MACL,cAAc;AAAA,IAChB;AAGO,IAAM,sBAAsB,OAAO,KAAK,gBAAgB;AAAA;AAAA;;;ACjgB/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAAS,YAAAC,WAAU,eAAe;AAClC,SAAS,WAAAC,UAAS,UAAU,WAAAC,UAAS,YAAAC,iBAAgB;AACrD,SAAS,cAAAC,mBAAgC;AACzC,SAAS,iBAAiB;AAwB1B,SAAS,sBAAsB,SAAmE;AAChG,QAAM,mBAAmB,QAAQ,MAAM,mCAAmC;AAE1E,MAAI,CAAC,kBAAkB;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,CAAC,
EAAE,aAAa,IAAI,IAAI;AAE9B,MAAI;AAEF,UAAM,QAAQ,YAAY,MAAM,IAAI;AACpC,UAAM,OAAgC,CAAC;AACvC,QAAI,eAAgC;AACpC,QAAI,kBAAiC;AAErC,eAAW,QAAQ,OAAO;AAExB,UAAI,mBAAmB,KAAK,KAAK,EAAE,WAAW,GAAG,GAAG;AAClD,YAAI,QAAQ,KAAK,KAAK,EAAE,MAAM,CAAC,EAAE,KAAK;AAEtC,YAAK,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,KAC3C,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAI;AAClD,kBAAQ,MAAM,MAAM,GAAG,EAAE;AAAA,QAC3B;AACA,sBAAc,KAAK,KAAK;AACxB;AAAA,MACF;AAGA,UAAI,mBAAmB,cAAc;AACnC,aAAK,eAAe,IAAI;AACxB,uBAAe;AACf,0BAAkB;AAAA,MACpB;AAEA,YAAM,aAAa,KAAK,QAAQ,GAAG;AACnC,UAAI,aAAa,GAAG;AAClB,cAAM,MAAM,KAAK,MAAM,GAAG,UAAU,EAAE,KAAK;AAC3C,YAAI,QAAQ,KAAK,MAAM,aAAa,CAAC,EAAE,KAAK;AAG5C,YAAI,UAAU,MAAM,UAAU,MAAM;AAClC,4BAAkB;AAClB,yBAAe,CAAC;AAChB;AAAA,QACF;AAGA,YAAI,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAG;AAChD,gBAAM,eAAe,MAAM,MAAM,GAAG,EAAE;AACtC,gBAAM,QAAQ,aAAa,MAAM,GAAG,EAAE,IAAI,UAAQ;AAChD,gBAAI,UAAU,KAAK,KAAK;AACxB,gBAAK,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,KAC/C,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,GAAI;AACtD,wBAAU,QAAQ,MAAM,GAAG,EAAE;AAAA,YAC/B;AACA,mBAAO;AAAA,UACT,CAAC,EAAE,OAAO,UAAQ,KAAK,SAAS,CAAC;AACjC,eAAK,GAAG,IAAI;AACZ;AAAA,QACF;AAGA,YAAK,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,KAC3C,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAI;AAClD,kBAAQ,MAAM,MAAM,GAAG,EAAE;AAAA,QAC3B;AAGA,YAAI,UAAU,QAAQ;AACpB,eAAK,GAAG,IAAI;AAAA,QACd,WAAW,UAAU,SAAS;AAC5B,eAAK,GAAG,IAAI;AAAA,QACd,OAAO;AACL,eAAK,GAAG,IAAI;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAGA,QAAI,mBAAmB,cAAc;AACnC,WAAK,eAAe,IAAI;AAAA,IAC1B;AAEA,UAAM,WAAW,oBAAoB,MAAM,IAAI;AAC/C,WAAO,EAAE,UAAU,MAAM,KAAK,KAAK,EAAE;AAAA,EACvC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,SAAS,qBAAqB,UAA0B;AACtD,SAAO,SAAS,UAAUF,SAAQ,QAAQ,CAAC,EACxC,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,CAAC,MAAM,EAAE,YAAY,CAAC;AAC5C;AAiBA,eAAsB,wBACpB,WACA,UAA6B,CAAC,GACZ;AAClB,QAAM;AAAA,IACJ,WAAW;AAAA,IACX,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,EACrB,IAAI;AAEJ,MAAI,CAACE,YAAW,SAAS,GAAG;AAC1B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAkB,CAAC;AACzB,QAAM,UAAU,MAAM,QAAQ,WAAW,EAAE,eAAe,KAAK,CAAC;AAEhE,aAAW,SAAS,SAAS;AAE3B,QAAI;AACJ,QAAI;AAEJ,QAAI,MAAM,YAAY,GAAG;AAEvB,YA
AM,cAAcH,SAAQ,WAAW,MAAM,MAAM,UAAU;AAC7D,UAAIG,YAAW,WAAW,GAAG;AAC3B,mBAAW;AACX,mBAAW,MAAM;AAAA,MACnB,OAAO;AACL;AAAA,MACF;AAAA,IACF,WAAW,MAAM,KAAK,SAAS,KAAK,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACpE,iBAAWH,SAAQ,WAAW,MAAM,IAAI;AACxC,iBAAW,MAAM;AAAA,IACnB,OAAO;AACL;AAAA,IACF;AAEA,UAAM,UAAU,MAAMD,UAAS,UAAU,OAAO;AAChD,UAAM,SAAS,sBAAsB,OAAO;AAE5C,QAAI,QAAQ;AACV,YAAM,cAAc,oBAAoB,OAAO,SAAS;AACxD,YAAM,WAA0B,cAAc,WAAW;AAEzD,aAAO,KAAK;AAAA,QACV,MAAM,OAAO,SAAS;AAAA,QACtB,aAAa,OAAO,SAAS;AAAA,QAC7B;AAAA,QACA;AAAA,QACA,OAAO,OAAO,SAAS;AAAA,QACvB;AAAA,QACA;AAAA,QACA,WAAW;AAAA,MACb,CAAC;AAAA,IACH,OAAO;AAEL,YAAM,OAAO,qBAAqB,QAAQ;AAC1C,YAAM,iBAAiB,QAAQ,MAAM,MAAM,EAAE,CAAC,GAAG,MAAM,GAAG,GAAG,KAAK;AAElE,aAAO,KAAK;AAAA,QACV;AAAA,QACA,aAAa,eAAe,QAAQ,SAAS,EAAE,EAAE,KAAK;AAAA,QACtD;AAAA,QACA,aAAa;AAAA,QACb,OAAO,CAAC;AAAA,QACR,UAAU,mBAAmB,WAAW;AAAA,QACxC;AAAA,QACA,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,cAAc,aAAyC;AAC3E,QAAM,YAAqB,CAAC;AAC5B,QAAM,YAAY,oBAAI,IAAY;AAElC,aAAW,OAAO,aAAa;AAC7B,UAAM,SAAS,MAAM,wBAAwB,GAAG;AAChD,eAAW,SAAS,QAAQ;AAE1B,UAAI,CAAC,UAAU,IAAI,MAAM,KAAK,YAAY,CAAC,GAAG;AAC5C,kBAAU,IAAI,MAAM,KAAK,YAAY,CAAC;AACtC,kBAAU,KAAK,KAAK;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,4BACpB,YAC0E;AAC1E,QAAM,YAAqB,CAAC;AAC5B,QAAM,YAAY,oBAAI,IAAY;AAGlC,aAAW,EAAE,MAAM,SAAS,KAAK,WAAW,kBAAkB;AAC5D,UAAM,SAAS,MAAM,wBAAwB,MAAM;AAAA,MACjD;AAAA,MACA,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AACD,eAAW,SAAS,QAAQ;AAC1B,UAAI,CAAC,UAAU,IAAI,MAAM,KAAK,YAAY,CAAC,GAAG;AAC5C,kBAAU,IAAI,MAAM,KAAK,YAAY,CAAC;AACtC,kBAAU,KAAK,KAAK;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAGA,aAAW,EAAE,MAAM,SAAS,KAAK,WAAW,cAAc;AACxD,UAAM,SAAS,MAAM,wBAAwB,MAAM;AAAA,MACjD;AAAA,MACA,iBAAiB;AAAA,MACjB,kBAAkB;AAAA,IACpB,CAAC;AACD,eAAW,SAAS,QAAQ;AAC1B,UAAI,CAAC,UAAU,IAAI,MAAM,KAAK,YAAY,CAAC,GAAG;AAC5C,kBAAU,IAAI,MAAM,KAAK,YAAY,CAAC;AACtC,kBAAU,KAAK,KAAK;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAGA,QAAM,eAAe,UAAU,OAAO,OAAK,EAAE,eAAe,EAAE,aAAa,QAAQ;AACnF,QAAM,iBAAiB,UAAU,OAAO,OAAK,CAAC,EAAE,eAAe,EAAE,aAAa,QAAQ;AAGtF,QAAM,oB
AAwC,MAAM,QAAQ;AAAA,IAC1D,aAAa,IAAI,OAAO,UAAU;AAChC,YAAM,UAAU,MAAMA,UAAS,MAAM,UAAU,OAAO;AACtD,YAAM,SAAS,sBAAsB,OAAO;AAC5C,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS,SAAS,OAAO,OAAO;AAAA,MAClC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,KAAK;AAAA,EACP;AACF;AAKA,eAAsB,qBACpB,QACA,aACA,kBAC6B;AAC7B,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,gBAAgB,YAAY,IAAI,OAAK;AACzC,QAAI,EAAE,WAAW,gBAAgB,GAAG;AAClC,aAAOG,UAAS,kBAAkB,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,gBAAgB,OAAO,OAAO,WAAS;AAE3C,QAAI,MAAM,eAAe,MAAM,aAAa,UAAU;AACpD,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,GAAG;AAC5C,aAAO;AAAA,IACT;AAGA,WAAO,cAAc;AAAA,MAAK,UACxB,MAAM,MAAM,KAAK,aAAW,UAAU,MAAM,SAAS,EAAE,WAAW,KAAK,CAAC,CAAC;AAAA,IAC3E;AAAA,EACF,CAAC;AAGD,QAAM,qBAAyC,MAAM,QAAQ;AAAA,IAC3D,cAAc,IAAI,OAAO,UAAU;AACjC,YAAM,UAAU,MAAMH,UAAS,MAAM,UAAU,OAAO;AACtD,YAAM,SAAS,sBAAsB,OAAO;AAC5C,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS,SAAS,OAAO,OAAO;AAAA,QAChC,UAAU;AAAA,MACZ;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAsB,aAAa,cAAqD;AACtF,MAAI,CAAC,gBAAgB,CAACI,YAAW,YAAY,GAAG;AAC9C,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,MAAMJ,UAAS,cAAc,OAAO;AACpD,SAAO;AACT;AAKA,eAAsB,iBACpB,WACA,aACkC;AAClC,QAAM,YAAY,MAAM,cAAc,WAAW;AACjD,QAAM,QAAQ,UAAU;AAAA,IACtB,CAAC,MAAM,EAAE,KAAK,YAAY,MAAM,UAAU,YAAY;AAAA,EACxD;AAEA,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,MAAMA,UAAS,MAAM,UAAU,OAAO;AACtD,QAAM,SAAS,sBAAsB,OAAO;AAE5C,SAAO;AAAA,IACL,GAAG;AAAA,IACH,SAAS,SAAS,OAAO,OAAO;AAAA,EAClC;AACF;AAKO,SAAS,uBAAuB,QAAyB;AAE9D,QAAM,iBAAiB,OAAO,OAAO,OAAK,CAAC,EAAE,eAAe,EAAE,aAAa,QAAQ;AAEnF,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,CAAC,8DAA8D;AAC7E,aAAW,SAAS,gBAAgB;AAClC,UAAM,WAAW,MAAM,OAAO,SAAS,qBAAqB,MAAM,MAAM,KAAK,IAAI,CAAC,MAAM;AACxF,UAAM,KAAK,KAAK,MAAM,IAAI,KAAK,MAAM,WAAW,GAAG,QAAQ,EAAE;AAAA,EAC/D;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAKO,SAAS,yBAAyB,QAAoC;AAC3E,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,WAAqB,CAAC;AAE5B,aAAW,SAAS,QAAQ;AAC1B,aAAS,KAAK,OAAO,MAAM,IAAI;AAAA;AAAA,EAAO,MAAM,OAA
O,EAAE;AAAA,EACvD;AAEA,SAAO;AAAA;AAAA,EAA+C,SAAS,KAAK,aAAa,CAAC;AACpF;AAKO,SAAS,wBAAwB,QAAoC;AAC1E,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,WAAqB,CAAC;AAE5B,aAAW,SAAS,QAAQ;AAC1B,aAAS,KAAK,OAAO,MAAM,IAAI;AAAA;AAAA,EAAO,MAAM,OAAO,EAAE;AAAA,EACvD;AAEA,SAAO;AAAA;AAAA,EAAqE,SAAS,KAAK,aAAa,CAAC;AAC1G;AAKO,SAAS,sBAAsB,SAAgC;AACpE,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AAEA,SAAO;AAAA;AAAA,EAA0C,OAAO;AAC1D;AAxcA;AAAA;AAAA;AAIA;AAAA;AAAA;;;ACJA,IAAAK,cAAA;AAAA;AAAA;AAAA;AAAA;;;ACKA,SAAS,gBAAgB;AAMlB,SAAS,gBAAgB,kBAAyC;AACvE,MAAI;AACF,UAAM,SAAS,SAAS,6BAA6B;AAAA,MACnD,KAAK;AAAA,MACL,UAAU;AAAA,MACV,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,IAChC,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAUO,SAAS,kBAAkB,KAAmD;AAEnF,QAAM,WAAW,IAAI,QAAQ,UAAU,EAAE;AAGzC,QAAM,WAAW,SAAS,MAAM,0BAA0B;AAC1D,MAAI,UAAU;AACZ,WAAO,EAAE,KAAK,SAAS,CAAC,GAAG,MAAM,SAAS,CAAC,EAAE;AAAA,EAC/C;AAGA,QAAM,aAAa,SAAS,MAAM,kCAAkC;AACpE,MAAI,YAAY;AACd,WAAO,EAAE,KAAK,WAAW,CAAC,GAAG,MAAM,WAAW,CAAC,EAAE;AAAA,EACnD;AAGA,QAAM,gBAAgB,SAAS,MAAM,+BAA+B;AACpE,MAAI,eAAe;AACjB,WAAO,EAAE,KAAK,cAAc,CAAC,GAAG,MAAM,cAAc,CAAC,EAAE;AAAA,EACzD;AAEA,SAAO;AACT;AASA,SAAS,qBAAqB,KAAqB;AACjD,SAAO,IACJ,YAAY,EACZ,QAAQ,cAAc,GAAG,EACzB,QAAQ,YAAY,EAAE,EACtB,QAAQ,OAAO,GAAG;AACvB;AAUA,eAAsB,iBACpB,kBACA,qBACwB;AAExB,MAAI,qBAAqB;AACvB,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,gBAAgB,gBAAgB;AAClD,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,kBAAkB,SAAS;AAC1C,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAGA,QAAM,MAAM,qBAAqB,OAAO,GAAG;AAC3C,QAAM,OAAO,qBAAqB,OAAO,IAAI;AAC7C,SAAO,eAAe,GAAG,IAAI,IAAI;AACnC;AAzGA;AAAA;AAAA;AAAA;AAAA;;;ACKA,SAAS,cAAAC,mBAAkB;AAL3B;AAAA;AAAA;AAAA;AAAA;;;ACMA,SAAS,WAAAC,UAAS,YAAAC,iBAAgB;AANlC;AAAA;AAAA;AAQA;AAAA;AAAA;;;ACyCO,SAAS,iBAAiB,WAAmB,KAAa;AAC/D,EAAAC,mBAAkB,UAAU,QAAQ,OAAO,EAAE;AAC7C,EAAAC,WAAU;AACZ;AAKO,SAAS,2BAAoC;AAClD,SAAO,CAAC,CAACD,oBAAmB,CAAC,CAACC;AAChC;AAKA,eAAe,UACb,MACA,UAA+C,CAAC,GACpC;AACZ,MAAI,CAACD,oBAAmB,CAACC,UAAS;AAChC,UAAM,IAAI,MAAM,8DAA8D;AAAA,EAChF;AAEA,QAAM,MAAM,GAAGD,gBAAe,WAAW,IAAI;AAC7C,QAAM,
OAAoB;AAAA,IACxB,QAAQ,QAAQ,UAAU;AAAA,IAC1B,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB,UAAUC,QAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM;AAChB,SAAK,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,EACzC;AAEA,QAAM,WAAW,MAAM,MAAM,KAAK,IAAI;AAEtC,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,EAAE,OAAO,gBAAgB,EAAE;AAC5E,UAAM,IAAI,MAAM,MAAM,SAAS,QAAQ,SAAS,MAAM,EAAE;AAAA,EAC1D;AAEA,SAAO,SAAS,KAAK;AACvB;AA8EO,SAAS,kBAAuC;AACrD,MAAI,CAAC,yBAAyB,GAAG;AAE/B,QAAI;AACF,YAAM,SAAS,UAAU;AACzB,UAAI,OAAO,qBAAqB,OAAO,OAAO,qBAAqB,SAAS;AAC1E,yBAAiB,OAAO,qBAAqB,KAAK,OAAO,qBAAqB,OAAO;AAAA,MACvF,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,oBAAmC;AAEzD;AAKO,SAAS,4BAAqC;AACnD,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,WAAO,CAAC,EAAE,OAAO,qBAAqB,OAAO,OAAO,qBAAqB;AAAA,EAC3E,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,oBAA4B;AAC1C,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,WAAO,OAAO,sBAAsB;AAAA,EACtC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AA1NA,IA2CID,kBACAC,UAuDS;AAnGb;AAAA;AAAA;AAKA;AAsCA,IAAID,mBAAiC;AACrC,IAAIC,WAAyB;AAuDtB,IAAM,qBAAqB;AAAA,MAChC,YAAY;AAAA;AAAA;AAAA;AAAA,QAIV,MAAM,cACJ,OACA,SAI0B;AAC1B,iBAAO,UAA2B,UAAU;AAAA,YAC1C,QAAQ;AAAA,YACR,MAAM;AAAA,cACJ;AAAA,cACA,WAAW,QAAQ;AAAA,cACnB,gBAAgB,QAAQ;AAAA,YAC1B;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MAEA,QAAQ;AAAA;AAAA;AAAA;AAAA,QAIN,MAAM,aACJ,OACA,SAMuB;AACvB,iBAAO,UAAwB,WAAW;AAAA,YACxC,QAAQ;AAAA,YACR,MAAM;AAAA,cACJ;AAAA,cACA,WAAW,QAAQ;AAAA,cACnB,MAAM,QAAQ,QAAQ;AAAA,cACtB,gBAAgB,QAAQ;AAAA,YAC1B;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,gBAAgB,WAAkC;AACtD,cAAM,UAAU,cAAc,mBAAmB,SAAS,CAAC,IAAI;AAAA,UAC7D,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA;AAAA;AAAA;AAAA,MAKA,MAAM,QAAuB;AAAA,MAE7B;AAAA,IACF;AAAA;AAAA;;;AC7JA,SAAS,gBAAAC,eAAc,gBAAgB;AACvC,SAAe,YAAAC,iBAAgB;AAC/B,SAAS,aAAAC,kBAAiB;AAic1B,eAAsB,eAAe,kBAAgD;AACnF,QAAM,SAAS,UAAU;AACzB,QAAM,YAAY,MAAM;AAAA,IACtB;AAAA,IACA,OAAO,sBAAsB;AAAA,EAC/B;AAEA,QAAM,eAAe,OAAO,qBAAqB;AAEjD,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,WAAW;AAAA,MACX,aAAa;AAAA,MACb,eAAe;AAAA,MACf,sBAAs
B;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,UAAM,KAAK,MAAM;AACjB,UAAM,SAAS,MAAM,mBAAmB,IAAI,IAAI,SAAS;AAEzD,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,QACL;AAAA,QACA,aAAa;AAAA,QACb,eAAe;AAAA,QACf,sBAAsB;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,aAAa,OAAO,eAAe;AAAA,MACnC,eAAe,OAAO,iBAAiB;AAAA,MACvC,sBAAsB,OAAO,wBAAwB;AAAA,MACrD;AAAA,IACF;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,MACL;AAAA,MACA,aAAa;AAAA,MACb,eAAe;AAAA,MACf,sBAAsB;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AACF;AAKA,eAAsB,iBAAiB,kBAA4C;AACjF,QAAM,SAAS,MAAM,eAAe,gBAAgB;AACpD,SAAO,OAAO,cAAc;AAC9B;AAjgBA,IAgBMC;AAhBN;AAAA;AAAA;AAQA;AACA;AAEA;AACA;AACA;AAGA,IAAMA,iBAAgB,OAAO;AAAA;AAAA;;;AChB7B;AAAA;AAAA;AAMA,IAAAC;AAGA;AAUA;AASA;AAOA;AAQA;AAAA;AAAA;;;AC3CA;AAAA;AAAA;AAAA;AAKA,SAAS,QAAAC,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,cAAAC,cAAY,gBAAAC,qBAAoB;AACzC,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AA6CnB,SAAS,yBAAyB,SAAoC;AAC3E,SAAOL,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAYb,aAAa;AAAA,IAEb,SAAS,OAAO;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,MAAgF;AAC9E,YAAM,YAAY,KAAK,IAAI;AAE3B,UAAI;AACF,cAAM,SAAS,UAAU;AAEzB,cAAM,YAAY,MAAM;AAAA,UACtB,QAAQ;AAAA,UACR,OAAO,sBAAsB;AAAA,QAC/B;AAEA,YAAI,CAAC,WAAW;AACd,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,cAAM,SAAS,gBAAgB;AAC/B,YAAI,CAAC,QAAQ;AACX,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,cAAc,KAAK,IAAI,KAAK,IAAI,GAAG,IAAI,GAAG,EAAE;AAElD,gBAAM,iBAAiB,kBAAkB;AACzC,gBAAM,SAAS,MAAM,OAAO,OAAO,aAAa,OAAO;AAAA,YACrD;AAAA,YACA,MAAM,cAAc;AAAA,YACpB,iBAAiB;AAAA,YACjB;AAAA,UACF,CAAC;AAED,gBAAM,UAA2B,CAAC;AAElC,qBAAW,SAAS,OAAO,SAAS;AAClC,kBAAM,WAAW,MAAM;AACvB,gBAAI,CAAC,SAAU;AAEf,kBAAM,WAAW,SAAS;AAC1B,kBAAM,YAAY,SAAS;AAC3B,kBAAM,UAAU,SAAS;AACzB,kBAAM,gBAAgB,SAAS;AAC/B,kBAAM,aAAa,SAAS;AAE5B,gBAAI,aAAa;AACf,oBAAM,iBAAiBK,WAAU,UAAU,aAAa,EAAE,KAAK,KAAK,CAAC;AACrE,kBAAI,CAAC,eAAgB;AAAA,YACvB;AAEA,gBAAI,YAAY,kBAAkB,SAAS,YAAY,GAAG;AACxD;AAAA,YACF;AAEA,kBAAM,WAAWD,MAAK,QAAQ,kBAAkB,QAAQ;AACxD,gBAAI,CAACF,aAAW,QAA
Q,GAAG;AACzB;AAAA,YACF;AAEA,gBAAI,UAAU;AACd,gBAAI;AACF,oBAAM,UAAUC,cAAa,UAAU,OAAO;AAC9C,oBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,oBAAM,eAAe,MAAM;AAAA,gBACzB,KAAK,IAAI,GAAG,YAAY,CAAC;AAAA,gBACzB,KAAK,IAAI,MAAM,QAAQ,OAAO;AAAA,cAChC;AACA,wBAAU,aAAa,KAAK,IAAI;AAEhC,kBAAI,QAAQ,SAAS,KAAK;AACxB,0BAAU,QAAQ,MAAM,GAAG,GAAG,IAAI;AAAA,cACpC;AAAA,YACF,QAAQ;AAAA,YAER;AAEA,oBAAQ,KAAK;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,cACA,OAAO,MAAM;AAAA,cACb;AAAA,cACA;AAAA,cACA,UAAU;AAAA,YACZ,CAAC;AAED,gBAAI,QAAQ,UAAU,aAAa;AACjC;AAAA,YACF;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,SAAS;AAAA,YACT;AAAA,YACA;AAAA,YACA,cAAc,QAAQ;AAAA,YACtB,UAAU,KAAK,IAAI,IAAI;AAAA,UACzB;AAAA,QACF,UAAE;AACA,gBAAM,kBAAkB;AAAA,QAC1B;AAAA,MACF,SAAS,OAAO;AACd,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,2BAA2B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC1F;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AA5LA,IAgCM;AAhCN;AAAA;AAAA;AAUA;AAMA;AAgBA,IAAM,4BAA4BF,GAAE,OAAO;AAAA,MACzC,OAAOA,GACJ,OAAO,EACP,SAAS,gEAAgE;AAAA,MAC5E,MAAMA,GACH,OAAO,EACP,SAAS,EACT,QAAQ,EAAE,EACV,SAAS,oDAAoD;AAAA,MAChE,aAAaA,GACV,OAAO,EACP,SAAS,EACT,SAAS,mEAAmE;AAAA,MAC/E,UAAUA,GACP,OAAO,EACP,SAAS,EACT,SAAS,+DAA+D;AAAA,IAC7E,CAAC;AAAA;AAAA;;;ACjDD;AAAA;AAAA;AAAA;AAqBA,eAAsB,YAAY,KAAa,OAAoC;AACjF,MAAI;AACF,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,UAAU,WAAW,MAAM,WAAW,MAAM,GAAG,GAAI;AAEzD,UAAM,MAAM,KAAK;AAAA,MACf,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,uBAAuB,MAAM;AAAA,MAC/B;AAAA,MACA,MAAM,KAAK,UAAU,KAAK;AAAA,MAC1B,QAAQ,WAAW;AAAA,IACrB,CAAC;AAED,iBAAa,OAAO;AAAA,EACtB,QAAQ;AAAA,EAER;AACF;AAxCA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAAO,eAAe;AACtB,SAAS,oBAAoB;AAwPtB,SAAS,iBAAiB,WAAmB,MAAkC;AACpF,QAAM,WAAW,cAAc,IAAI,SAAS;AAC5C,MAAI,UAAU;AACZ,YAAQ,IAAI,mDAAmD,SAAS,eAAe,SAAS,SAAS,GAAG;AAC5G,WAAO;AAAA,EACT;AAEA,UAAQ,IAAI,+CAA+C,SAAS,YAAY,IAAI,qBAAqB,cAAc,IAAI,GAAG;AAC9H,QAAM,QAAQ,IAAI,mBAAmB,IAAI;AACzC,gBAAc,IAAI,WAAW,KAAK;AAClC,QAAM,GAAG,SAAS,MAAM;AACtB,YAAQ,IAAI,yCAAyC,SAAS,0BAA0B;AACxF,kBAAc,OAAO,SAAS;AAAA,EAChC,CAAC;AACD,QAAM,QAAQ;AACd,SAAO;AACT;AAEO,SAAS,SAAS,WAAmD;AA
C1E,SAAO,cAAc,IAAI,SAAS;AACpC;AAEO,SAAS,aAAa,WAAyB;AACpD,QAAM,QAAQ,cAAc,IAAI,SAAS;AACzC,MAAI,OAAO;AACT,YAAQ,IAAI,kDAAkD,SAAS,EAAE;AACzE,UAAM,QAAQ;AACd,kBAAc,OAAO,SAAS;AAAA,EAChC,OAAO;AACL,YAAQ,IAAI,sEAAsE,SAAS,EAAE;AAAA,EAC/F;AACF;AAxRA,IA4DM,oBACA,wBACA,mBAMO,oBAmLP;AAvPN;AAAA;AAAA;AA4DA,IAAM,qBAAqB;AAC3B,IAAM,yBAAyB;AAC/B,IAAM,oBAAoB;AAMnB,IAAM,qBAAN,cAAiC,aAAa;AAAA,MAC3C,KAAuB;AAAA,MACvB;AAAA,MACA,oBAAoB;AAAA,MACpB,iBAAuD;AAAA,MACvD,YAAY;AAAA,MACZ,gBAAgB;AAAA,MAChB,eAAoC;AAAA,MACpC,aAAa;AAAA,MAErB,YAAY,MAAc;AACxB,cAAM;AACN,aAAK,OAAO;AAAA,MACd;AAAA,MAEA,IAAI,YAAqB;AACvB,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,IAAI,cAAmC;AACrC,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,UAAgB;AACd,YAAI,KAAK,UAAW;AACpB,gBAAQ,IAAI,0CAA0C,KAAK,IAAI,EAAE;AACjE,aAAK,UAAU;AAAA,MACjB;AAAA,MAEQ,YAAkB;AACxB,YAAI,KAAK,UAAW;AAEpB,cAAM,MAAM,kBAAkB,KAAK,IAAI;AACvC,gBAAQ,IAAI,mDAAmD,GAAG,aAAa,KAAK,oBAAoB,CAAC,IAAI,sBAAsB,GAAG;AACtI,YAAI;AACF,eAAK,KAAK,IAAI,UAAU,GAAG;AAAA,QAC7B,SAAS,KAAK;AACZ,kBAAQ,KAAK,gDAAgD,GAAG,KAAK,GAAG;AACxE,eAAK,kBAAkB;AACvB;AAAA,QACF;AAEA,aAAK,GAAG,GAAG,QAAQ,MAAM;AACvB,kBAAQ,IAAI,6BAA6B,GAAG,WAAW,KAAK,iBAAiB,WAAW;AACxF,eAAK,oBAAoB;AACzB,eAAK,aAAa;AAAA,QAIpB,CAAC;AAED,aAAK,GAAG,GAAG,WAAW,CAAC,QAAyB;AAC9C,cAAI;AACF,kBAAM,MAAM,KAAK,MAAM,OAAO,QAAQ,WAAW,MAAM,IAAI,SAAS,MAAM,CAAC;AAC3E,iBAAK,cAAc,GAAG;AAAA,UACxB,SAAS,KAAK;AACZ,oBAAQ,KAAK,uCAAuC,GAAG,KAAK,GAAG;AAAA,UACjE;AAAA,QACF,CAAC;AAED,aAAK,GAAG,GAAG,SAAS,CAAC,MAAM,WAAW;AACpC,gBAAM,eAAe,KAAK;AAC1B,eAAK,aAAa;AAClB,kBAAQ,IAAI,wCAAwC,IAAI,YAAY,QAAQ,SAAS,KAAK,EAAE,kBAAkB,YAAY,cAAc,KAAK,SAAS,EAAE;AACxJ,cAAI,cAAc;AAChB,iBAAK,KAAK,UAAU,EAAE,WAAW,OAAO,eAAe,MAAM,CAAyB;AAAA,UACxF;AACA,cAAI,CAAC,KAAK,WAAW;AACnB,iBAAK,kBAAkB;AAAA,UACzB;AAAA,QACF,CAAC;AAED,aAAK,GAAG,GAAG,SAAS,CAAC,QAAQ;AAC3B,kBAAQ,KAAK,wCAAwC,KAAK,IAAI,KAAK,IAAI,OAAO;AAAA,QAChF,CAAC;AAAA,MACH;AAAA,MAEQ,aAAa;AAAA,MACb,iBAAiB;AAAA,MACjB,mBAAmB;AAAA,MAEnB,cAAc,KAAgB;AACpC,YAAI,IAAI,SAAS,SAAS;AACxB,gBAAM,MAAM,KAAK,IAAI;AACrB,cAAI,MAAM,KAAK,gBAAgB,mBAAmB;AAChD,iBAAK;AACL;AAAA,UACF;AACA,eAAK,gBAAgB;A
ACrB,eAAK;AAGL,cAAI,MAAM,KAAK,mBAAmB,KAAM;AACtC,oBAAQ,IAAI,qCAAqC,KAAK,UAAU,cAAc,KAAK,cAAc,cAAc,KAAK,cAAc,OAAO,CAAC,aAAa,IAAI,MAAM,UAAU,CAAC,EAAE;AAC9K,iBAAK,mBAAmB;AAAA,UAC1B;AAEA,gBAAM,QAAsB;AAAA,YAC1B,MAAM,IAAI;AAAA,YACV,UAAU,IAAI,YAAY;AAAA,cACxB,aAAa;AAAA,cACb,cAAc;AAAA,cACd,iBAAiB;AAAA,cACjB,WAAW;AAAA,cACX,eAAe;AAAA,cACf,eAAe;AAAA,YACjB;AAAA,YACA,WAAW;AAAA,UACb;AACA,eAAK,eAAe;AACpB,eAAK,KAAK,SAAS,KAAK;AAAA,QAC1B,WAAW,IAAI,SAAS,UAAU;AAChC,kBAAQ,IAAI,yCAAyC,KAAK,UAAU,GAAG,CAAC;AACxE,eAAK,KAAK,UAAU;AAAA,YAClB,WAAW,IAAI,aAAa;AAAA,YAC5B,eAAe,IAAI,iBAAiB;AAAA,YACpC,eAAe,IAAI;AAAA,YACnB,gBAAgB,IAAI;AAAA,UACtB,CAAyB;AAAA,QAC3B,OAAO;AACL,kBAAQ,IAAI,sCAAsC,IAAI,IAAI,EAAE;AAAA,QAC9D;AAAA,MACF;AAAA,MAEQ,oBAA0B;AAChC,YAAI,KAAK,aAAa,KAAK,qBAAqB,wBAAwB;AACtE,kBAAQ,IAAI,kDAAkD,KAAK,SAAS,aAAa,KAAK,iBAAiB,IAAI,sBAAsB,EAAE;AAC3I,eAAK,KAAK,OAAO;AACjB;AAAA,QACF;AACA,aAAK;AAGL,cAAM,QAAQ,KAAK,qBAAqB,IACpC,qBACA,sBAAsB,KAAK,oBAAoB;AACnD,gBAAQ,IAAI,wCAAwC,KAAK,eAAe,KAAK,iBAAiB,IAAI,sBAAsB,GAAG;AAC3H,aAAK,iBAAiB,WAAW,MAAM,KAAK,UAAU,GAAG,KAAK;AAAA,MAChE;AAAA;AAAA;AAAA;AAAA,MAKA,YAAY,OAAgC;AAC1C,YAAI,KAAK,IAAI,eAAe,UAAU,MAAM;AAC1C,eAAK,GAAG,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,QACpC;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,gBAAsB;AACpB,YAAI,KAAK,IAAI,eAAe,UAAU,MAAM;AAC1C,kBAAQ,IAAI,wDAAwD;AACpE,eAAK,GAAG,KAAK,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,QACjD;AAAA,MACF;AAAA,MAEA,UAAgB;AACd,gBAAQ,IAAI,0CAA0C,KAAK,IAAI,aAAa,KAAK,UAAU,sBAAsB,KAAK,cAAc,GAAG;AACvI,aAAK,YAAY;AACjB,YAAI,KAAK,gBAAgB;AACvB,uBAAa,KAAK,cAAc;AAChC,eAAK,iBAAiB;AAAA,QACxB;AACA,YAAI,KAAK,IAAI;AACX,eAAK,GAAG,mBAAmB;AAC3B,eAAK,GAAG,MAAM;AACd,eAAK,KAAK;AAAA,QACZ;AACA,aAAK,aAAa;AAClB,aAAK,mBAAmB;AAAA,MAC1B;AAAA,IACF;AAGA,IAAM,gBAAgB,oBAAI,IAAgC;AAAA;AAAA;;;ACvP1D;AAAA;AAAA;AAAA;AAAA,SAAS,QAAAK,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,aAAAC,YAAW,SAAAC,QAAO,YAAAC,YAAU,UAAAC,SAAQ,WAAAC,UAAS,UAAU;AAChE,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAc;AACvB,SAAS,UAAAC,eAAc;AAiHvB,eAAe,cAAgC;AAC7C,MAAI;AACF,UAAMC,WAAU,mBAAmB,EAAE,SAAS,IAAK,CAAC;AACpD,WAAO
;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAe,QAAQ,KAA4B;AACjD,MAAI;AACF,UAAM,GAAG,KAAK,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAChD,QAAQ;AAAA,EAER;AACF;AArIA,IAQMA,YAOO;AAfb;AAAA;AAAA;AAQA,IAAMA,aAAYR,WAAUD,KAAI;AAOzB,IAAM,gBAAN,MAAoB;AAAA,MACjB,SAA0B,CAAC;AAAA,MAC3B,YAA2B;AAAA,MAC3B,YAAY;AAAA,MACZ;AAAA,MAER,YAAY,WAAmB;AAC7B,aAAK,YAAY;AAAA,MACnB;AAAA,MAEA,IAAI,cAAuB;AACzB,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,IAAI,aAAqB;AACvB,eAAO,KAAK,OAAO;AAAA,MACrB;AAAA,MAEA,QAAc;AACZ,aAAK,SAAS,CAAC;AACf,aAAK,YAAY,KAAK,IAAI;AAC1B,aAAK,YAAY;AAAA,MACnB;AAAA,MAEA,SAAS,OAA2B;AAClC,YAAI,CAAC,KAAK,UAAW;AACrB,aAAK,OAAO,KAAK;AAAA,UACf,MAAM,OAAO,KAAK,MAAM,MAAM,QAAQ;AAAA,UACtC,WAAW,MAAM;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,MAEA,OAAa;AACX,aAAK,YAAY;AAAA,MACnB;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAM,SAA8D;AAClE,YAAI,KAAK,OAAO,WAAW,EAAG,QAAO;AAErC,cAAM,UAAUO,MAAK,OAAO,GAAG,yBAAyBC,QAAO,CAAC,CAAC,EAAE;AACnE,cAAML,OAAM,SAAS,EAAE,WAAW,KAAK,CAAC;AAExC,YAAI;AAEF,mBAAS,IAAI,GAAG,IAAI,KAAK,OAAO,QAAQ,KAAK;AAC3C,kBAAM,YAAYI,MAAK,SAAS,SAAS,OAAO,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC,MAAM;AACzE,kBAAML,WAAU,WAAW,KAAK,OAAO,CAAC,EAAE,IAAI;AAAA,UAChD;AAGA,gBAAM,YAAY,KAAK,OAAO,KAAK,OAAO,SAAS,CAAC,EAAE,YAAY,KAAK,OAAO,CAAC,EAAE,aAAa;AAC9F,gBAAM,MAAM,WAAW,IAAI,KAAK,MAAM,KAAK,OAAO,SAAS,QAAQ,IAAI;AACvE,gBAAM,aAAa,KAAK,IAAI,GAAG,KAAK,IAAI,KAAK,EAAE,CAAC;AAEhD,gBAAM,aAAaK,MAAK,SAAS,aAAa,KAAK,SAAS,MAAM;AAGlE,gBAAM,YAAY,MAAM,YAAY;AACpC,cAAI,WAAW;AACb,kBAAME;AAAA,cACJ,wBAAwB,UAAU,QAAQF,MAAK,SAAS,gBAAgB,CAAC,yDAErE,UAAU;AAAA,cACd,EAAE,SAAS,KAAQ;AAAA,YACrB;AAAA,UACF,OAAO;AAGL,oBAAQ,KAAK,0DAA0D;AACvE,kBAAM,QAAQ,OAAO;AACrB,mBAAO;AAAA,UACT;AAEA,gBAAM,YAAY,MAAMH,WAAS,UAAU;AAG3C,gBAAM,QAAQ,MAAME,SAAQ,OAAO;AACnC,qBAAW,KAAK,OAAO;AACrB,gBAAI,EAAE,WAAW,QAAQ,GAAG;AAC1B,oBAAMD,QAAOE,MAAK,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,cAAC,CAAC;AAAA,YAC/C;AAAA,UACF;AAEA,iBAAO,EAAE,MAAM,YAAY,WAAW,UAAU,OAAO;AAAA,QACzD,SAAS,OAAO;AACd,kBAAQ,MAAM,0CAA0C,KAAK;AAC7D,gBAAM,QAAQ,OAAO;AACrB,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA,MAGA,QAAc;AACZ,aAAK,SAAS,CAAC;AACf,aAAK,YAAY;AACjB,aA
AK,YAAY;AAAA,MACnB;AAAA,IACF;AAAA;AAAA;;;ACpHA,OAAO;AACP,SAAS,QAAAG,aAAY;AACrB,SAAS,aAA8B;AACvC,SAAS,YAAY;AACrB,SAAS,cAAc;AACvB,SAAS,cAAAC,cAAY,aAAAC,YAAW,iBAAAC,sBAAqB;AACrD,SAAS,WAAAC,WAAS,WAAAC,UAAS,QAAAC,cAAY;AACvC,SAAS,SAAAC,cAAgC;AACzC,SAAS,gBAAgB,uBAAuB;AAChD,SAAS,iBAAAC,sBAAqB;;;ACF9B;AAPA,SAAS,YAAY;AACrB,SAAS,kBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,cAAAC,cAAY,aAAAC,YAAW,iBAAAC,gBAAe,aAAa,YAAAC,WAAU,kBAAkB;AACxF,SAAS,WAAAC,gBAAe;AACxB,SAAS,QAAAC,OAAM,YAAAC,WAAU,WAAAC,UAAS,YAAAC,iBAAgB;AAClD,SAAS,UAAAC,eAAc;;;ACNvB;AAAA,EACE,cAAAC;AAAA,EACA,gBAAAC;AAAA,EACA,QAAAC;AAAA,EACA,eAAAC;AAAA,OAGK;;;ACPP,SAAS,eAAe;;;ACuBxB,SAAS,gBAAgB,QAAsB;AAC7C,SAAO,OAAO,IAAI,CAAC,QAAa;AAC9B,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG,QAAO;AACxC,WAAO;AAAA,MACL,GAAG;AAAA,MACH,SAAS,IAAI,QAAQ,IAAI,CAAC,SAAc;AACtC,YAAI,KAAK,SAAS,UAAU,KAAK,gBAAgB,YAAY;AAC3D,iBAAO;AAAA,YACL,GAAG;AAAA,YACH,MAAM,OAAO,KAAK,KAAK,IAAI,EAAE,SAAS,QAAQ;AAAA,YAC9C,SAAS;AAAA,UACX;AAAA,QACF;AACA,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;AAMA,SAAS,iBAAiB,OAAiB;AACzC,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,QAAI,MAAM,gBAAgB,OAAO,MAAM,SAAS,UAAU;AACxD,aAAO,OAAO,KAAK,MAAM,MAAM,QAAQ;AAAA,IACzC;AACA,QAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,aAAO,MAAM,IAAI,gBAAgB;AAAA,IACnC;AACA,UAAM,SAAc,CAAC;AACrB,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1C,aAAO,CAAC,IAAI,iBAAiB,CAAC;AAAA,IAChC;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAKA,SAAS,eAAe,SAA+C;AACrE,QAAM,EAAE,aAAa,GAAG,KAAK,IAAI;AACjC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,EACxC;AACF;AAOO,SAAS,kBACd,SACA,QACA;AACA,QAAM,UAAU,OAAO,IAAI,QAAQ,OAAO,EAAE;AAC5C,QAAM,UAAU;AAAA,IACd,gBAAgB;AAAA,IAChB,iBAAiB,UAAU,OAAO,OAAO;AAAA,EAC3C;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,UAAU;AAAA,IACV;AAAA,IACA,eAAe,CAAC;AAAA,IAEhB,MAAM,WAAW,SAAsB;AACrC,YAAM,MAAM,MAAM,MAAM,GAAG,OAAO,uBAAuB;AAAA,QACvD,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB;AAAA,UACA,SAAS,eAAe,OAAO;AAAA,QACjC,CAAC;AAAA,QACD,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAED,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;
AAC7C,cAAM,SAAS,kBAAkB,IAAI,QAAQ,SAAS,GAAG;AACzD,cAAM,IAAI,MAAM,MAAM;AAAA,MACxB;AAEA,YAAM,SAAS,MAAM,IAAI,KAAK;AAC9B,aAAO,iBAAiB,MAAM;AAAA,IAChC;AAAA,IAEA,MAAM,SAAS,SAAsB;AACnC,YAAM,MAAM,MAAM,MAAM,GAAG,OAAO,qBAAqB;AAAA,QACrD,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB;AAAA,UACA,SAAS,eAAe,OAAO;AAAA,QACjC,CAAC;AAAA,QACD,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAED,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AAC7C,cAAM,SAAS,kBAAkB,IAAI,QAAQ,SAAS,GAAG;AACzD,cAAM,IAAI,MAAM,MAAM;AAAA,MACxB;AAEA,YAAM,SAAS,IAAI,KAAM,UAAU;AACnC,YAAM,UAAU,IAAI,YAAY;AAChC,UAAI,SAAS;AAEb,YAAM,SAAS,IAAI,eAAe;AAAA,QAChC,MAAM,KAAK,YAAY;AACrB,iBAAO,MAAM;AACX,kBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,gBAAI,MAAM;AACR,kBAAI,OAAO,KAAK,GAAG;AACjB,oBAAI;AACF,wBAAM,SAAS,iBAAiB,KAAK,MAAM,OAAO,KAAK,CAAC,CAAC;AACzD,sBAAI,OAAO,SAAS,SAAS;AAC3B,+BAAW,MAAM,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,kBAC1C,OAAO;AACL,+BAAW,QAAQ,MAAM;AAAA,kBAC3B;AAAA,gBACF,QAAQ;AAAA,gBAA4B;AAAA,cACtC;AACA,yBAAW,MAAM;AACjB;AAAA,YACF;AAEA,sBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,kBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,qBAAS,MAAM,IAAI,KAAK;AAExB,uBAAW,QAAQ,OAAO;AACxB,kBAAI,CAAC,KAAK,KAAK,EAAG;AAClB,kBAAI;AACF,sBAAM,SAAS,iBAAiB,KAAK,MAAM,IAAI,CAAC;AAChD,oBAAI,OAAO,SAAS,SAAS;AAC3B,6BAAW,MAAM,IAAI,MAAM,OAAO,KAAK,CAAC;AACxC;AAAA,gBACF;AACA,2BAAW,QAAQ,MAAM;AAAA,cAC3B,QAAQ;AAAA,cAER;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA,SAAS;AACP,iBAAO,OAAO;AAAA,QAChB;AAAA,MACF,CAAC;AAED,YAAM,kBAA0C,CAAC;AACjD,UAAI,QAAQ,QAAQ,CAAC,GAAG,MAAM;AAC5B,YAAI,EAAE,WAAW,aAAa,GAAG;AAC/B,0BAAgB,EAAE,QAAQ,eAAe,EAAE,CAAC,IAAI;AAAA,QAClD;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL;AAAA,QACA,UAAU,OAAO,KAAK,eAAe,EAAE,SAAS,IAC5C,EAAE,SAAS,gBAAgB,IAC3B;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,kBACP,QACA,SACA,MACQ;AACR,QAAM,QAAQ,CAAC,4BAA4B,MAAM,SAAS,OAAO,EAAE;AAEnE,MAAI,KAAK,MAAO,OAAM,KAAK,KAAK,KAAK;AAErC,MAAI,KAAK,SAAS;AAChB,UAAM,IAAI,KAAK;AACf,QAAI,EAAE,KAAM,OAAM,KAAK,QAAQ,EAAE,IAAI,EAAE;AACvC,QAAI,EAAE,cAAc,EAAE,eAAe,OAAQ,OAAM,KAAK,YAAY,EAAE,UAAU,EAAE;AAClF,QAAI,
EAAE,MAAO,OAAM,KAAK,UAAU,EAAE,KAAK,EAAE;AAC3C,QAAI,EAAE,eAAe,OAAQ,OAAM,KAAK,kBAAkB,EAAE,cAAc,KAAK,IAAI,CAAC,EAAE;AAAA,EACxF;AAEA,SAAO,MAAM,KAAK,UAAK;AACzB;;;ADrNA;AAEA,IAAM,mBAAmB;AAMlB,SAAS,iBAAiB,SAA0B;AACzD,QAAM,aAAa,QAAQ,KAAK,EAAE,YAAY;AAC9C,SAAO,WAAW,WAAW,gBAAgB,KAAK,WAAW,WAAW,SAAS;AACnF;AAiBO,SAAS,aAAa,SAAgC;AAC3D,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,QAAI,OAAO,qBAAqB,cAAc;AAC5C,aAAO,kBAAkB,QAAQ,KAAK,GAAG;AAAA,QACvC,KAAK,OAAO,qBAAqB;AAAA,QACjC,SAAS,OAAO,qBAAqB;AAAA,MACvC,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AACA,SAAO,QAAQ,QAAQ,KAAK,CAAC;AAC/B;AAGO,IAAM,kBAAkB;AAAA,EAC7B,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,SAAS;AACX;;;ADxCA;AAMA;AARA,SAAS,KAAAC,WAAS;AAClB,SAAS,UAAAC,eAAc;;;AGVvB,SAAS,YAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;;;ACE1B,IAAM,kBAAkB;AACxB,IAAM,0BAA0B;AAEzB,SAAS,eAAe,MAAsB;AACnD,SAAO,KAAK,KAAK,KAAK,SAAS,eAAe;AAChD;AAEO,SAAS,sBAAsB,UAA6D;AACjG,SAAO,SAAS,OAAO,CAAC,OAAO,QAAQ;AACrC,UAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,WAAO,QAAQ,eAAe,OAAO,IAAI;AAAA,EAC3C,GAAG,CAAC;AACN;;;ACjBA,IAAM,mBAAmB;AAKlB,SAAS,eACd,QACA,WAAmB,kBACX;AACR,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,KAAK,MAAM,WAAW,CAAC;AACvC,QAAM,iBAAiB,OAAO,SAAS;AAEvC,SACE,OAAO,MAAM,GAAG,OAAO,IACvB;AAAA;AAAA,kBAAuB,eAAe,eAAe,CAAC;AAAA;AAAA,IACtD,OAAO,MAAM,CAAC,OAAO;AAEzB;AAKO,SAAS,qBAAqB,UAA+C;AAClF,SAAO,SAAS,OAAO,CAAC,OAAO,QAAQ;AACrC,UAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,WAAO,QAAQ,QAAQ;AAAA,EACzB,GAAG,CAAC;AACN;;;ACpBA;AANA,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAC1B,SAAS,OAAO,WAAW,gBAAgB;AAC3C,SAAS,cAAAC,aAAY,aAAAC,kBAAiB;AACtC,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAc;AAGvB,IAAM,YAAY,UAAU,IAAI;AAGhC,IAAM,iBAAiB;AAGvB,IAAM,eAAe;AAoBrB,IAAI,qBAAqC;AAKzC,eAAsB,kBAAoC;AACxD,MAAI,uBAAuB,MAAM;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,SAAS;AAC5C,yBAAqB;AAErB,WAAO;AAAA,EACT,SAAS,OAAO;AACd,yBAAqB;AACrB,YAAQ,IAAI,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,eAAe,EAAE;AAC/F,WAAO;AAAA,EACT;AACF;AAMO,SAAS,qBAA6B;AAE3C,SAAO,MAAM,OAAO
,CAAC;AACvB;AAKO,SAAS,eAAe,YAA4B;AACzD,SAAO,GAAG,cAAc,GAAG,UAAU;AACvC;AAMA,SAAS,qBAA6B;AACpC,QAAM,aAAa,oBAAoB;AAEvC,MAAI,CAACF,YAAW,UAAU,GAAG;AAC3B,IAAAC,WAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC3C;AACA,SAAO;AACT;AAKO,SAAS,UAAU,YAAoB,mBAA2B,WAA4B;AACnG,QAAM,UAAU,mBAAmB;AACnC,MAAI,WAAW;AAEb,WAAOC,MAAK,SAAS,cAAc,WAAW,aAAa,UAAU;AAAA,EACvE;AAEA,SAAOA,MAAK,SAAS,aAAa,UAAU;AAC9C;AAKA,SAAS,YAAY,KAAqB;AAExC,SAAO,IAAI,IAAI,QAAQ,MAAM,OAAO,CAAC;AACvC;AAKA,eAAe,WAAW,YAAoB,MAAoB,kBAA2C;AAC3G,QAAM,SAAS,UAAU,YAAY,kBAAkB,KAAK,SAAS;AACrE,QAAM,MAAM,QAAQ,EAAE,WAAW,KAAK,CAAC;AACvC,QAAM,UAAUA,MAAK,QAAQ,WAAW,GAAG,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAExE,QAAM,UAAUA,MAAK,QAAQ,YAAY,GAAG,EAAE;AAC9C,SAAO;AACT;AAKA,eAAe,UACb,WACA,SACkB;AAClB,QAAM,EAAE,SAAS,WAAW,IAAI,IAAI;AACpC,QAAM,YAAY,KAAK,IAAI;AAE3B,SAAO,KAAK,IAAI,IAAI,YAAY,SAAS;AACvC,QAAI,MAAM,UAAU,GAAG;AACrB,aAAO;AAAA,IACT;AACA,UAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,QAAQ,CAAC;AAAA,EAChD;AAEA,SAAO;AACT;AAKA,eAAsB,QACpB,SACA,kBACA,SACyB;AACzB,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,iEAAiE;AAAA,EACnF;AACA,QAAM,KAAK,QAAQ,cAAc,mBAAmB;AACpD,QAAM,UAAU,eAAe,EAAE;AACjC,QAAM,SAAS,MAAM,WAAW,IAAI;AAAA,IAClC;AAAA,IACA;AAAA,IACA,KAAK;AAAA,IACL,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,WAAW,QAAQ;AAAA,IACnB,YAAY;AAAA,EACd,GAAG,gBAAgB;AAEnB,QAAM,UAAUA,MAAK,QAAQ,YAAY;AACzC,QAAM,eAAeA,MAAK,QAAQ,WAAW;AAC7C,QAAM,UAAU,QAAQ,WAAW;AAEnC,MAAI;AAGF,UAAM,iBAAiB,IAAI,OAAO,mBAAmB,YAAY,OAAO,CAAC,eAAe,YAAY,YAAY,CAAC;AAGjH,UAAM;AAAA,MACJ,0BAA0B,OAAO,OAAO,YAAY,gBAAgB,CAAC,IAAI,YAAY,cAAc,CAAC;AAAA,MACpG,EAAE,SAAS,IAAK;AAAA,IAClB;AAGA,QAAI;AACF,YAAM;AAAA,QACJ,qBAAqB,OAAO,eAAe,YAAY,OAAO,CAAC;AAAA,QAC/D,EAAE,SAAS,IAAK;AAAA,MAClB;AAAA,IACF,QAAQ;AAAA,IAER;AAGA,UAAM,YAAY,MAAM;AAAA,MACtB,YAAY;AACV,YAAI;AACF,gBAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MACA,EAAE,SAAS,UAAU,IAAI;AAAA,IAC3B;AAEA,QAAI,CAAC,WAAW;AAEd,UAAI;AACF,cAAM,UAAU,wBAAwB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AAAA,MACtE,QAAQ;AAAA,MAER;AAGA,UAAIC,UAAS;AACb,UAAI;AACF,QAAAA,UAAS,MAAM,SAA
S,SAAS,OAAO;AAAA,MAC1C,QAAQ;AAAA,MAER;AAEA,aAAO;AAAA,QACL;AAAA,QACA,QAAQA,QAAO,KAAK;AAAA,QACpB,UAAU;AAAA;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AAIA,UAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,EAAE,CAAC;AAExC,QAAI,SAAS;AACb,QAAI;AACF,eAAS,MAAM,SAAS,SAAS,OAAO;AAAA,IAC1C,QAAQ;AAAA,IAER;AAGA,QAAI,WAAW;AACf,QAAI;AACF,UAAIH,YAAW,YAAY,GAAG;AAC5B,cAAM,cAAc,MAAM,SAAS,cAAc,OAAO;AACxD,mBAAW,SAAS,YAAY,KAAK,GAAG,EAAE,KAAK;AAAA,MACjD;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,OAAO,KAAK;AAAA,MACpB;AAAA,MACA,QAAQ;AAAA,IACV;AAAA,EACF,SAAS,OAAY;AAEnB,QAAI;AACF,YAAM,UAAU,wBAAwB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AAAA,IACtE,QAAQ;AAAA,IAER;AAEA,UAAM;AAAA,EACR;AACF;AAKA,eAAsB,cACpB,SACA,kBACA,SACyB;AACzB,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,uEAAuE;AAAA,EACzF;AACA,QAAM,KAAK,QAAQ,cAAc,mBAAmB;AACpD,QAAM,UAAU,eAAe,EAAE;AACjC,QAAM,SAAS,MAAM,WAAW,IAAI;AAAA,IAClC;AAAA,IACA;AAAA,IACA,KAAK;AAAA,IACL,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,WAAW,QAAQ;AAAA,IACnB,YAAY;AAAA,IACZ,MAAM,QAAQ;AAAA,EAChB,GAAG,gBAAgB;AAEnB,QAAM,UAAUE,MAAK,QAAQ,YAAY;AAGzC,QAAM,iBAAiB,IAAI,OAAO,mBAAmB,YAAY,OAAO,CAAC;AAGzE,QAAM;AAAA,IACJ,0BAA0B,OAAO,OAAO,YAAY,gBAAgB,CAAC,IAAI,YAAY,cAAc,CAAC;AAAA,IACpG,EAAE,SAAS,IAAK;AAAA,EAClB;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,EACV;AACF;AAKA,eAAsB,QACpB,YACA,kBACA,UAAiD,CAAC,GACsB;AACxE,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,SAAS,UAAU,YAAY,kBAAkB,QAAQ,SAAS;AACxE,QAAM,UAAUA,MAAK,QAAQ,YAAY;AAGzC,MAAIE,aAAY;AAChB,MAAI;AACF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,IAAAA,aAAY;AAAA,EACd,QAAQ;AAAA,EAER;AAGA,MAAIA,YAAW;AACb,QAAI;AACF,YAAM,QAAQ,QAAQ,QAAQ;AAC9B,YAAM,EAAE,OAAO,IAAI,MAAM;AAAA,QACvB,wBAAwB,OAAO,WAAW,KAAK;AAAA,QAC/C,EAAE,SAAS,KAAM,WAAW,KAAK,OAAO,KAAK;AAAA,MAC/C;AACA,aAAO,EAAE,QAAQ,OAAO,KAAK,GAAG,QAAQ,UAAU;AAAA,IACpD,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI;AACF,QAAI,SAAS,MAAM,SAAS,SAAS,OAAO;AAE5C,QAAI,QAAQ,MAAM;AAChB,YAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,eAAS,MAAM,MAAM,CAAC,QAAQ,IAAI,EAAE,KAAK,IAAI;AAAA,IAC/C;AAEA,WAAO,EAAE,QAAQ,OAAO,KAAK,GAAG,QAAQA,aAAY,YAAY,UAAU;AAAA,EAC5
E,QAAQ;AACN,WAAO,EAAE,QAAQ,IAAI,QAAQ,UAAU;AAAA,EACzC;AACF;AAKA,eAAsB,UAAU,YAAsC;AACpE,QAAM,UAAU,eAAe,UAAU;AACzC,MAAI;AACF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,aAAa,YAAsC;AACvE,QAAM,UAAU,eAAe,UAAU;AACzC,MAAI;AACF,UAAM,UAAU,wBAAwB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACpE,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,eAAkC;AACtD,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB;AAAA,MACA,EAAE,SAAS,IAAK;AAAA,IAClB;AAEA,WAAO,OACJ,KAAK,EACL,MAAM,IAAI,EACV,OAAO,UAAQ,KAAK,WAAW,cAAc,CAAC,EAC9C,IAAI,UAAQ,KAAK,MAAM,eAAe,MAAM,CAAC;AAAA,EAClD,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,eAAsB,QAAQ,YAAoB,kBAA0B,WAAkD;AAC5H,QAAM,SAAS,UAAU,YAAY,kBAAkB,SAAS;AAChE,QAAM,WAAWF,MAAK,QAAQ,WAAW;AAEzC,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,qBACpB,WACA,kBACyB;AACzB,QAAM,eAAeA,MAAK,kBAAkB,cAAc,WAAW,WAAW;AAChF,QAAMG,aAA4B,CAAC;AAEnC,MAAI;AACF,UAAM,EAAE,SAAAC,SAAQ,IAAI,MAAM,OAAO,aAAkB;AACnD,UAAM,UAAU,MAAMA,SAAQ,cAAc,EAAE,eAAe,KAAK,CAAC;AAEnE,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,YAAY,GAAG;AACvB,cAAM,OAAO,MAAM,QAAQ,MAAM,MAAM,kBAAkB,SAAS;AAClE,YAAI,MAAM;AACR,UAAAD,WAAU,KAAK,IAAI;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAOA;AACT;AAMA,eAAsB,UAAU,YAAoB,OAAe,UAAoC,CAAC,GAAqB;AAC3H,QAAM,UAAU,eAAe,UAAU;AACzC,QAAM,EAAE,aAAa,KAAK,IAAI;AAE9B,MAAI;AAEF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AAGnE,UAAM;AAAA,MACJ,qBAAqB,OAAO,OAAO,YAAY,KAAK,CAAC;AAAA,MACrD,EAAE,SAAS,IAAK;AAAA,IAClB;AAGA,QAAI,YAAY;AACd,YAAM;AAAA,QACJ,qBAAqB,OAAO;AAAA,QAC5B,EAAE,SAAS,IAAK;AAAA,MAClB;AAAA,IACF;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,QAAQ,YAAoB,KAAkH;AAClK,QAAM,UAAU,eAAe,UAAU;AAEzC,MAAI;AACF,UAAM,UAAU,uBAAuB,OAAO,IAAI,EAAE,SAAS,IAAK,CAAC;AACnE,UAAM,UAAU,qBAAqB,OAAO,IAAI,GAAG,IAAI,EAAE,SAAS,IAAK,CAAC;AACxE,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AH3eA,IAAME,aAAYC,WAAUC,KAAI;AAEhC,IAAM,kBAAkB;AACxB,IAAMC,oBAAmB;AAGzB,IAAM,mBAAmB;AAAA,EA
CvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAKA,SAAS,iBAAiB,SAA0B;AAClD,QAAM,oBAAoB,QAAQ,YAAY,EAAE,KAAK;AACrD,SAAO,iBAAiB;AAAA,IAAK,CAAC,YAC5B,kBAAkB,SAAS,QAAQ,YAAY,CAAC;AAAA,EAClD;AACF;AAiBA,IAAM,2BAA2B;AACjC,IAAM,sBAAsB,oBAAI,IAAoB;AACpD,IAAI,iBAAiB;AAErB,SAAS,qBAAqB,WAA2B;AACvD,MAAI,OAAO,oBAAoB,IAAI,SAAS;AAC5C,MAAI,CAAC,MAAM;AACT,WAAO,2BAA2B;AAClC,wBAAoB,IAAI,WAAW,IAAI;AAAA,EACzC;AACA,SAAO;AACT;AAEA,SAAS,uBAAuB,SAA0B;AACxD,SAAO,oBAAoB,KAAK,OAAO;AACzC;AAMA,SAAS,2BAA2B,SAA0B;AAC5D,SAAO,4CAA4C,KAAK,OAAO;AACjE;AAMA,SAAS,wBAAwB,SAAiB,MAAsB;AACtE,SAAO,QAAQ;AAAA,IACb;AAAA,IACA,6BAA6B,IAAI;AAAA,EACnC;AACF;AAGA,IAAM,kBAAkBC,GAAE,OAAO;AAAA,EAC/B,SAASA,GACN,OAAO,EACP,SAAS,EACT,SAAS,4DAA4D;AAAA,EACxE,YAAYA,GACT,QAAQ,EACR,QAAQ,KAAK,EACb,SAAS,uGAAuG;AAAA,EACnH,IAAIA,GACD,OAAO,EACP,SAAS,EACT,SAAS,iFAAiF;AAAA,EAC7F,MAAMA,GACH,QAAQ,EACR,SAAS,EACT,SAAS,sCAAsC;AAAA,EAClD,MAAMA,GACH,OAAO,EACP,SAAS,EACT,SAAS,8DAA8D;AAAA,EAC1E,OAAOA,GACJ,OAAO,EACP,SAAS,EACT,SAAS,2FAA2F;AAAA,EACvG,KAAKA,GACF,KAAK,CAAC,SAAS,UAAU,MAAM,QAAQ,QAAQ,SAAS,OAAO,OAAO,OAAO,KAAK,GAAG,CAAC,EACtF,SAAS,EACT,SAAS,iGAAiG;AAC/G,CAAC;AAKD,IAAI,UAA0B;AAE9B,eAAe,gBAAkC;AAC/C,MAAI,YAAY,MAAM;AACpB,cAAU,MAAW,gBAAgB;AACrC,QAAI,CAAC,SAAS;AACZ,cAAQ,KAAK,qDAAqD;AAAA,IACpE;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAe,aACb,SACA,kBACA,UACiF;AACjF,MAAI;AACF,UAAM,EAAE,QAAQ,OAAO,IAAI,MAAMC,WAAU,SAAS;AAAA,MAClD,KAAK;AAAA,MACL,SAAS;AAAA,MACT,WAAW,KAAK,OAAO;AAAA,IACzB,CAAC;AAED,UAAM,SAAS,eAAe,UAAU,SAAS;AAAA,EAAK,MAAM,KAAK,KAAKC,iBAAgB;AACtF,eAAW,MAAM;AAEjB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA,UAAU;AAAA,IACZ;AAAA,EACF,SAAS,OAAY;AACnB,UAAM,SAAS;AAAA,OACZ,MAAM,UAAU,OAAO,MAAM,SAAS;AAAA,EAAK,MAAM,MAAM,KAAK;AAAA,MAC7DA;AAAA,IACF;AACA,eAAW,UAAU,MAAM,OAAO;AAElC,QAAI,MAAM,QAAQ;AAChB,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO,2BAA2B,kBAAkB,GAAI;AAAA,QACxD;AAAA,QACA,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,MAAM;AAAA,MACb;AAAA,MACA,UAAU,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF;AACF;AAEO,SAAS,eAAe,SAA0B;AACvD,SAAO,KAAK;AAAA,IACV,aAAa;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IA+Bb,aAAa;AAAA,IAEb,SAAS,OAAO,cAAyB;AACvC,YAAM,EAAE,SAAS,YAAY,IAAI,MAAM,MAAM,OAAO,WAAW,IAAI,IAAI;AAGvE,UAAI,IAAI;AAEN,YAAI,MAAM;AACR,gBAAM,UAAU,MAAW,aAAa,EAAE;AAC1C,iBAAO;AAAA,YACL;AAAA,YACA;AAAA,YACA,QAAQ,UAAU,YAAY;AAAA,YAC9B,SAAS,UAAU,YAAY,EAAE,aAAa,YAAY,EAAE;AAAA,UAC9D;AAAA,QACF;AAGA,YAAI,cAAc,QAAW;AAC3B,gBAAM,UAAU,MAAW,UAAU,IAAI,WAAW,EAAE,YAAY,KAAK,CAAC;AACxE,cAAI,CAAC,SAAS;AACZ,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,YAAY,EAAE;AAAA,YACvB;AAAA,UACF;AAGA,gBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AACzC,gBAAM,EAAE,QAAAC,SAAQ,QAAAC,QAAO,IAAI,MAAW,QAAQ,IAAI,QAAQ,kBAAkB,EAAE,MAAM,QAAQ,IAAI,WAAW,QAAQ,UAAU,CAAC;AAC9H,gBAAMC,mBAAkB,eAAeF,SAAQD,iBAAgB;AAE/D,iBAAO;AAAA,YACL,SAAS;AAAA,YACT;AAAA,YACA,QAAQG;AAAA,YACR,QAAAD;AAAA,YACA,SAAS,eAAe,SAAS;AAAA,UACnC;AAAA,QACF;AAGA,YAAI,KAAK;AACP,gBAAM,UAAU,MAAW,QAAQ,IAAI,GAAG;AAC1C,cAAI,CAAC,SAAS;AACZ,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,YAAY,EAAE;AAAA,YACvB;AAAA,UACF;AAGA,gBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AACzC,gBAAM,EAAE,QAAAD,SAAQ,QAAAC,QAAO,IAAI,MAAW,QAAQ,IAAI,QAAQ,kBAAkB,EAAE,MAAM,QAAQ,IAAI,WAAW,QAAQ,UAAU,CAAC;AAC9H,gBAAMC,mBAAkB,eAAeF,SAAQD,iBAAgB;AAE/D,iBAAO;AAAA,YACL,SAAS;AAAA,YACT;AAAA,YACA,QAAQG;AAAA,YACR,QAAAD;AAAA,YACA,SAAS,aAAa,GAAG;AAAA,UAC3B;AAAA,QACF;AAGA,cAAM,EAAE,QAAQ,OAAO,IAAI,MAAW,QAAQ,IAAI,QAAQ,kBAAkB,EAAE,MAAM,WAAW,QAAQ,UAAU,CAAC;AAClH,cAAM,kBAAkB,eAAe,QAAQF,iBAAgB;AAE/D,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAGA,UAAI,CAAC,SAAS;AACZ,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO;AAAA,QACT;AAAA,MACF;AAGA,UAAI,iBAAiB,OAAO,GAAG;AAC7B,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,MACF;AAKA,UAAI,gBAAgB;AACpB,YAAM,kBAAkB,uBAAuB,OAAO;AACtD,YAAM,eAAe,2BAA2B,OAAO;AACvD,UAAI;AAEJ,UAAI,iBAAiB;AACnB,sBAAc,qBAAqB,QAAQ,SAAS;AACpD,YAAI,CAAC,cAAc;AACjB,0BAAgB,wBAAwB,SAAS,WAAW;AAAA,QAC9D;AAAA,MACF;AAGA,Y
AAM,aAAa,MAAM,cAAc;AAEvC,UAAI,YAAY;AAEd,YAAI,CAAC,YAAY;AACf,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAGA,cAAM,aAAkB,mBAAmB;AAC3C,gBAAQ,aAAa,EAAE,YAAY,QAAQ,WAAW,SAAS,mBAAmB,YAAY,CAAC;AAE/F,cAAM,SAAS,MAAW,cAAc,eAAe,QAAQ,kBAAkB;AAAA,UAC/E,WAAW,QAAQ;AAAA,UACnB;AAAA,QACF,CAAC;AAED,eAAO;AAAA,UACL,SAAS;AAAA,UACT,IAAI,OAAO;AAAA,UACX,QAAQ;AAAA,UACR,SAAS,+CAA+C,OAAO,EAAE;AAAA,QACnE;AAAA,MACF;AAGA,UAAI,YAAY;AACd,cAAM,aAAkB,mBAAmB;AAC3C,gBAAQ,aAAa,EAAE,YAAY,QAAQ,WAAW,SAAS,mBAAmB,YAAY,CAAC;AAE/F,YAAI;AACF,gBAAM,SAAS,MAAW,QAAQ,eAAe,QAAQ,kBAAkB;AAAA,YACzE,WAAW,QAAQ;AAAA,YACnB,SAAS;AAAA,YACT;AAAA,UACF,CAAC;AAED,gBAAM,kBAAkB,eAAe,OAAO,QAAQA,iBAAgB;AACtE,kBAAQ,WAAW,eAAe;AAElC,kBAAQ,aAAa;AAAA,YACnB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA,mBAAmB;AAAA,YACnB,eAAe,gBAAgB;AAAA,UACjC,CAAC;AAED,iBAAO;AAAA,YACL,SAAS,OAAO,aAAa;AAAA,YAC7B,IAAI,OAAO;AAAA,YACX,QAAQ;AAAA,YACR,UAAU,OAAO;AAAA,YACjB,QAAQ,OAAO;AAAA,UACjB;AAAA,QACF,SAAS,OAAY;AACnB,kBAAQ,aAAa,EAAE,YAAY,QAAQ,aAAa,QAAQ,CAAC;AACjE,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,MAAM;AAAA,YACb,QAAQ;AAAA,YACR,UAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF,OAAO;AAEL,cAAM,SAAS,MAAM,aAAa,eAAe,QAAQ,kBAAkB,QAAQ,QAAQ;AAC3F,eAAO;AAAA,UACL,SAAS,OAAO;AAAA,UAChB,QAAQ,OAAO;AAAA,UACf,UAAU,OAAO;AAAA,UACjB,OAAO,OAAO;AAAA,QAChB;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AIpZA,SAAS,QAAAI,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,YAAAC,WAAU,YAAY;AAC/B,SAAS,WAAAC,UAAS,UAAU,YAAY,eAAe;AACvD,SAAS,cAAAC,mBAAkB;;;ACA3B;AAJA,OAAO,WAAW;AAClB,SAAS,kBAAkB;AAC3B,SAAS,cAAAC,aAAY,aAAAC,YAAW,gBAAAC,eAAc,iBAAAC,sBAAqB;AACnE,SAAS,QAAAC,aAAY;AAcrB,IAAM,gBAAgB;AACtB,IAAM,iBAAiB,IAAI,OAAO;AAClC,IAAM,iBAAiB;AAEvB,SAAS,cAAsB;AAC7B,QAAM,MAAMA,MAAK,oBAAoB,GAAG,cAAc;AACtD,MAAI,CAACJ,YAAW,GAAG,EAAG,CAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AACxD,SAAO;AACT;AAEA,SAAS,SAAS,QAAwB;AACxC,SAAO,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,OAAO,KAAK;AACzD;AAgBA,eAAsB,oBAAoB,QAAgB,WAA2C;AACnG,QAAM,iBAAiB,aAAa;AAEpC,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,MAAM,EAAE,SAAS;AAAA,EAC1C,QAAQ;AACN,WAAO,EAAE,QAAQ,WAAW,eAAe;AAAA,EAC7C;AAEA,QAAM,
EAAE,OAAO,OAAO,IAAI;AAC1B,MAAI,CAAC,SAAS,CAAC,OAAQ,QAAO,EAAE,QAAQ,WAAW,eAAe;AAElE,QAAM,WAAW,KAAK,IAAI,OAAO,MAAM;AACvC,QAAM,cAAc,WAAW;AAC/B,QAAM,cAAc,OAAO,SAAS;AAEpC,MAAI,CAAC,eAAe,CAAC,YAAa,QAAO,EAAE,QAAQ,WAAW,eAAe;AAE7E,QAAM,MAAM,SAAS,MAAM;AAC3B,QAAM,WAAW,YAAY;AAC7B,QAAM,QAAQ,eAAe,SAAS,KAAK;AAG3C,QAAM,oBAAoB,UAAU,eAAe,OAAO,SAAS,IAAI,OAAO;AAC9E,QAAM,kBAAmB,qBAAqB,CAAC,QAAS,eAAe;AACvE,QAAM,MAAM,oBAAoB,cAAc,SAAS;AACvD,QAAM,YAAYG,MAAK,UAAU,MAAM,GAAG;AAE1C,MAAIJ,YAAW,SAAS,GAAG;AACzB,YAAQ,IAAI,gCAAgC,KAAK,IAAI,MAAM,QAAQ;AACnE,WAAO,EAAE,QAAQE,cAAa,SAAS,GAAG,WAAW,gBAAgB;AAAA,EACvE;AAEA,MAAI,WAAW,MAAM,MAAM;AAE3B,MAAI,aAAa;AACf,eAAW,SAAS,OAAO,eAAe,eAAe;AAAA,MACvD,KAAK;AAAA,MACL,oBAAoB;AAAA,IACtB,CAAC;AAAA,EACH;AAEA,MAAI;AACJ,MAAI,mBAAmB;AACrB,aAAS,MAAM,SAAS,KAAK,EAAE,SAAS,GAAG,CAAC,EAAE,SAAS;AAAA,EACzD,WAAW,OAAO;AAChB,aAAS,MAAM,SAAS,IAAI,EAAE,SAAS;AAAA,EACzC,OAAO;AACL,aAAS,MAAM,SAAS,KAAK,EAAE,SAAS,GAAG,CAAC,EAAE,SAAS;AAAA,EACzD;AAGA,MAAI,iBAAiB;AACrB,MAAI,OAAO,SAAS,gBAAgB;AAClC,eAAW,WAAW,CAAC,IAAI,IAAI,EAAE,GAAG;AAClC,eAAS,MAAM,MAAM,MAAM,EACxB,OAAO,eAAe,eAAe,EAAE,KAAK,UAAU,oBAAoB,KAAK,CAAC,EAChF,KAAK,EAAE,QAAQ,CAAC,EAChB,SAAS;AACZ,UAAI,OAAO,UAAU,eAAgB;AAAA,IACvC;AACA,qBAAiB;AAAA,EACnB;AAEA,EAAAC,eAAc,WAAW,MAAM;AAE/B,QAAM,aAAa,MAAM,MAAM,MAAM,EAAE,SAAS;AAChD,UAAQ;AAAA,IACN,kBAAkB,KAAK,IAAI,MAAM,OAAO,WAAW,KAAK,IAAI,WAAW,MAAM,MACxE,OAAO,SAAS,MAAM,QAAQ,CAAC,CAAC,UAAU,OAAO,SAAS,MAAM,QAAQ,CAAC,CAAC,OAAO,cAAc;AAAA,EACtG;AAEA,SAAO,EAAE,QAAQ,QAAQ,WAAW,eAAe;AACrD;;;AD/GA,IAAM,gBAAgB,IAAI,OAAO;AACjC,IAAM,iBAAiB,KAAK,OAAO;AACnC,IAAME,oBAAmB;AAEzB,IAAM,mBAA2C;AAAA,EAC/C,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AACX;AAEA,SAAS,YAAY,UAA2B;AAC9C,SAAO,QAAQ,QAAQ,EAAE,YAAY,KAAK;AAC5C;AAEA,SAAS,kBAAkB,UAA0B;AACnD,SAAO,iBAAiB,QAAQ,QAAQ,EAAE,YAAY,CAAC,KAAK;AAC9D;AAMA,IAAM,sBAAsBC,GAAE,OAAO;AAAA,EACnC,MAAMA,GACH,OAAO,EACP,SAAS,6IAA6I;AAAA,EACzJ,WAAWA,GACR,OAAO,EACP,SAAS,EACT,SAAS,iFAAiF;AAAA,EAC7F,SAASA,GACN,OAAO,EACP,SAAS,EACT,SAAS,yFAAyF;AACvG,CAAC;AAEM,SAAS,mBAAmB,SAA8B;AA
C/D,SAAOC,MAAK;AAAA,IACV,aAAa,kFAAkF,QAAQ,gBAAgB;AAAA;AAAA;AAAA;AAAA,IAKvH,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,QAAQ,MAA2C;AAC9F,UAAI;AACF,cAAM,eAAe,WAAW,QAAQ,IACpC,WACAC,SAAQ,QAAQ,kBAAkB,QAAQ;AAE9C,cAAM,eAAe,SAAS,QAAQ,kBAAkB,YAAY;AACpE,YAAI,aAAa,WAAW,IAAI,KAAK,CAAC,WAAW,QAAQ,GAAG;AAC1D,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,UACX;AAAA,QACF;AAEA,YAAI,CAACC,YAAW,YAAY,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,mBAAmB,QAAQ;AAAA,YAClC,SAAS;AAAA,UACX;AAAA,QACF;AAEA,cAAM,QAAQ,MAAM,KAAK,YAAY;AAErC,YAAI,MAAM,YAAY,GAAG;AACvB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,UACX;AAAA,QACF;AAGA,YAAI,YAAY,YAAY,GAAG;AAC7B,cAAI,MAAM,OAAO,gBAAgB;AAC/B,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,wBAAwB,MAAM,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC,wBAAwB,iBAAiB,OAAO,IAAI;AAAA,cACvH,SAAS;AAAA,YACX;AAAA,UACF;AAEA,gBAAM,YAAY,MAAMC,UAAS,YAAY;AAC7C,gBAAM,oBAAoB,kBAAkB,YAAY;AACxD,gBAAM,UAAU,MAAM,oBAAoB,WAAW,iBAAiB;AACtE,gBAAM,SAAS,QAAQ,OAAO,SAAS,QAAQ;AAE/C,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,MAAM;AAAA,YACN,cAAc,SAAS,QAAQ,kBAAkB,YAAY;AAAA,YAC7D,SAAS,WAAW,YAAY,KAAK,QAAQ,SAAS,MAAM,MAAM,OAAO,MAAM,QAAQ,CAAC,CAAC;AAAA,YACzF,WAAW,QAAQ;AAAA,YACnB,WAAW;AAAA,YACX,WAAW,MAAM;AAAA,UACnB;AAAA,QACF;AAGA,YAAI,MAAM,OAAO,eAAe;AAC9B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,uBAAuB,MAAM,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC,wBAAwB,gBAAgB,OAAO,IAAI;AAAA,YACrH,SAAS;AAAA,UACX;AAAA,QACF;AAEA,YAAI,UAAU,MAAMA,UAAS,cAAc,OAAO;AAElD,YAAI,cAAc,UAAa,YAAY,QAAW;AACpD,gBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,gBAAM,SAAS,aAAa,KAAK;AACjC,gBAAM,MAAM,WAAW,MAAM;AAE7B,cAAI,QAAQ,KAAK,SAAS,MAAM,QAAQ;AACtC,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,cAAc,SAAS,8BAA8B,MAAM,MAAM;AAAA,cACxE,SAAS;AAAA,YACX;AAAA,UACF;AAEA,oBAAU,MACP,MAAM,OAAO,GAAG,EAChB,IAAI,CAAC,MAAM,QAAQ,IAAI,QAAQ,MAAM,GAAG,SAAS,EAAE,SAAS,CAAC,CAAC,KAAK,IAAI,EAAE,EACzE,KAAK,IAAI;AAAA,QACd;AAEA,cAAM,mBAAmB,eAAe,SAASL,iBAAgB;AACjE,cAAM,eAAe,iBAAiB,SAAS,QAAQ;AAEvD,eAAO;AAAA,UACL,SAAS;AAAA,UACT,MAAM;AAAA,UACN,cAAc,SAAS,QAAQ,kBAAkB,YAAY;AAAA,UAC7D,SAAS;AAAA,UACT,WAAW,QAAQ,MAAM,IAAI,EAAE;AAAA,
UAC/B;AAAA,UACA,WAAW,MAAM;AAAA,QACnB;AAAA,MACF,SAAS,OAAY;AACnB,YAAI,MAAM,SAAS,2BAA2B,MAAM,QAAQ,SAAS,UAAU,GAAG;AAChF,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,UACX;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,UACb,SAAS;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAAA,IAEA,eAAe,CAAC,EAAE,OAAO,MAAM;AAC7B,UAAI,UAAU,OAAO,WAAW,YAAY,eAAe,UAAU,OAAO,WAAW;AACrF,cAAM,SAAS;AACf,eAAO;AAAA,UACL,MAAM;AAAA,UACN,OAAO;AAAA,YACL,EAAE,MAAM,QAAiB,MAAM,OAAO,QAAQ;AAAA,YAC9C,EAAE,MAAM,cAAuB,MAAM,OAAO,WAAW,WAAW,OAAO,UAAU;AAAA,UACrF;AAAA,QACF;AAAA,MACF;AACA,aAAO,OAAO,WAAW,WACrB,EAAE,MAAM,QAAiB,OAAO,OAAO,IACvC,EAAE,MAAM,QAAiB,OAAO,OAAc;AAAA,IACpD;AAAA,EACF,CAAC;AACH;;;AE7LA,SAAS,QAAAM,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,YAAAC,WAAU,aAAAC,YAAW,SAAAC,cAAa;AAC3C,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,WAAAC,gBAAe;AACvD,SAAS,cAAAC,mBAAkB;;;ACQ3B;AALA,SAAS,YAAAC,WAAU,aAAAC,YAAW,QAAQ,SAAAC,cAAa;AACnD,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,WAAAC,UAAS,YAAAC,WAAU,WAAAC,gBAAe;AAC3C,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAW1B,IAAMC,aAAYD,WAAUD,KAAI;AAKhC,eAAe,WAAW,kBAAuD;AAC/E,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAME,WAAU,sBAAsB;AAAA,MACvD,KAAK;AAAA,MACL,SAAS;AAAA,IACX,CAAC;AACD,WAAO,OAAO,KAAK;AAAA,EACrB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAwBA,IAAM,iBAAiB,oBAAI,IAA+B;AAKnD,SAAS,qBAAqB,WAAmB,kBAA6C;AACnG,MAAI,UAAU,eAAe,IAAI,SAAS;AAC1C,MAAI,CAAC,SAAS;AACZ,cAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA,qBAAqB;AAAA,IACvB;AACA,mBAAe,IAAI,WAAW,OAAO;AAAA,EACvC;AACA,SAAO;AACT;AAMA,eAAsB,iBACpB,WACA,kBACA,iBACqB;AAErB,QAAM,UAAU,MAAM,WAAW,gBAAgB;AAGjD,QAAM,aAAa,MAAM,kBAAkB,OAAO;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAGD,QAAM,UAAU,qBAAqB,WAAW,gBAAgB;AAChE,UAAQ,sBAAsB,WAAW;AAEzC,SAAO;AACT;AAMA,eAAsB,WACpB,WACA,kBACA,UAC4B;AAC5B,QAAM,UAAU,qBAAqB,WAAW,gBAAgB;AAEhE,MAAI,CAAC,QAAQ,qBAAqB;AAChC,YAAQ,KAAK,yDAAyD;AACtE,WAAO;AAAA,EACT;AAGA,QAAM,eAAeC,SAAQ,kBAAkB,QAAQ;AACvD,QAAM,eAAeC,UAAS,kBAAkB,YAAY;AAG5D,MAAI,MAAM,kBAAkB,UAAU,QAAQ,qBAAqB,YAAY,GAAG;AAEhF,WAAO;AAAA,EACT;AAGA,MAAI,kBAAiC;AACrC,MAAI,UAAU;AAEd,MAAIC,YAAW,YAAY,GAAG;A
AC5B,QAAI;AACF,wBAAkB,MAAMC,UAAS,cAAc,OAAO;AACtD,gBAAU;AAAA,IACZ,SAAS,OAAY;AACnB,cAAQ,KAAK,gDAAgD,MAAM,OAAO,EAAE;AAAA,IAC9E;AAAA,EACF;AAGA,QAAM,SAAS,MAAM,kBAAkB,OAAO;AAAA,IAC5C,cAAc,QAAQ;AAAA,IACtB;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAUA,eAAsB,mBACpB,WACA,cAQC;AAED,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,MACL,SAAS;AAAA,MACT,eAAe;AAAA,MACf,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,oBAAoB;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,aAAa,MAAM,kBAAkB,QAAQ,YAAY;AAC/D,MAAI,CAAC,cAAc,WAAW,cAAc,WAAW;AACrD,WAAO;AAAA,MACL,SAAS;AAAA,MACT,eAAe;AAAA,MACf,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,oBAAoB;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,mBAAmB,QAAQ;AAKjC,QAAM,kBAAkB,MAAM,kBAAkB,gBAAgB,WAAW,WAAW,eAAe;AAIrG,QAAM,uBAAuB,oBAAI,IAAwB;AACzD,aAAW,UAAU,iBAAiB;AACpC,QAAI,CAAC,qBAAqB,IAAI,OAAO,QAAQ,GAAG;AAC9C,2BAAqB,IAAI,OAAO,UAAU,MAAM;AAAA,IAClD;AAAA,EACF;AAEA,MAAI,gBAAgB;AACpB,MAAI,eAAe;AAGnB,aAAW,CAAC,UAAU,MAAM,KAAK,sBAAsB;AACrD,UAAM,eAAeH,SAAQ,kBAAkB,QAAQ;AAEvD,QAAI;AACF,UAAI,OAAO,WAAW,OAAO,oBAAoB,MAAM;AAErD,cAAM,MAAMI,SAAQ,YAAY;AAChC,YAAI,CAACF,YAAW,GAAG,GAAG;AACpB,gBAAMG,OAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,QACtC;AACA,cAAMC,WAAU,cAAc,OAAO,iBAAiB,OAAO;AAC7D;AAAA,MACF,WAAW,CAAC,OAAO,SAAS;AAE1B,YAAIJ,YAAW,YAAY,GAAG;AAC5B,gBAAM,OAAO,YAAY;AACzB;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAY;AACnB,cAAQ,MAAM,qBAAqB,QAAQ,KAAK,MAAM,OAAO,EAAE;AAAA,IACjE;AAAA,EACF;AAGA,QAAM,kBAAkB,MAAM,eAAe,mBAAmB,WAAW,WAAW,eAAe;AAGrG,QAAM,qBAAqB,gBAAgB,WAAW,WAAW,SAAS;AAG1E,QAAM,qBAAqB,MAAM,kBAAkB,oBAAoB,WAAW,WAAW,eAAe;AAG5G,QAAM,UAAU,qBAAqB,WAAW,gBAAgB;AAChE,UAAQ,sBAAsB,WAAW;AAEzC,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAsB,eAAe,WAA0C;AAC7E,SAAO,kBAAkB,aAAa,SAAS;AACjD;AAKA,eAAsB,eACpB,WAQC;AACD,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,OAAO,CAAC,EAAE;AAAA,EACrB;AAEA,QAAM,mBAAmB,QAAQ;AAGjC,QAAM,aAAa,MAAM,kBAAkB,aAAa,SAAS;AAGjE,QAAM,uBAAuB,oBAAI,IAAwB;AACzD,aAAW,UAAU,YAAY;AAC/B,QAAI,CAAC,qBAAqB,IAAI,OAAO,
QAAQ,GAAG;AAC9C,2BAAqB,IAAI,OAAO,UAAU,MAAM;AAAA,IAClD;AAAA,EACF;AAEA,QAAM,QAKD,CAAC;AAEN,aAAW,CAAC,UAAU,cAAc,KAAK,sBAAsB;AAC7D,UAAM,eAAeF,SAAQ,kBAAkB,QAAQ;AAGvD,QAAI,iBAAgC;AACpC,QAAI,gBAAgB;AAEpB,QAAIE,YAAW,YAAY,GAAG;AAC5B,UAAI;AACF,yBAAiB,MAAMC,UAAS,cAAc,OAAO;AACrD,wBAAgB;AAAA,MAClB,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,QAAI;AACJ,QAAI,CAAC,eAAe,WAAW,eAAe;AAC5C,eAAS;AAAA,IACX,WAAW,eAAe,WAAW,CAAC,eAAe;AACnD,eAAS;AAAA,IACX,OAAO;AACL,eAAS;AAAA,IACX;AAEA,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN;AAAA,MACA,iBAAiB,eAAe;AAAA,MAChC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,MAAM;AACjB;AAKO,SAAS,uBAAuB,WAAyB;AAC9D,iBAAe,OAAO,SAAS;AACjC;;;ACnVA,SAAS,WAAAI,UAAS,WAAAC,gBAAe;;;ACRjC,SAAS,aAAa;AACtB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,WAAAC,UAAS,WAAAC,gBAAe;AAMjC,SAAS,gBAAgB,UAAkB,SAAkC;AAC3E,MAAI,MAAM;AACV,QAAM,OAAO;AAEb,SAAO,QAAQ,MAAM;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAIF,YAAWC,SAAQ,KAAK,MAAM,CAAC,GAAG;AACpC,eAAO;AAAA,MACT;AAAA,IACF;AACA,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK;AACpB,UAAM;AAAA,EACR;AAEA,SAAO;AACT;AAKA,eAAe,cAAc,KAA+B;AAC1D,MAAI;AACF,UAAM,EAAE,MAAAC,MAAK,IAAI,MAAM,OAAO,eAAoB;AAClD,UAAM,EAAE,WAAAC,WAAU,IAAI,MAAM,OAAO,MAAW;AAC9C,UAAMC,aAAYD,WAAUD,KAAI;AAEhC,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,WAAW,YAAY,SAAS,GAAG,KAAK,SAAS,GAAG;AAE1D,UAAME,WAAU,QAAQ;AACxB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAQO,IAAM,mBAAwC;AAAA,EACnD,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,YAAY,CAAC,OAAO,QAAQ,OAAO,QAAQ,QAAQ,QAAQ,QAAQ,MAAM;AAAA,EAEzE,MAAM,MAAM,MAA+C;AAEzD,UAAM,cAAc,gBAAgB,MAAM;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,KAAK;AAGN,UAAM,SAAS,MAAM,cAAc,KAAK;AACxC,UAAM,UAAU,MAAM,cAAc,MAAM;AAC1C,UAAM,UAAU,MAAM,cAAc,MAAM;AAE1C,QAAI;AAEJ,QAAI,SAAS;AACX,YAAM,CAAC,QAAQ,8BAA8B,SAAS;AAAA,IACxD,WAAW,SAAS;AAClB,YAAM,CAAC,QAAQ,8BAA8B,SAAS;AAAA,IACxD,WAAW,QAAQ;AACjB,YAAM,CAAC,OAAO,8BAA8B,SAAS;AAAA,IACvD,OAAO;AACL,cAAQ,KAAK,8EAA8E;AAC3F,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,OAAO,MAAM,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC,GAAG;AAAA,QACvC,KAAK;AAAA,QACL,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,QAC9B,KAAK;AAAA,UACH,GAAG,QAAQ;AAAA;AAA
A,UAEX,SAAS;AAAA,QACX;AAAA,MACF,CAAC;AAGD,WAAK,QAAQ,GAAG,QAAQ,CAAC,SAAS;AAChC,cAAM,MAAM,KAAK,SAAS,EAAE,KAAK;AACjC,YAAI,OAAO,CAAC,IAAI,SAAS,YAAY,GAAG;AAEtC,kBAAQ,MAAM,2BAA2B,GAAG;AAAA,QAC9C;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT,gBAAgB;AAAA;AAAA,UAEd,aAAa;AAAA,YACX,gCAAgC;AAAA,YAChC,0CAA0C;AAAA,YAC1C,yCAAyC;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,qDAAqD,KAAK;AACxE,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAKO,IAAM,UAAiC;AAAA,EAC5C;AACF;AAKO,SAAS,sBAAsB,KAAyC;AAC7E,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,WAAW,SAAS,GAAG,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,yBAAmC;AACjD,QAAM,aAAa,oBAAI,IAAY;AACnC,aAAW,UAAU,SAAS;AAC5B,eAAW,OAAO,OAAO,YAAY;AACnC,iBAAW,IAAI,GAAG;AAAA,IACpB;AAAA,EACF;AACA,SAAO,MAAM,KAAK,UAAU;AAC9B;;;ACxJA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,eAAe,qBAAqB;AAC7C,SAAS,YAAAC,iBAAgB;AACzB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,WAAAC,UAAS,iBAAiB;AAMnC,SAAS,cAAc,UAA0B;AAC/C,QAAM,MAAMA,SAAQ,QAAQ,EAAE,YAAY;AAC1C,QAAM,MAA8B;AAAA,IAClC,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,EACZ;AACA,SAAO,IAAI,GAAG,KAAK;AACrB;AAKO,SAAS,cAAc,UAA0B;AACtD,SAAO,UAAU,QAAQ;AAC3B;AAKA,eAAsB,aACpB,UACA,QACA,MACoB;AACpB,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,MAAI,CAAC,KAAK,UAAU,CAAC,KAAK,OAAO;AAC/B,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAGA,QAAM,aAAgC;AAAA,IACpC,IAAI,oBAAoB,KAAK,MAAM;AAAA,IACnC,IAAI,oBAAoB,KAAK,KAAK;AAAA,EACpC;AAGA,QAAM,cAAc,oBAAI,IAA0B;AAGlD,QAAM,eAAe,oBAAI,IAAoB;AAG7C,QAAM,sBAAsB,oBAAI,IAA+B;AAG/D,aAAW,eAAe,mCAAmC,CAAC,WAAgB;AAC5E,UAAM,WAAW,cAAc,cAAc,OAAO,GAAG,CAAC;AACxD,gBAAY,IAAI,UAAU,OAAO,eAAe,CAAC,CAAC;AAGlD,UAAM,YAAY,oBAAoB,IAAI,QAAQ;AAClD,QAAI,WAAW;AACb,iBAAW,YAAY,WAAW;AAChC,iBAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AAGD,aAAW,UAAU,2BAA2B,OAAO,WAAgB;AAErE,WAAO,OAAO,MAAM,IAAI,MAAM,OAAO,kBAAkB,CAAC,CAAC;AAAA,EAC3D,CAAC;AAED,aAAW,UAAU,6BAA6B,YAAY;AAE5D,WAAO;AAAA,EACT,CAAC;AAED,aAAW,UAAU,kCAAkC,YAAY;AAEjE,WAAO;AAAA,EACT,CAAC;AAED,aAAW,eAAe,qB
AAqB,CAAC,WAAgB;AAE9D,QAAI,OAAO,QAAQ,GAAG;AACpB,cAAQ,MAAM,QAAQ,QAAQ,KAAK,OAAO,OAAO;AAAA,IACnD;AAAA,EACF,CAAC;AAGD,aAAW,OAAO;AAGlB,QAAM,aAAa,MAAM,WAAW,YAAY,cAAc;AAAA,IAC5D,WAAW,QAAQ;AAAA,IACnB,SAAS,cAAc,IAAI,EAAE;AAAA,IAC7B,UAAU;AAAA,IACV,kBAAkB;AAAA,MAChB;AAAA,QACE,MAAM;AAAA,QACN,KAAK,cAAc,IAAI,EAAE;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,cAAc;AAAA,MACZ,cAAc;AAAA,QACZ,iBAAiB;AAAA,UACf,qBAAqB;AAAA,UACrB,UAAU;AAAA,UACV,mBAAmB;AAAA,UACnB,SAAS;AAAA,QACX;AAAA,QACA,oBAAoB;AAAA,UAClB,oBAAoB;AAAA,UACpB,gBAAgB;AAAA,UAChB,wBAAwB;AAAA,QAC1B;AAAA,QACA,YAAY;AAAA,UACV,qBAAqB;AAAA,UACrB,gBAAgB;AAAA,YACd,gBAAgB;AAAA,YAChB,qBAAqB,CAAC,YAAY,WAAW;AAAA,UAC/C;AAAA,QACF;AAAA,QACA,OAAO;AAAA,UACL,qBAAqB;AAAA,UACrB,eAAe,CAAC,YAAY,WAAW;AAAA,QACzC;AAAA,QACA,YAAY;AAAA,UACV,qBAAqB;AAAA,QACvB;AAAA,QACA,YAAY;AAAA,UACV,qBAAqB;AAAA,QACvB;AAAA,QACA,gBAAgB;AAAA,UACd,qBAAqB;AAAA,UACrB,mCAAmC;AAAA,UACnC,YAAY;AAAA,YACV,UAAU,CAAC,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,GAAE,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,IAAG,EAAE;AAAA,UACjF;AAAA,QACF;AAAA,MACF;AAAA,MACA,WAAW;AAAA,QACT,eAAe;AAAA,QACf,wBAAwB;AAAA,UACtB,qBAAqB;AAAA,QACvB;AAAA,QACA,uBAAuB;AAAA,UACrB,qBAAqB;AAAA,QACvB;AAAA,QACA,kBAAkB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,uBAAuB,OAAO;AAAA,EAChC,CAAC;AAGD,QAAM,WAAW,iBAAiB,eAAe,CAAC,CAAC;AAGnD,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IAEA,MAAM,WAAW,UAAiC;AAChD,YAAM,aAAa,cAAc,QAAQ;AAEzC,UAAI,CAACD,YAAW,UAAU,GAAG;AAC3B;AAAA,MACF;AAEA,UAAI;AACF,cAAM,UAAU,MAAMD,UAAS,YAAY,OAAO;AAClD,cAAM,WAAW,aAAa,IAAI,UAAU,KAAK,MAAM;AACvD,qBAAa,IAAI,YAAY,OAAO;AAEpC,YAAI,YAAY,GAAG;AAEjB,gBAAM,WAAW,iBAAiB,wBAAwB;AAAA,YACxD,cAAc;AAAA,cACZ,KAAK,cAAc,UAAU,EAAE;AAAA,cAC/B,YAAY,cAAc,UAAU;AAAA,cACpC;AAAA,cACA,MAAM;AAAA,YACR;AAAA,UACF,CAAC;AAAA,QACH,OAAO;AAEL,gBAAM,WAAW,iBAAiB,0BAA0B;AAAA,YAC1D,cAAc;AAAA,cACZ,KAAK,cAAc,UAAU,EAAE;AAAA,cAC/B;AAAA,YACF;AAAA,YACA,gBAAgB,CAAC,EAAE,MAAM,QAAQ,CAAC;AAAA,UACpC,CAAC;AAAA,QACH;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,MAAM,+BAA+B,KAAK;AAAA,MACpD;AAAA,IACF;AAAA,IAEA,MAAM,aAAa,UA
AiC;AAClD,YAAM,aAAa,cAAc,QAAQ;AAEzC,UAAI,CAACC,YAAW,UAAU,GAAG;AAC3B;AAAA,MACF;AAEA,UAAI;AACF,cAAM,UAAU,MAAMD,UAAS,YAAY,OAAO;AAClD,cAAM,WAAW,aAAa,IAAI,UAAU,KAAK,KAAK;AACtD,qBAAa,IAAI,YAAY,OAAO;AAEpC,cAAM,WAAW,iBAAiB,0BAA0B;AAAA,UAC1D,cAAc;AAAA,YACZ,KAAK,cAAc,UAAU,EAAE;AAAA,YAC/B;AAAA,UACF;AAAA,UACA,gBAAgB,CAAC,EAAE,MAAM,QAAQ,CAAC;AAAA,QACpC,CAAC;AAAA,MACH,SAAS,OAAO;AACd,gBAAQ,MAAM,iCAAiC,KAAK;AAAA,MACtD;AAAA,IACF;AAAA,IAEA,MAAM,YAAY,UAAiC;AACjD,YAAM,aAAa,cAAc,QAAQ;AACzC,mBAAa,OAAO,UAAU;AAC9B,kBAAY,OAAO,UAAU;AAE7B,UAAI;AACF,cAAM,WAAW,iBAAiB,yBAAyB;AAAA,UACzD,cAAc;AAAA,YACZ,KAAK,cAAc,UAAU,EAAE;AAAA,UACjC;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAO;AACd,gBAAQ,MAAM,gCAAgC,KAAK;AAAA,MACrD;AAAA,IACF;AAAA,IAEA,MAAM,0BAA0B,SAA8D;AAC5F,UAAI;AACF,cAAM,WAAW,iBAAiB,mCAAmC;AAAA,UACnE;AAAA,QACF,CAAC;AAAA,MACH,SAAS,OAAO;AACd,gBAAQ,MAAM,wCAAwC,KAAK;AAAA,MAC7D;AAAA,IACF;AAAA,IAEA,MAAM,mBAAmB,UAAkB,YAAY,KAA6B;AAClF,YAAM,aAAa,cAAc,QAAQ;AAEzC,aAAO,IAAI,QAAsB,CAACG,cAAY;AAC5C,cAAM,YAAY,KAAK,IAAI;AAC3B,YAAI;AACJ,YAAI,WAAW;AAEf,cAAMC,WAAU,MAAM;AACpB,cAAI,cAAe,cAAa,aAAa;AAC7C,gBAAM,YAAY,oBAAoB,IAAI,UAAU;AACpD,cAAI,WAAW;AACb,kBAAM,MAAM,UAAU,QAAQ,YAAY;AAC1C,gBAAI,OAAO,EAAG,WAAU,OAAO,KAAK,CAAC;AACrC,gBAAI,UAAU,WAAW,GAAG;AAC1B,kCAAoB,OAAO,UAAU;AAAA,YACvC;AAAA,UACF;AAAA,QACF;AAEA,cAAM,SAAS,MAAM;AACnB,cAAI,SAAU;AACd,qBAAW;AACX,UAAAA,SAAQ;AACR,UAAAD,UAAQ,YAAY,IAAI,UAAU,KAAK,CAAC,CAAC;AAAA,QAC3C;AAEA,cAAM,eAAe,MAAM;AAEzB,cAAI,cAAe,cAAa,aAAa;AAC7C,0BAAgB,WAAW,QAAQ,GAAG;AAAA,QACxC;AAGA,YAAI,CAAC,oBAAoB,IAAI,UAAU,GAAG;AACxC,8BAAoB,IAAI,YAAY,CAAC,CAAC;AAAA,QACxC;AACA,4BAAoB,IAAI,UAAU,EAAG,KAAK,YAAY;AAGtD,mBAAW,MAAM;AACf,cAAI,CAAC,UAAU;AACb,mBAAO;AAAA,UACT;AAAA,QACF,GAAG,SAAS;AAGZ,YAAI,YAAY,IAAI,UAAU,GAAG;AAC/B,uBAAa;AAAA,QACf;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,eAAe,UAAgC;AAC7C,aAAO,YAAY,IAAI,cAAc,QAAQ,CAAC,KAAK,CAAC;AAAA,IACtD;AAAA,IAEA,oBAA+C;AAC7C,aAAO,IAAI,IAAI,WAAW;AAAA,IAC5B;AAAA,IAEA,MAAM,cAAc,UAAkB,MAAc,WAAwC;AAC1F,YAAM,aAAa,cAAc,QAAQ;AAEzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MAC
pC;AACA,UAAI;AACF,cAAM,SAAc,MAAM,WAAW,YAAY,2BAA2B;AAAA,UAC1E,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,UACpD,UAAU,EAAE,MAAM,UAAU;AAAA,QAC9B,CAAC;AACD,YAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,cAAM,QAAQ,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AACtD,eAAO,MAAM,IAAI,CAAC,OAAY;AAAA,UAC5B,KAAK,EAAE,aAAa,EAAE;AAAA,UACtB,OAAO,EAAE,eAAe,EAAE;AAAA,QAC5B,EAAE;AAAA,MACJ,SAAS,OAAO;AACd,gBAAQ,MAAM,mCAAmC,KAAK;AACtD,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,cAAc,UAAkB,MAAc,WAAmB,qBAAqB,OAA4B;AACtH,YAAM,aAAa,cAAc,QAAQ;AACzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAS,MAAM,WAAW,YAAY,2BAA2B;AAAA,UACrE,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,UACpD,UAAU,EAAE,MAAM,UAAU;AAAA,UAC5B,SAAS,EAAE,mBAAmB;AAAA,QAChC,CAAC;AACD,eAAQ,UAAyB,CAAC;AAAA,MACpC,SAAS,OAAO;AACd,gBAAQ,MAAM,mCAAmC,KAAK;AACtD,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,SAAS,UAAkB,MAAc,WAA2C;AACxF,YAAM,aAAa,cAAc,QAAQ;AACzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAc,MAAM,WAAW,YAAY,sBAAsB;AAAA,UACrE,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,UACpD,UAAU,EAAE,MAAM,UAAU;AAAA,QAC9B,CAAC;AACD,YAAI,CAAC,UAAU,CAAC,OAAO,SAAU,QAAO;AACxC,YAAI,OAAO,OAAO,aAAa,SAAU,QAAO,OAAO;AACvD,YAAI,OAAO,SAAS,MAAO,QAAO,OAAO,SAAS;AAClD,YAAI,MAAM,QAAQ,OAAO,QAAQ,GAAG;AAClC,iBAAO,OAAO,SAAS,IAAI,CAAC,MAAW,OAAO,MAAM,WAAW,IAAI,EAAE,KAAK,EAAE,KAAK,IAAI;AAAA,QACvF;AACA,eAAO;AAAA,MACT,SAAS,OAAO;AACd,gBAAQ,MAAM,8BAA8B,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,IAEA,MAAM,mBAAmB,UAA6C;AACpE,YAAM,aAAa,cAAc,QAAQ;AACzC,UAAI,CAAC,aAAa,IAAI,UAAU,GAAG;AACjC,cAAM,OAAO,WAAW,UAAU;AAAA,MACpC;AACA,UAAI;AACF,cAAM,SAAgB,MAAM,WAAW,YAAY,+BAA+B;AAAA,UAChF,cAAc,EAAE,KAAK,cAAc,UAAU,EAAE,KAAK;AAAA,QACtD,CAAC;AACD,YAAI,CAAC,UAAU,OAAO,WAAW,EAAG,QAAO,CAAC;AAI5C,YAAI,OAAO,CAAC,EAAE,OAAO;AACnB,iBAAO;AAAA,QACT;AAEA,eAAO,OAAO,IAAI,CAAC,QAAa;AAAA,UAC9B,MAAM,GAAG;AAAA,UACT,MAAM,GAAG;AAAA,UACT,OAAO,GAAG,UAAU,SAAS,EAAE,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,GAAG,KAAK,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE;AAAA,UAChG,gBAAgB,GAAG,UAAU,SAAS,
EAAE,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,GAAG,KAAK,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE;AAAA,UACzG,QAAQ,GAAG;AAAA,QACb,EAAE;AAAA,MACJ,SAAS,OAAO;AACd,gBAAQ,MAAM,yCAAyC,KAAK;AAC5D,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,qBAAqB,OAA6C;AACtE,UAAI;AACF,cAAM,SAAS,MAAM,WAAW,YAAY,oBAAoB,EAAE,MAAM,CAAC;AACzE,eAAQ,UAAkC,CAAC;AAAA,MAC7C,SAAS,OAAO;AACd,gBAAQ,MAAM,0CAA0C,KAAK;AAC7D,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,MAAM,WAA0B;AAC9B,UAAI;AACF,cAAM,WAAW,YAAY,UAAU;AACvC,cAAM,WAAW,iBAAiB,MAAM;AACxC,mBAAW,IAAI;AACf,mBAAW,QAAQ;AACnB,aAAK,KAAK;AAAA,MACZ,SAAS,OAAO;AAEd,aAAK,KAAK,SAAS;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACnRO,SAAS,iBAAiB,YAAgC;AAC/D,QAAM,WAAW;AAAA,IACf,CAAC,aAAwB,GAAG;AAAA,IAC5B,CAAC,eAA0B,GAAG;AAAA,IAC9B,CAAC,mBAA8B,GAAG;AAAA,IAClC,CAAC,YAAuB,GAAG;AAAA,EAC7B,EAAE,WAAW,YAAY,aAAwB;AAEjD,QAAM,OAAO,WAAW,MAAM,MAAM,OAAO;AAC3C,QAAM,MAAM,WAAW,MAAM,MAAM,YAAY;AAC/C,QAAM,SAAS,WAAW,SAAS,KAAK,WAAW,MAAM,MAAM;AAE/D,SAAO,GAAG,QAAQ,KAAK,IAAI,IAAI,GAAG,IAAI,MAAM,IAAI,WAAW,OAAO;AACpE;AAKO,SAAS,0BACd,UACA,aACA,UAA6D,CAAC,GACtD;AACR,QAAM,EAAE,iBAAiB,IAAI,aAAa,KAAK,IAAI;AAGnD,QAAM,WAAW,aACb,YAAY,OAAO,OAAK,EAAE,aAAa,aAAwB,IAC/D;AAEJ,MAAI,SAAS,WAAW,EAAG,QAAO;AAElC,QAAM,UAAU,SAAS,MAAM,GAAG,cAAc;AAChD,QAAM,SAAS,SAAS,SAAS,iBAC7B;AAAA,UAAa,SAAS,SAAS,cAAc,UAC7C;AAEJ,QAAM,YAAY,QAAQ,IAAI,gBAAgB,EAAE,KAAK,IAAI;AAEzD,SAAO;AAAA;AAAA;AAAA,qBAAyE,QAAQ;AAAA,EAAO,SAAS,GAAG,MAAM;AAAA;AACnH;;;AHnLA,IAAI,QAAkB;AAAA,EACpB,SAAS,oBAAI,IAAI;AAAA,EACjB,QAAQ,oBAAI,IAAI;AAAA,EAChB,aAAa;AACf;AAaA,eAAe,iBAAiB,UAA6C;AAC3E,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,MAAME,SAAQ,UAAU;AAG9B,QAAM,YAAY,sBAAsB,GAAG;AAC3C,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAGA,QAAM,OAAOC,SAAQ,UAAU;AAC/B,QAAM,MAAM,GAAG,UAAU,EAAE,IAAI,IAAI;AAGnC,QAAM,WAAW,MAAM,QAAQ,IAAI,GAAG;AACtC,MAAI,UAAU;AACZ,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,OAAO,IAAI,GAAG,GAAG;AACzB,WAAO;AAAA,EACT;AAGA,MAAI;AACF,UAAM,SAAS,MAAM,UAAU,MAAM,IAAI;AACzC,QAAI,CAAC,QAAQ;AACX,YAAM,OAAO,IAAI,GAAG;AACpB,aAAO;AAAA,IACT;AAEA,YAAQ,IAAI,iBAAiB,UAAU,IAAI,QAAQ,IAAI,EAAE;AAEzD,UAAM,SAAS,MAAM,aAAa,UA
AU,IAAI,QAAQ,IAAI;AAC5D,UAAM,QAAQ,IAAI,KAAK,MAAM;AAG7B,WAAO,QAAQ,GAAG,QAAQ,CAAC,SAAS;AAClC,cAAQ,IAAI,SAAS,UAAU,IAAI,qBAAqB,IAAI,EAAE;AAC9D,YAAM,QAAQ,OAAO,GAAG;AAAA,IAC1B,CAAC;AAED,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,yBAAyB,UAAU,IAAI,KAAK,KAAK;AAC/D,UAAM,OAAO,IAAI,GAAG;AACpB,WAAO;AAAA,EACT;AACF;AAKA,eAAe,kBAAkB,UAAwC;AACvE,QAAM,SAAS,MAAM,iBAAiB,QAAQ;AAC9C,SAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAC9B;AAWA,eAAsB,UAAU,UAAkB,qBAAqB,OAAsB;AAC3F,QAAM,UAAU,MAAM,kBAAkB,QAAQ;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB;AAAA,EACF;AAGA,QAAM,QAAQ,IAAI,QAAQ,IAAI,YAAU,OAAO,WAAW,QAAQ,CAAC,CAAC;AAGpE,MAAI,oBAAoB;AACtB,UAAM,QAAQ,IAAI,QAAQ,IAAI,YAAU,OAAO,mBAAmB,QAAQ,CAAC,CAAC;AAAA,EAC9E;AACF;AAKA,eAAsB,eAAe,UAAyC;AAC5E,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,UAAU,MAAM,kBAAkB,UAAU;AAElD,QAAM,iBAA+B,CAAC;AAEtC,aAAW,UAAU,SAAS;AAC5B,UAAM,QAAQ,OAAO,eAAe,UAAU;AAC9C,mBAAe,KAAK,GAAG,KAAK;AAAA,EAC9B;AAEA,SAAO;AACT;AAKA,eAAsB,oBAA2D;AAC/E,QAAM,UAAwC,CAAC;AAE/C,aAAW,UAAU,MAAM,QAAQ,OAAO,GAAG;AAC3C,UAAM,cAAc,OAAO,kBAAkB;AAC7C,eAAW,CAAC,MAAM,WAAW,KAAK,YAAY,QAAQ,GAAG;AACvD,YAAM,WAAW,QAAQ,IAAI,KAAK,CAAC;AACnC,eAAS,KAAK,GAAG,WAAW;AAC5B,cAAQ,IAAI,IAAI;AAAA,IAClB;AAAA,EACF;AAEA,SAAO;AACT;AAsCA,eAAsB,cACpB,UACA,MACA,WACA,qBAAqB,OACA;AACrB,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,MAAM,iBAAiB,UAAU;AAChD,MAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,SAAO,OAAO,cAAc,YAAY,MAAM,WAAW,kBAAkB;AAC7E;AAKA,eAAsB,SAAS,UAAkB,MAAc,WAA2C;AACxG,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,MAAM,iBAAiB,UAAU;AAChD,MAAI,CAAC,OAAQ,QAAO;AACpB,SAAO,OAAO,SAAS,YAAY,MAAM,SAAS;AACpD;AAKA,eAAsB,mBAAmB,UAA6C;AACpF,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,MAAM,iBAAiB,UAAU;AAChD,MAAI,CAAC,OAAQ,QAAO,CAAC;AACrB,SAAO,OAAO,mBAAmB,UAAU;AAC7C;AAyBA,eAAsB,wBACpB,UACA,UAA6D,CAAC,GAC7C;AACjB,QAAM,cAAc,MAAM,eAAe,QAAQ;AACjD,SAAO,0BAA0B,UAAU,aAAa,OAAO;AACjE;AAYO,SAAS,YAAY,UAA2B;AACrD,QAAM,MAAMC,SAAQ,QAAQ;AAC5B,SAAO,sBAAsB,GAAG,MAAM;AACxC;;;AFnPA,IAAM,0BAA0B,KAAK;AAErC,IAAM,uBAAuBC,GAAE,OAAO;AAAA,EACpC,MAAMA,GACH,OAAO,EACP,SAAS,yEAAyE;AAAA,EACrF,MAAMA,GACH,KAAK,CAAC,QAAQ,aAAa,CAAC,EAC5B,SAAS,2FAA2F;AAAA,EACvG
,SAASA,GACN,OAAO,EACP,SAAS,EACT,SAAS,4DAA4D;AAAA,EACxE,YAAYA,GACT,OAAO,EACP,SAAS,EACT,SAAS,8DAA8D;AAAA,EAC1E,YAAYA,GACT,OAAO,EACP,SAAS,EACT,SAAS,+DAA+D;AAC7E,CAAC;AAEM,SAAS,oBAAoB,SAA+B;AACjE,SAAOC,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAeI,QAAQ,gBAAgB;AAAA,IAEzC,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,MAAM,MAAM,SAAS,YAAY,WAAW,MAA4C;AACxG,UAAI;AAEF,cAAM,eAAeC,YAAW,IAAI,IAChC,OACAC,SAAQ,QAAQ,kBAAkB,IAAI;AAG1C,cAAM,eAAeC,UAAS,QAAQ,kBAAkB,YAAY;AACpE,YAAI,aAAa,WAAW,IAAI,KAAK,CAACF,YAAW,IAAI,GAAG;AACtD,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,YAAI,SAAS,QAAQ;AAEnB,cAAI,YAAY,QAAW;AACzB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO;AAAA,YACT;AAAA,UACF;AAEA,gBAAM,UAAUG,YAAW,YAAY;AACvC,gBAAM,SAAS,UAAU,aAAa;AAGtC,kBAAQ,IAAI,4CAA4C,CAAC,CAAC,QAAQ,UAAU;AAC5E,kBAAQ,IAAI,4CAA4C,YAAY;AACpE,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR;AAAA,YACA,aAAa,QAAQ;AAAA,UACvB,CAAC;AAGD,cAAI,QAAQ,UAAU,yBAAyB;AAC7C,oBAAQ,aAAa;AAAA,cACnB,MAAM;AAAA,cACN;AAAA,cACA,MAAM;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA;AAAA,cACA,aAAa,QAAQ;AAAA,YACvB,CAAC;AAAA,UACH,OAAO;AACL,kBAAM,aAAa,KAAK,KAAK,QAAQ,SAAS,uBAAuB;AACrE,qBAAS,IAAI,GAAG,IAAI,YAAY,KAAK,GAAG;AACtC,oBAAM,aAAa,IAAI;AACvB,oBAAM,QAAQ,QAAQ,MAAM,YAAY,aAAa,uBAAuB;AAC5E,sBAAQ,aAAa;AAAA,gBACnB,MAAM;AAAA,gBACN;AAAA,gBACA,MAAM;AAAA,gBACN,QAAQ;AAAA,gBACR,SAAS;AAAA,gBACT;AAAA,gBACA,aAAa,QAAQ;AAAA,gBACrB,YAAY;AAAA,gBACZ;AAAA,gBACA;AAAA,gBACA,WAAW;AAAA,cACb,CAAC;AAED,kBAAI,aAAa,GAAG;AAClB,sBAAM,IAAI,QAAQ,CAACF,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,cACvD;AAAA,YACF;AAAA,UACF;AAGA,gBAAM,WAAW,QAAQ,WAAW,QAAQ,kBAAkB,YAAY;AAG1E,gBAAM,MAAMG,SAAQ,YAAY;AAChC,cAAI,CAACD,YAAW,GAAG,GAAG;AACpB,kBAAME,OAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,UACtC;AAEA,gBAAMC,WAAU,cAAc,SAAS,OAAO;AAG9C,cAAI,oBAAoB;AACxB,cAAI,QAAQ,cAAc,SAAa,YAAY,YAAY,GAAG;AAChE,kBAAU,UAAU,cAAc,IAAI;AACtC,gCAAoB,MAAU,wBAAwB,YAAY;AAAA,UACpE;AAGA,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR;AAAA,YACA,aAAa,QAAQ;AAA
A,UACvB,CAAC;AAED,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN;AAAA,YACA,cAAc,OAAO,WAAW,SAAS,OAAO;AAAA,YAChD,WAAW,QAAQ,MAAM,IAAI,EAAE;AAAA,YAC/B,GAAI,qBAAqB,EAAE,aAAa,kBAAkB;AAAA,UAC5D;AAAA,QACF,WAAW,SAAS,eAAe;AAEjC,cAAI,eAAe,UAAa,eAAe,QAAW;AACxD,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO;AAAA,YACT;AAAA,UACF;AAEA,cAAI,CAACH,YAAW,YAAY,GAAG;AAC7B,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,mBAAmB,IAAI;AAAA,YAChC;AAAA,UACF;AAGA,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,QAAQ;AAAA,UACV,CAAC;AAGD,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,WAAW;AAAA,YACX,WAAW;AAAA,YACX,QAAQ;AAAA,UACV,CAAC;AAGD,gBAAM,WAAW,QAAQ,WAAW,QAAQ,kBAAkB,YAAY;AAG1E,gBAAM,iBAAiB,MAAMI,UAAS,cAAc,OAAO;AAG3D,cAAI,CAAC,eAAe,SAAS,UAAU,GAAG;AAExC,kBAAM,QAAQ,eAAe,MAAM,IAAI;AACvC,kBAAM,UAAU,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,IAAI;AAE5C,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO;AAAA,cACP,MAAM;AAAA,cACN,aAAa,MAAM,SAAS,KACxB,GAAG,OAAO;AAAA,OAAU,MAAM,SAAS,EAAE,iBACrC;AAAA,YACN;AAAA,UACF;AAGA,gBAAM,cAAc,eAAe,MAAM,UAAU,EAAE,SAAS;AAC9D,cAAI,cAAc,GAAG;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,SAAS,WAAW;AAAA,cAC3B,MAAM;AAAA,YACR;AAAA,UACF;AAGA,gBAAM,aAAa,eAAe,QAAQ,YAAY,UAAU;AAChE,gBAAMD,WAAU,cAAc,YAAY,OAAO;AAGjD,gBAAM,WAAW,WAAW,MAAM,IAAI,EAAE;AACxC,gBAAM,WAAW,WAAW,MAAM,IAAI,EAAE;AAGxC,cAAI,oBAAoB;AACxB,cAAI,QAAQ,cAAc,SAAa,YAAY,YAAY,GAAG;AAChE,kBAAU,UAAU,cAAc,IAAI;AACtC,gCAAoB,MAAU,wBAAwB,YAAY;AAAA,UACpE;AAGA,kBAAQ,aAAa;AAAA,YACnB,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,QAAQ;AAAA,UACV,CAAC;AAED,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,WAAW,WAAW;AAAA,YACtB,GAAI,qBAAqB,EAAE,aAAa,kBAAkB;AAAA,UAC5D;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,iBAAiB,IAAI;AAAA,QAC9B;AAAA,MACF,SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AM9TA;AAFA,SAAS,QAAAE,aAAY;AACrB,SAAS,KAAAC,UAAS;AAOlB,IAAM,kBAAkBA,GAAE,OAAO;AAAA,EAC/B,QAAQA,GAC
L,KAAK,CAAC,OAAO,QAAQ,QAAQ,OAAO,CAAC,EACrC,SAAS,wCAAwC;AAAA,EACpD,OAAOA,GACJ;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO,EAAE,SAAS,yBAAyB;AAAA,MACtD,OAAOA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS,mDAAmD;AAAA,IAC3F,CAAC;AAAA,EACH,EACC,SAAS,EACT,SAAS,8CAA8C;AAAA,EAC1D,QAAQA,GACL,OAAO,EACP,SAAS,EACT,SAAS,sDAAsD;AAAA,EAClE,QAAQA,GACL,KAAK,CAAC,WAAW,eAAe,aAAa,WAAW,CAAC,EACzD,SAAS,EACT,SAAS,qDAAqD;AACnE,CAAC;AAEM,SAAS,eAAe,SAA0B;AACvD,SAAOD,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAgBb,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,QAAQ,OAAO,QAAQ,OAAO,MAAuC;AACrF,UAAI;AACF,gBAAQ,QAAQ;AAAA,UACd,KAAK,OAAO;AACV,gBAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAEA,kBAAM,UAAU,MAAM,YAAY,WAAW,QAAQ,WAAW,KAAK;AAErE,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,YAAY,QAAQ;AAAA,cACpB,OAAO,QAAQ,IAAI,cAAc;AAAA,YACnC;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,kBAAM,QAAQ,MAAM,YAAY,aAAa,QAAQ,SAAS;AAE9D,kBAAM,QAAQ;AAAA,cACZ,OAAO,MAAM;AAAA,cACb,SAAS,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,SAAS,EAAE;AAAA,cAC/D,YAAY,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,aAAa,EAAE;AAAA,cACtE,WAAW,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,WAAW,EAAE;AAAA,cACnE,WAAW,MAAM,OAAO,CAAC,MAAgB,EAAE,WAAW,WAAW,EAAE;AAAA,YACrE;AAEA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR;AAAA,cACA,OAAO,MAAM,IAAI,cAAc;AAAA,YACjC;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,gBAAI,CAAC,QAAQ;AACX,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAEA,gBAAI,CAAC,QAAQ;AACX,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAEA,kBAAM,UAAU,MAAM,YAAY,aAAa,QAAQ,MAAM;AAE7D,gBAAI,CAAC,SAAS;AACZ,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,wBAAwB,MAAM;AAAA,cACvC;AAAA,YACF;AAEA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,MAAM,eAAe,OAAO;AAAA,YAC9B;AAAA,UACF;AAAA,UAEA,KAAK,SAAS;AACZ,kBAAM,QAAQ,MAAM,YAAY,aAAa,QAAQ,SAAS;AAE9D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,cAAc;AAAA,YAChB;AAAA,UACF;AAAA,UAEA;AACE,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,mBAAmB,MAAM;AAAA,YAClC;AAAA,QACJ;AAAA,MACF,SAAS,OAAY;AACnB,eAAO
;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,SAAS,eAAe,MAAgB;AACtC,SAAO;AAAA,IACL,IAAI,KAAK;AAAA,IACT,SAAS,KAAK;AAAA,IACd,QAAQ,KAAK;AAAA,IACb,OAAO,KAAK;AAAA,IACZ,WAAW,KAAK,UAAU,YAAY;AAAA,EACxC;AACF;;;AC1JA;AACA;AAHA,SAAS,QAAAE,aAAY;AACrB,SAAS,KAAAC,UAAS;AASlB,IAAM,uBAAuBA,GAAE,OAAO;AAAA,EACpC,QAAQA,GACL,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,SAAS,2EAA2E;AAAA,EACvF,WAAWA,GACR,OAAO,EACP,SAAS,EACT,SAAS,kDAAkD;AAChE,CAAC;AAEM,SAAS,oBAAoB,SAA+B;AACjE,SAAOD,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASb,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,QAAQ,UAAU,MAA4C;AAC9E,UAAI;AACF,gBAAQ,QAAQ;AAAA,UACd,KAAK,QAAQ;AACX,kBAAM,SAAS,MAAM,cAAc,QAAQ,iBAAiB;AAE5D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,YAAY,OAAO;AAAA,cACnB,QAAQ,OAAO,IAAI,CAAC,OAAO;AAAA,gBACzB,MAAM,EAAE;AAAA,gBACR,aAAa,EAAE;AAAA,cACjB,EAAE;AAAA,cACF,WAAW,uBAAuB,MAAM;AAAA,YAC1C;AAAA,UACF;AAAA,UAEA,KAAK,QAAQ;AACX,gBAAI,CAAC,WAAW;AACd,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO;AAAA,cACT;AAAA,YACF;AAGA,gBAAI,MAAM,aAAa,SAAS,QAAQ,WAAW,SAAS,GAAG;AAC7D,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,UAAU,SAAS;AAAA,cAC5B;AAAA,YACF;AAGA,kBAAM,QAAQ,MAAM,iBAAiB,WAAW,QAAQ,iBAAiB;AAEzE,gBAAI,CAAC,OAAO;AACV,oBAAM,YAAY,MAAM,cAAc,QAAQ,iBAAiB;AAC/D,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,UAAU,SAAS;AAAA,gBAC1B,iBAAiB,UAAU,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,cAC9C;AAAA,YACF;AAGA,kBAAM,aAAa,KAAK,QAAQ,WAAW,SAAS;AAEpD,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,QAAQ;AAAA,cACR,WAAW,MAAM;AAAA,cACjB,aAAa,MAAM;AAAA,cACnB,SAAS,MAAM;AAAA,cACf,eAAe,MAAM,QAAQ;AAAA,YAC/B;AAAA,UACF;AAAA,UAEA;AACE,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,mBAAmB,MAAM;AAAA,YAClC;AAAA,QACJ;AAAA,MACF,SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ACnGA,SAAS,QAAAE,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,WAAAC,gBAAe;AACvD,SAAS,cAAAC,oBAAkB;AAC3B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAQ9B,IAAM,oBAAoBC,GAAE,OAAO;AAAA,EACjC,OAAOA,GACJ,MAAMA,GAAE,OAAO,CAAC,EAChB,SAAS,EACT,SAAS,wHAAwH;AAAA,E
ACpI,KAAKA,GACF,QAAQ,EACR,SAAS,EACT,QAAQ,KAAK,EACb,SAAS,qEAAqE;AACnF,CAAC;AAKD,eAAe,mBACb,KACA,kBACA,WAAW,IACQ;AACnB,QAAM,QAAkB,CAAC;AACzB,QAAM,sBAA0B,uBAAuB;AAEvD,iBAAe,KAAK,YAAoB;AACtC,QAAI,MAAM,UAAU,SAAU;AAE9B,QAAI;AACF,YAAM,UAAU,MAAMC,SAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AAEjE,iBAAW,SAAS,SAAS;AAC3B,YAAI,MAAM,UAAU,SAAU;AAE9B,cAAM,WAAWC,SAAQ,YAAY,MAAM,IAAI;AAG/C,YAAI,MAAM,YAAY,GAAG;AACvB,cAAI,CAAC,gBAAgB,QAAQ,QAAQ,SAAS,SAAS,UAAU,EAAE,SAAS,MAAM,IAAI,GAAG;AACvF;AAAA,UACF;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAMC,SAAQ,MAAM,IAAI;AAC9B,cAAI,oBAAoB,SAAS,GAAG,GAAG;AACrC,kBAAM,KAAK,QAAQ;AAAA,UACrB;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,KAAK,GAAG;AACd,SAAO;AACT;AAEO,SAAS,iBAAiB,SAA4B;AAC3D,SAAOC,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAYI,QAAQ,gBAAgB;AAAA,IAEzC,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,MAAM,MAAyC;AAC/D,UAAI;AAEF,YAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,gBAAM,iBAAiB,MAAU,kBAAkB;AAEnD,cAAI,OAAO,KAAK,cAAc,EAAE,WAAW,GAAG;AAC5C,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,SAAS;AAAA,cACT,OAAO,CAAC;AAAA,cACR,aAAa;AAAA,cACb,eAAe;AAAA,YACjB;AAAA,UACF;AAEA,iBAAO,wBAAwB,gBAAgB,QAAQ,gBAAgB;AAAA,QACzE;AAGA,cAAM,eAAyB,CAAC;AAEhC,mBAAW,QAAQ,OAAO;AACxB,gBAAM,eAAeC,YAAW,IAAI,IAChC,OACAH,SAAQ,QAAQ,kBAAkB,IAAI;AAE1C,cAAI,CAACI,aAAW,YAAY,GAAG;AAC7B;AAAA,UACF;AAEA,gBAAM,QAAQ,MAAMC,MAAK,YAAY;AAErC,cAAI,MAAM,YAAY,GAAG;AACvB,kBAAM,WAAW,MAAM,mBAAmB,cAAc,QAAQ,gBAAgB;AAChF,yBAAa,KAAK,GAAG,QAAQ;AAAA,UAC/B,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAQ,YAAY,YAAY,GAAG;AACjC,2BAAa,KAAK,YAAY;AAAA,YAChC;AAAA,UACF;AAAA,QACF;AAEA,YAAI,aAAa,WAAW,GAAG;AAC7B,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,SAAS,8DAAkE,uBAAuB,EAAE,KAAK,IAAI;AAAA,YAC7G,OAAO,CAAC;AAAA,YACR,aAAa;AAAA,YACb,eAAe;AAAA,UACjB;AAAA,QACF;AAGA,cAAM,QAAQ;AAAA,UACZ,aAAa,IAAI,UAAY,UAAU,MAAM,IAAI,CAAC;AAAA,QACpD;AAGA,cAAM,iBAA+C,CAAC;AAEtD,mBAAW,QAAQ,cAAc;AAC/B,gBAAM,cAAc,MAAU,eAAe,IAAI;AACjD,cAAI,YAAY,SAAS,GAAG;AAC1B,2BAAe,IAAI,IAAI;AAAA,UACzB;AAAA,QACF;AAEA,eAAO,wBAAwB,gBAAgB,QAAQ,gBAAgB;AAAA,MACzE,
SAAS,OAAY;AACnB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAKA,SAAS,wBACP,gBACA,kBACA;AACA,MAAI,cAAc;AAClB,MAAI,gBAAgB;AACpB,MAAI,YAAY;AAEhB,QAAM,QAaD,CAAC;AAEN,aAAW,CAAC,UAAU,WAAW,KAAK,OAAO,QAAQ,cAAc,GAAG;AACpE,UAAM,eAAeC,UAAS,kBAAkB,QAAQ;AACxD,QAAI,aAAa;AACjB,QAAI,eAAe;AAEnB,UAAM,uBAAuB,YAAY,IAAI,OAAK;AAChD,YAAM,WAAW,kBAAkB,EAAE,QAAQ;AAE7C,UAAI,EAAE,4BAA2C;AAC/C;AACA;AAAA,MACF,WAAW,EAAE,8BAA6C;AACxD;AACA;AAAA,MACF,OAAO;AACL;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA,MAAM,EAAE,MAAM,MAAM,OAAO;AAAA,QAC3B,QAAQ,EAAE,MAAM,MAAM,YAAY;AAAA,QAClC,SAAS,EAAE;AAAA,QACX,QAAQ,EAAE;AAAA,QACV,MAAM,EAAE;AAAA,MACV;AAAA,IACF,CAAC;AAED,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN;AAAA,MACA,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAGA,QAAM,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM;AAExC,QAAM,YAAY,cAAc,KAAK,gBAAgB;AAErD,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS,YACL,SAAS,WAAW,iBAAiB,aAAa,kBAAkB,MAAM,MAAM,cAChF,2BAA2B,OAAO,KAAK,cAAc,EAAE,UAAU,KAAK;AAAA,IAC1E;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS,YACL,cAAc,KAAK,IACnB;AAAA,EACN;AACF;AAKA,SAAS,kBAAkB,UAA2B;AACpD,UAAQ,UAAU;AAAA,IAChB;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKA,SAAS,cACP,OASQ;AACR,QAAM,QAAkB,CAAC;AAEzB,aAAW,QAAQ,OAAO;AACxB,UAAM,KAAK;AAAA,EAAK,KAAK,YAAY,GAAG;AACpC,eAAW,KAAK,KAAK,YAAY,MAAM,GAAG,EAAE,GAAG;AAC7C,YAAM,SAAS,EAAE,aAAa,UAAU,WAAM,EAAE,aAAa,YAAY,iBAAO;AAChF,YAAM,KAAK,KAAK,MAAM,KAAK,EAAE,IAAI,IAAI,EAAE,MAAM,KAAK,EAAE,OAAO,EAAE;AAAA,IAC/D;AACA,QAAI,KAAK,YAAY,SAAS,IAAI;AAChC,YAAM,KAAK,aAAa,KAAK,YAAY,SAAS,EAAE,OAAO;AAAA,IAC7D;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;;;AC7SA,SAAS,QAAAC,cAAY;AACrB,SAAS,KAAAC,WAAS;;;ACDlB;AAAA,EAEE;AAAA,EACA;AAAA,OAEK;AACP,SAAS,UAAAC,eAAc;AAEvB;AA0DO,IAAe,WAAf,MAA2C;AAAA;AAAA,EAQtC;AAAA;AAAA,EAGA,WAAmB;AAAA,EAE7B,YAAY,OAAgB;AAC1B,SAAK,QAAQ,SAAS,gBAAgB;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBU,YAAY,MAAc,OAAgC;AAClE,WAAO,EAAE,MAAM,MAAM;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,I
AAI,SAA+D;AACvE,UAAM,EAAE,MAAM,WAAW,YAAY,YAAY,YAAY,IAAI;AACjE,UAAM,QAAwB,CAAC;AAG/B,UAAM,YAAY,MAAM,gBAAgB,OAAO;AAAA,MAC7C;AAAA,MACA;AAAA,MACA,cAAc,KAAK;AAAA,MACnB;AAAA,MACA,OAAO,KAAK;AAAA,IACd,CAAC;AAED,UAAM,UAAU,OAAO,SAAiD;AACtE,YAAM,WAAyB;AAAA,QAC7B,IAAIC,QAAO,CAAC;AAAA,QACZ,WAAW,KAAK,IAAI;AAAA,QACpB,GAAG;AAAA,MACL;AACA,YAAM,KAAK,QAAQ;AAGnB,YAAM,gBAAgB,QAAQ,UAAU,IAAI,QAAQ;AAGpD,YAAM,aAAa;AAAA,QACjB,MAAM;AAAA,QACN,YAAY,UAAU;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAEA,QAAI;AACF,YAAM,QAAQ,KAAK,SAAS,OAAO;AACnC,YAAM,eAAe,KAAK,gBAAgB,OAAO;AAGjD,YAAM,SAAS,MAAM,aAAa;AAAA,QAChC,OAAO,aAAa,KAAK,KAAK;AAAA,QAC9B,QAAQ;AAAA,QACR,UAAU;AAAA,UACR,EAAE,MAAM,QAAQ,SAAS,KAAK;AAAA,QAChC;AAAA,QACA;AAAA,QACA,UAAU,YAAY,KAAK,QAAQ;AAAA,QACnC;AAAA,QACA,cAAc,OAAO,SAAS;AAE5B,cAAI,KAAK,MAAM;AACb,kBAAM,QAAQ;AAAA,cACZ,MAAM;AAAA,cACN,SAAS,KAAK;AAAA,YAChB,CAAC;AACD,kBAAM,aAAa;AAAA,cACjB,MAAM;AAAA,cACN,YAAY,UAAU;AAAA,cACtB,cAAc,KAAK;AAAA,cACnB,MAAM,KAAK;AAAA,YACb,CAAC;AAAA,UACH;AAGA,cAAI,KAAK,WAAW;AAClB,uBAAW,YAAY,KAAK,WAAW;AACrC,oBAAM,QAAQ;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,WAAW,SAAS,QAAQ;AAAA,gBACrC,UAAU,SAAS;AAAA,gBACnB,WAAW,SAAS;AAAA,cACtB,CAAC;AACD,oBAAM,aAAa;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY,UAAU;AAAA,gBACtB,cAAc,KAAK;AAAA,gBACnB,UAAU,SAAS;AAAA,gBACnB,WAAW,SAAS;AAAA,cACtB,CAAC;AAAA,YACH;AAAA,UACF;AAGA,cAAI,KAAK,aAAa;AACpB,uBAAW,cAAc,KAAK,aAAa;AACzC,oBAAM,QAAQ;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,eAAe,WAAW,QAAQ;AAAA,gBAC3C,UAAU,WAAW;AAAA,gBACrB,YAAY,WAAW;AAAA,cACzB,CAAC;AACD,oBAAM,aAAa;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY,UAAU;AAAA,gBACtB,cAAc,KAAK;AAAA,gBACnB,UAAU,WAAW;AAAA,gBACrB,YAAY,WAAW;AAAA,cACzB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAGD,YAAM,eAAe,KAAK,YAAY,OAAO,MAAM,KAAK;AAGxD,YAAM,gBAAgB,SAAS,UAAU,IAAI,YAAY;AAEzD,YAAM,aAAa;AAAA,QACjB,MAAM;AAAA,QACN,YAAY,UAAU;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,QAAQ;AAAA,MACV,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT,QAAQ;AAAA,QACR;AAAA,QACA,aAAa,UAAU;AAAA,MACzB;AAAA,IACF,SAAS,OAAY;AACnB,YAAM,eAAe,MAAM,WAAW;AAGtC,YAAM,gBAAgB,UAAU,UAAU,IAA
I,YAAY;AAE1D,YAAM,aAAa;AAAA,QACjB,MAAM;AAAA,QACN,YAAY,UAAU;AAAA,QACtB,cAAc,KAAK;AAAA,QACnB,OAAO;AAAA,MACT,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,QACP;AAAA,QACA,aAAa,UAAU;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAO,SAAoE;AAChF,UAAM,SAAkC,CAAC;AACzC,QAAI,cAAsE;AAC1E,QAAI,OAAO;AAGX,UAAM,aAAsC,CAAC;AAG7C,UAAM,aAAa,KAAK,IAAI;AAAA,MAC1B,GAAG;AAAA,MACH,YAAY,OAAO,UAAU;AAC3B,mBAAW,KAAK,KAAK;AACrB,YAAI,aAAa;AACf,sBAAY,WAAW,MAAM,CAAE;AAC/B,wBAAc;AAAA,QAChB;AAAA,MACF;AAAA,IACF,CAAC,EAAE,KAAK,CAAC,WAAW;AAClB,aAAO;AACP,UAAI,aAAa;AACf,oBAAY,IAAI;AAAA,MAClB;AACA,aAAO;AAAA,IACT,CAAC;AAGD,WAAO,CAAC,QAAQ,WAAW,SAAS,GAAG;AACrC,UAAI,WAAW,SAAS,GAAG;AACzB,cAAM,WAAW,MAAM;AAAA,MACzB,WAAW,CAAC,MAAM;AAEhB,cAAM,QAAQ,MAAM,IAAI,QAAsC,CAACC,cAAY;AACzE,wBAAcA;AAAA,QAChB,CAAC;AACD,YAAI,OAAO;AACT,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,UAAM;AAAA,EACR;AACF;;;ACtSA,SAAS,QAAAC,aAA0B;AACnC,SAAS,KAAAC,WAAS;AAClB,SAAS,QAAAC,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,YAAAC,WAAU,QAAAC,OAAM,WAAAC,gBAAe;AACxC,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,mBAAkB;AAC9C,SAAS,cAAAC,oBAAkB;AAI3B;;;ACCA,SAAS,QAAAC,aAAY;AACrB,SAAS,KAAAC,UAAS;AAClB,SAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,YAAAC,iBAAgB;AACxD,SAAS,YAAAC,WAAU,WAAAC,gBAAe;AAClC,SAAS,cAAAC,oBAAkB;AAC3B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,oBAAoB;AAS7B,IAAM,uBAAuBC,GAAE,OAAO;AAAA,EACpC,QAAQA,GAAE,OAAO,EAAE;AAAA,IACjB;AAAA,EACF;AAAA,EACA,UAAUA,GAAE,OAAO,EAAE,SAAS,EAAE;AAAA,IAC9B;AAAA,EACF;AAAA,EACA,OAAOA,GAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;AAAA,IACtC;AAAA,EACF;AACF,CAAC;AAOD,SAAS,WAAW,UAA2B;AAC7C,QAAM,aAAa,SAAS,QAAQ,OAAO,GAAG;AAG9C,MAAI,oEAAoE,KAAK,UAAU,EAAG,QAAO;AAEjG,MAAI,uCAAuC,KAAK,UAAU,EAAG,QAAO;AACpE,SAAO;AACT;AAGA,SAAS,iBAAiB,UAAkB,kBAA8C;AACxF,QAAM,MAAMC,UAAS,kBAAkB,QAAQ,EAAE,QAAQ,OAAO,GAAG;AAGnE,QAAM,WAAW,IAAI,MAAM,6EAA6E;AACxG,MAAI,SAAU,QAAO,SAAS,CAAC,KAAK;AAEpC,QAAM,aAAa,IAAI,MAAM,0CAA0C;AACvE,MAAI,WAAY,QAAO,WAAW,CAAC,KAAK;AACxC,SAAO;AACT;AAGA,SAAS,eAAe,MAA0B;AAChD,QAAM,QAAgC;AAAA,IACpC,cAAiB,GAAG;AAAA,IACpB,kBAAoB,GAAG;AAAA,IACvB,eAAk
B,GAAG;AAAA,IACrB,iBAAoB,GAAG;AAAA,IACvB,kBAAoB,GAAG;AAAA,IACvB,mBAAqB,GAAG;AAAA,IACxB,cAAgB,GAAG;AAAA,IACnB,kBAAoB,GAAG;AAAA,IACvB,oBAAuB,GAAG;AAAA,IAC1B,eAAkB,GAAG;AAAA,IACrB,kBAAqB,GAAG;AAAA,IACxB,uBAAyB,GAAG;AAAA,IAC5B,cAAiB,GAAG;AAAA,IACpB,oBAAsB,GAAG;AAAA,IACzB,gBAAkB,GAAG;AAAA,EACvB;AACA,SAAO,MAAM,IAAI,KAAK;AACxB;AAIA,SAAS,qBACP,SACA,MACA,WACuB;AACvB,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,IAAI,MAAO;AAChB,UAAM,EAAE,OAAO,IAAI,IAAI,IAAI;AAC3B,UAAM,aAAa,OAAO,MAAM,QAAS,SAAS,MAAM,QAAQ,aAAa,MAAM;AAEnF,UAAM,YAAY,OAAO,IAAI,QAAS,SAAS,IAAI,QAAQ,YAAY,IAAI;AAC3E,QAAI,cAAc,WAAW;AAC3B,UAAI,IAAI,UAAU,QAAQ;AACxB,cAAM,QAAQ,qBAAqB,IAAI,UAAU,MAAM,SAAS;AAChE,YAAI,MAAO,QAAO;AAAA,MACpB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAGA,SAAS,iBAAiB,SAA2B,MAAqC;AACxF,aAAW,OAAO,SAAS;AACzB,QAAI,IAAI,SAAS,QAAQ,IAAI,eAAgB,QAAO;AACpD,QAAI,IAAI,UAAU;AAChB,YAAM,QAAQ,iBAAiB,IAAI,UAAU,IAAI;AACjD,UAAI,MAAO,QAAO;AAAA,IACpB;AAAA,EACF;AACA,SAAO;AACT;AAGA,SAAS,eAAe,MAAsB;AAC5C,SAAO,KAAK,QAAQ,cAAc,EAAE,EAAE,QAAQ,UAAU,EAAE,EAAE,KAAK;AACnE;AAGA,eAAe,cAAc,QAAgB,kBAA4F;AAEvI,QAAM,UAAU,OAAO,QAAQ,uBAAuB,MAAM;AAG5D,QAAM,aAAa;AAAA,IACjB,oFAAoF,OAAO;AAAA,IAC3F,iCAAiC,OAAO;AAAA,EAC1C;AAEA,aAAW,WAAW,YAAY;AAChC,QAAI;AACF,YAAM,SAAS,aAAa,MAAM;AAAA,QAChC;AAAA,QAAM;AAAA,QACN;AAAA,QAAM;AAAA,QACN;AAAA,QAAU;AAAA,QACV;AAAA,QAAM;AAAA,MACR,GAAG;AAAA,QACD,KAAK;AAAA,QACL,UAAU;AAAA,QACV,SAAS;AAAA,QACT,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,MAChC,CAAC,EAAE,KAAK;AAER,UAAI,QAAQ;AACV,cAAM,YAAY,OAAO,MAAM,IAAI,EAAE,CAAC;AACtC,cAAM,QAAQ,UAAU,MAAM,mBAAmB;AACjD,YAAI,OAAO;AACT,gBAAM,MAAM,MAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,iBAAO;AAAA,YACL,UAAUC,SAAQ,kBAAkB,MAAM,CAAC,CAAC;AAAA,YAC5C,MAAM,SAAS,MAAM,CAAC,CAAC,IAAI;AAAA,YAC3B,MAAM,OAAO,IAAI,MAAM;AAAA,UACzB;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,QAAM,aAAa,IAAI;AAAA,IACrB,qEAAqE,OAAO;AAAA,EAC9E;AACA,QAAM,iBAAiB,oBAAI,IAAI,CAAC,OAAO,QAAQ,OAAO,MAAM,CAAC;AAC7D,QAAM,eAAe,oBAAI,IAAI,CAAC,gBAAgB,QAAQ,QAAQ,SAAS,SAAS,UAAU,CAAC;AAE3F,iBAAe,OAAO,KAAa,UAAoF;AACrH,QAAI,YAAY,EAAG,QAAO;AAC1B,QAAI,YAAY;AA
EhB,QAAI;AACF,YAAM,UAAU,MAAMC,SAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,iBAAW,SAAS,SAAS;AAC3B,YAAI,aAAa,EAAG,QAAO;AAC3B,cAAM,WAAWD,SAAQ,KAAK,MAAM,IAAI;AAExC,YAAI,MAAM,YAAY,GAAG;AACvB,cAAI,aAAa,IAAI,MAAM,IAAI,KAAK,MAAM,KAAK,WAAW,GAAG,EAAG;AAChE,gBAAM,QAAQ,MAAM,OAAO,UAAU,SAAS;AAC9C,cAAI,MAAO,QAAO;AAClB,uBAAa;AAAA,QACf,WAAW,MAAM,OAAO,GAAG;AACzB,gBAAM,MAAM,MAAM,KAAK,UAAU,MAAM,KAAK,YAAY,GAAG,CAAC;AAC5D,cAAI,CAAC,eAAe,IAAI,GAAG,EAAG;AAC9B;AAEA,gBAAM,UAAU,MAAME,UAAS,UAAU,OAAO;AAChD,gBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,mBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAI,WAAW,KAAK,MAAM,CAAC,CAAC,GAAG;AAC7B,oBAAM,MAAM,MAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,kBAAI,OAAO,GAAG;AACZ,uBAAO,EAAE,UAAU,UAAU,MAAM,GAAG,MAAM,IAAI;AAAA,cAClD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAA8B;AACtC,WAAO;AAAA,EACT;AAEA,SAAO,OAAO,kBAAkB,GAAG;AACrC;AAMA,IAAM,gBAAgB;AACtB,IAAM,qBAAqB;AAC3B,IAAM,gCAAgC;AAE/B,SAAS,oBAAoB,SAA+B;AACjE,SAAOC,MAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAeI,QAAQ,gBAAgB;AAAA,IAEzC,aAAa;AAAA,IAEb,SAAS,OAAO,EAAE,QAAQ,UAAU,MAAM,MAA4C;AACpF,YAAM,WAAW,KAAK,IAAI,SAAS,GAAG,CAAC;AAEvC,UAAI;AAIF,YAAI;AACJ,YAAI,UAAU;AACd,YAAI,UAAU;AACd,YAAI,YAAmC;AAEvC,YAAI,UAAU;AACZ,gBAAM,UAAUC,YAAW,QAAQ,IAC/B,WACAJ,SAAQ,QAAQ,kBAAkB,QAAQ;AAE9C,cAAI,CAACK,aAAW,OAAO,GAAG;AACxB,mBAAO,EAAE,SAAS,OAAO,OAAO,mBAAmB,QAAQ,GAAG;AAAA,UAChE;AAEA,cAAI,CAAK,YAAY,OAAO,GAAG;AAC7B,mBAAO,EAAE,SAAS,OAAO,OAAO,sCAA0C,uBAAuB,EAAE,KAAK,IAAI,CAAC,GAAG;AAAA,UAClH;AAGA,gBAAU,UAAU,SAAS,IAAI;AACjC,gBAAM,UAAU,MAAU,mBAAmB,OAAO;AACpD,sBAAY,iBAAiB,SAAS,MAAM;AAE5C,cAAI,WAAW;AACb,0BAAc;AACd,sBAAU,UAAU,eAAe,MAAM;AACzC,sBAAU,UAAU,eAAe,MAAM;AAAA,UAC3C,OAAO;AAEL,kBAAM,UAAU,MAAMH,UAAS,SAAS,OAAO;AAC/C,kBAAMI,SAAQ,QAAQ,MAAM,IAAI;AAEhC,kBAAM,aAAa,IAAI;AAAA,cACrB,qEAAqE,OAAO,QAAQ,uBAAuB,MAAM,CAAC;AAAA,YACpH;AACA,qBAAS,IAAI,GAAG,IAAIA,OAAM,QAAQ,KAAK;AACrC,kBAAI,WAAW,KAAKA,OAAM,CAAC,CAAC,GAAG;AAC7B,sBAAM,MAAMA,OAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,oBAAI,QAAQ,IAAI;AACd,gCAAc;AACd,4BAAU;AACV,4BAAU;AACV;A
AAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,CAAC,aAAa;AAChB,uBAAS,IAAI,GAAG,IAAIA,OAAM,QAAQ,KAAK;AACrC,sBAAM,MAAMA,OAAM,CAAC,EAAE,QAAQ,MAAM;AACnC,oBAAI,QAAQ,IAAI;AACd,gCAAc;AACd,4BAAU;AACV,4BAAU;AACV;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,OAAO;AAEL,gBAAM,QAAQ,MAAM,cAAc,QAAQ,QAAQ,gBAAgB;AAClE,cAAI,OAAO;AACT,0BAAc,MAAM;AACpB,sBAAU,MAAM;AAChB,sBAAU,MAAM;AAAA,UAClB;AAAA,QACF;AAEA,YAAI,CAAC,aAAa;AAChB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,0BAA0B,MAAM;AAAA,UACzC;AAAA,QACF;AAGA,cAAU,UAAU,aAAa,IAAI;AAKrC,cAAM,WAAW,MAAU,SAAS,aAAa,SAAS,OAAO;AACjE,cAAM,WAAW,WAAW,eAAe,QAAQ,IAAI;AAKvD,cAAM,cAAc,MAAU,mBAAmB,WAAW;AAC5D,YAAI,CAAC,aAAa,YAAY,SAAS,GAAG;AACxC,sBAAY,iBAAiB,aAAa,MAAM;AAAA,QAClD;AAKA,cAAM,aAAa,MAAU,cAAc,aAAa,SAAS,SAAS,KAAK;AAG/E,cAAM,aAAa,oBAAI,IAAwB;AAC/C,mBAAW,OAAO,YAAY;AAC5B,gBAAM,UAAUC,eAAc,IAAI,GAAG;AACrC,cAAI,CAAC,WAAW,IAAI,OAAO,GAAG;AAC5B,uBAAW,IAAI,SAAS,CAAC,CAAC;AAAA,UAC5B;AACA,qBAAW,IAAI,OAAO,EAAG,KAAK,GAAG;AAAA,QACnC;AAmBA,cAAM,eAA8B,CAAC;AACrC,YAAI,YAAY;AAEhB,mBAAW,CAAC,SAAS,IAAI,KAAK,YAAY;AACxC,cAAI,aAAa,cAAe;AAEhC,cAAI,YAAY,YAAa;AAC7B;AAEA,gBAAM,UAAUR,UAAS,QAAQ,kBAAkB,OAAO;AAC1D,gBAAM,WAAW,WAAW,OAAO;AACnC,gBAAM,YAAY,WAAW,iBAAiB,SAAS,QAAQ,gBAAgB,IAAI;AAGnF,gBAAU,UAAU,SAAS,KAAK;AAClC,gBAAM,iBAAiB,MAAU,mBAAmB,OAAO;AAE3D,gBAAM,OAAO,oBAAI,IAA2F;AAE5G,qBAAW,OAAO,MAAM;AACtB,kBAAM,YAAY;AAAA,cAChB;AAAA,cACA,IAAI,MAAM,MAAM;AAAA,cAChB,IAAI,MAAM,MAAM;AAAA,YAClB;AACA,gBAAI,aAAa,CAAC,KAAK,IAAI,UAAU,IAAI,GAAG;AAE1C,kBAAI,iBAAgC;AACpC,kBAAI;AACF,sBAAM,MAAM,MAAU;AAAA,kBACpB;AAAA,kBACA,UAAU,eAAe,MAAM;AAAA,kBAC/B,UAAU,eAAe,MAAM;AAAA,gBACjC;AACA,oBAAI,IAAK,kBAAiB,eAAe,GAAG,EAAE,MAAM,IAAI,EAAE,CAAC;AAAA,cAC7D,QAAQ;AAAA,cAAa;AAErB,mBAAK,IAAI,UAAU,MAAM;AAAA,gBACvB,MAAM,UAAU;AAAA,gBAChB,MAAM,eAAe,UAAU,IAAI;AAAA,gBACnC,MAAM,UAAU,eAAe,MAAM,OAAO;AAAA,gBAC5C,MAAM,UAAU,eAAe,MAAM;AAAA,gBACrC,UAAU,kBAAkB;AAAA,cAC9B,CAAC;AAAA,YACH;AAAA,UACF;AAEA,uBAAa,KAAK;AAAA,YAChB,UAAU;AAAA,YACV,cAAc;AAAA,YACd,QAAQ;AAAA,YACR;AAAA,YACA,mBAAmB,MAAM,KAAK,KAAK,OAAO,CAAC;AAAA,UAC7C,CAAC;AAAA,QACH;AAgB
A,cAAM,aAA0B,CAAC;AAEjC,YAAI,YAAY,GAAG;AACjB,qBAAW,WAAW,aAAa,MAAM,GAAG,kBAAkB,GAAG;AAC/D,uBAAW,OAAO,QAAQ,kBAAkB,MAAM,GAAG,6BAA6B,GAAG;AACnF,kBAAI;AAEF,sBAAM,aAAa,IAAI,OAAO;AAC9B,sBAAM,UAAU,IAAI;AAEpB,sBAAM,cAAc,MAAU;AAAA,kBAC5B,QAAQ;AAAA,kBAAU;AAAA,kBAAY;AAAA,kBAAS;AAAA,gBACzC;AAEA,sBAAM,UAA6B,CAAC;AACpC,sBAAM,YAAY,oBAAI,IAAY;AAElC,2BAAW,OAAO,YAAY,MAAM,GAAG,EAAE,GAAG;AAC1C,wBAAM,SAASQ,eAAc,IAAI,GAAG;AACpC,sBAAI,WAAW,QAAQ,YAAY,WAAW,YAAa;AAC3D,sBAAI,UAAU,IAAI,MAAM,EAAG;AAC3B,4BAAU,IAAI,MAAM;AAEpB,wBAAM,QAAQR,UAAS,QAAQ,kBAAkB,MAAM;AACvD,wBAAM,SAAS,WAAW,MAAM;AAChC,wBAAM,UAAU,SAAS,iBAAiB,QAAQ,QAAQ,gBAAgB,IAAI;AAG9E,sBAAI;AACJ,sBAAI;AACF,0BAAU,UAAU,QAAQ,KAAK;AACjC,0BAAM,YAAY,MAAU,mBAAmB,MAAM;AACrD,0BAAM,YAAY,qBAAqB,WAAW,IAAI,MAAM,MAAM,MAAM,IAAI,MAAM,MAAM,SAAS;AACjG,wBAAI,UAAW,iBAAgB,UAAU;AAAA,kBAC3C,QAAQ;AAAA,kBAAa;AAErB,0BAAQ,KAAK;AAAA,oBACX,cAAc;AAAA,oBACd,QAAQ;AAAA,oBACR,WAAW;AAAA,oBACX,kBAAkB;AAAA,kBACpB,CAAC;AAAA,gBACH;AAEA,oBAAI,QAAQ,SAAS,GAAG;AACtB,6BAAW,KAAK;AAAA,oBACd,cAAc,IAAI;AAAA,oBAClB,YAAY,QAAQ;AAAA,oBACpB,MAAM;AAAA,kBACR,CAAC;AAAA,gBACH;AAAA,cACF,QAAQ;AAAA,cAA4B;AAAA,YACtC;AAAA,UACF;AAAA,QACF;AAKA,cAAM,aAAaA,UAAS,QAAQ,kBAAkB,WAAW;AACjE,cAAM,QAAkB,CAAC;AAGzB,cAAM,KAAK,OAAO,MAAM,MAAM;AAC9B,cAAM,KAAK,SAAS,UAAU,IAAI,UAAU,CAAC,EAAE;AAC/C,YAAI,UAAW,OAAM,KAAK,SAAS,eAAe,UAAU,IAAI,CAAC,EAAE;AACnE,YAAI,SAAU,OAAM,KAAK,SAAS,QAAQ,EAAE;AAG5C,cAAM,mBAAmB,WAAW,OAAO,OAAKQ,eAAc,EAAE,GAAG,MAAM,WAAW,EAAE;AACtF,cAAM,oBAAoB,WAAW,QAAQ,WAAW,IAAI,WAAY,IAAI,IAAI;AAEhF,YAAI,aAAa,SAAS,GAAG;AAC3B,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,sBAAsB,gBAAgB,kBAAkB,iBAAiB,aAAa;AAEjG,gBAAM,QAAQ,aAAa,OAAO,OAAK,EAAE,MAAM;AAC/C,gBAAM,WAAW,aAAa,OAAO,OAAK,CAAC,EAAE,MAAM;AAEnD,cAAI,MAAM,SAAS,GAAG;AACpB,kBAAM,KAAK,EAAE;AACb,kBAAM,KAAK,eAAe;AAC1B,uBAAW,QAAQ,OAAO;AACxB,oBAAM,KAAK,KAAK,KAAK,YAAY,GAAG,KAAK,YAAY,WAAM,KAAK,SAAS,KAAK,EAAE,EAAE;AAClF,yBAAW,KAAK,KAAK,mBAAmB;AACtC,sBAAM,KAAK,0BAAW,EAAE,IAAI,KAAK,EAAE,IAAI,IAAI,EAAE,IAAI,IAAI,EAAE,WAAW,WAAM,EAAE,QAAQ,KAAK,EAAE,EAAE;AAAA,cAC7F;AAAA,YACF;AAAA,UACF;AAEA,cAA
I,SAAS,SAAS,GAAG;AACvB,kBAAM,KAAK,EAAE;AACb,kBAAM,KAAK,uBAAuB;AAClC,uBAAW,OAAO,UAAU;AAC1B,oBAAM,KAAK,KAAK,IAAI,YAAY,EAAE;AAClC,yBAAW,KAAK,IAAI,mBAAmB;AACrC,sBAAM,WAAW,EAAE,YAAY,EAAE,SAAS,SAAS,MAAM,WAAM,EAAE,QAAQ,KAAK;AAC9E,sBAAM,KAAK,0BAAW,EAAE,IAAI,KAAK,EAAE,IAAI,IAAI,EAAE,IAAI,IAAI,QAAQ,EAAE;AAAA,cACjE;AAAA,YACF;AAAA,UACF;AAAA,QACF,OAAO;AACL,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,wFAAwF;AAAA,QACrG;AAGA,YAAI,WAAW,SAAS,GAAG;AACzB,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,iCAAiC;AAC5C,qBAAW,MAAM,YAAY;AAC3B,kBAAM,KAAK,EAAE;AACb,kBAAM,KAAK,GAAG,GAAG,YAAY,KAAK,GAAG,UAAU,eAAe;AAC9D,uBAAW,OAAO,GAAG,MAAM;AACzB,oBAAM,MAAM,IAAI,SAAS,YAAY;AACrC,oBAAM,QAAQ,IAAI,YAAY,WAAM,IAAI,SAAS,KAAK;AACtD,oBAAM,YAAY,IAAI,mBAAmB,OAAO,IAAI,gBAAgB,KAAK;AACzE,oBAAM,KAAK,wBAAS,IAAI,YAAY,GAAG,GAAG,GAAG,KAAK,GAAG,SAAS,EAAE;AAAA,YAClE;AAAA,UACF;AAAA,QACF;AAGA,YAAI,YAAY,SAAS,GAAG;AAC1B,gBAAM,KAAK,EAAE;AACb,gBAAM,KAAK,uBAAuBC,UAAS,WAAW,CAAC,OAAO;AAC9D,qBAAW,OAAO,aAAa;AAC7B,kBAAM,SAAS,IAAI,SAAS,SAAS,oBAAe;AACpD,kBAAM,KAAK,KAAK,IAAI,IAAI,KAAK,eAAe,IAAI,IAAI,CAAC,IAAI,IAAI,eAAe,MAAM,OAAO,CAAC,IAAI,MAAM,EAAE;AACtG,gBAAI,IAAI,UAAU;AAChB,yBAAW,SAAS,IAAI,SAAS,MAAM,GAAG,EAAE,GAAG;AAC7C,sBAAM,KAAK,0BAAW,MAAM,IAAI,KAAK,eAAe,MAAM,IAAI,CAAC,IAAI,MAAM,eAAe,MAAM,OAAO,CAAC,GAAG;AAAA,cAC3G;AACA,kBAAI,IAAI,SAAS,SAAS,IAAI;AAC5B,sBAAM,KAAK,eAAe,IAAI,SAAS,SAAS,EAAE,OAAO;AAAA,cAC3D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,kBAAkB,MAAM,KAAK,IAAI;AAEvC,eAAO;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA,UAAU;AAAA,UACV,MAAM,UAAU;AAAA,UAChB,MAAM,YAAY,eAAe,UAAU,IAAI,IAAI;AAAA,UACnD,UAAU,YAAY;AAAA,UACtB,gBAAgB;AAAA,UAChB,gBAAgB;AAAA,UAChB,OAAO,aACJ,OAAO,OAAK,EAAE,MAAM,EACpB,IAAI,QAAM,EAAE,MAAM,EAAE,cAAc,OAAO,EAAE,UAAU,EAAE;AAAA,UAC1D;AAAA,QACF;AAAA,MACF,SAAS,OAAgB;AACvB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ADvlBA,IAAMC,aAAYC,WAAUC,KAAI;AAEhC,IAAMC,oBAAmB;AACzB,IAAMC,iBAAgB,IAAI,OAAO;AACjC,IAAM,oBAAoB;AAC1B,IAAM,eAAe;AAuCd,IAAM,iBAAN,cAA6B,SAAuB;AAAA,EAChD,OAAO;AAAA,EACP,OAAO;
AAAA,EAEhB,YAAY,OAAgB;AAC1B,UAAM,SAAS,gBAAgB,MAAM;AACrC,SAAK,WAAW;AAAA,EAClB;AAAA,EAEU,gBAAgB,SAAqC;AAC7D,UAAM,eAAe,QAAQ,oBACzB;AAAA;AAAA,EAAwC,QAAQ,iBAAiB;AAAA;AAAA;AAAA,IACjE;AAEJ,WAAO;AAAA;AAAA,qBAEU,QAAQ,gBAAgB;AAAA,EAC3C,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqDZ;AAAA,EAEA,MAAgB,cAAc,SAA+C;AAC3E,UAAM,mBAAmB,QAAQ;AAEjC,UAAM,QAAiB;AAAA,MACrB,MAAMC,MAAK;AAAA,QACT,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,SAASA,IAAE,OAAO,EAAE,SAAS,iCAAiC;AAAA,UAC9D,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,mEAAmE;AAAA,UACxG,UAAUA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,8CAA8C;AAAA,UACvF,YAAYA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,EAAE,SAAS,qCAAqC;AAAA,QAC9F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,SAAS,MAAM,UAAU,WAAW,MAAM;AAC1D,cAAI;AACF,kBAAM,aAAa,OACfC,SAAQ,kBAAkB,IAAI,IAC9B;AAEJ,gBAAI,OAAO,CAAC,MAAM,iBAAiB,cAAc;AAEjD,gBAAI,UAAU;AACZ,mBAAK,KAAK,UAAU,QAAQ;AAAA,YAC9B;AAEA,iBAAK,KAAK,eAAe,OAAO,cAAc,EAAE,CAAC;AACjD,iBAAK,KAAK,MAAM,SAAS,UAAU;AAEnC,kBAAM,EAAE,QAAQ,OAAO,IAAI,MAAMP,WAAU,KAAK,KAAK,GAAG,GAAG;AAAA,cACzD,KAAK;AAAA,cACL,WAAW,IAAI,OAAO;AAAA,cACtB,SAAS;AAAA,YACX,CAAC;AAED,kBAAM,SAAS,eAAe,UAAU,oBAAoBG,iBAAgB;AAC5E,kBAAM,cAAc,UAAU,IAAI,MAAM,IAAI,EAAE,OAAO,OAAO,EAAE;AAE9D,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA;AAAA,cACA;AAAA,YACF;AAAA,UACF,SAAS,OAAY;AAEnB,gBAAI,MAAM,SAAS,KAAK,CAAC,MAAM,QAAQ;AACrC,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,QAAQ;AAAA,gBACR,YAAY;AAAA,gBACZ;AAAA,cACF;AAAA,YACF;AACA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,cACb;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MAED,MAAME,MAAK;AAAA,QACT,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,SAASA,IAAE,OAAO,EAAE,SAAS,0DAA0D;AAAA,UACvF,YAAYA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAG,EAAE,SAAS,mCAAmC;AAAA,QAC7F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,SAAS,WAAW,MAAM;AAC1C,cAAI;AAEF,kBAAM,EAAE,OAAO,IAAI,MAAMN;AAAA,cACvB,yBAAyB,QAAQ,QAAQ,OAAO
,EAAE,CAAC,2BAA2B,cAAc,GAAG;AAAA,cAC/F;AAAA,gBACE,KAAK;AAAA,gBACL,SAAS;AAAA,cACX;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AAEtD,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,MAAM;AAAA,cACb;AAAA,YACF;AAAA,UACF,SAAS,OAAY;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,cACb;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MAED,WAAWK,MAAK;AAAA,QACd,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,MAAMA,IAAE,OAAO,EAAE,SAAS,8DAA8D;AAAA,UACxF,WAAWA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,0CAA0C;AAAA,UACpF,SAASA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,kDAAkD;AAAA,QAC5F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,MAAM,WAAW,QAAQ,MAAM;AAC/C,cAAI;AACF,kBAAM,eAAeE,YAAW,IAAI,IAChC,OACAD,SAAQ,kBAAkB,IAAI;AAElC,gBAAI,CAACE,aAAW,YAAY,GAAG;AAC7B,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,mBAAmB,IAAI;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAMC,MAAK,YAAY;AACrC,gBAAI,MAAM,OAAON,gBAAe;AAC9B,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,oBAAoB,MAAM,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC;AAAA,cACjE;AAAA,YACF;AAEA,gBAAI,UAAU,MAAMO,UAAS,cAAc,OAAO;AAElD,gBAAI,cAAc,UAAa,YAAY,QAAW;AACpD,oBAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,oBAAM,SAAS,aAAa,KAAK;AACjC,oBAAM,MAAM,WAAW,MAAM;AAC7B,wBAAU,MAAM,MAAM,OAAO,GAAG,EAAE,KAAK,IAAI;AAAA,YAC7C;AAEA,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,MAAMC,UAAS,kBAAkB,YAAY;AAAA,cAC7C,SAAS,eAAe,SAAST,iBAAgB;AAAA,cACjD,WAAW,QAAQ,MAAM,IAAI,EAAE;AAAA,YACjC;AAAA,UACF,SAAS,OAAY;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MAED,UAAUE,MAAK;AAAA,QACb,aAAa;AAAA,QACb,aAAaC,IAAE,OAAO;AAAA,UACpB,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAG,EAAE,SAAS,gDAAgD;AAAA,UAClG,WAAWA,IAAE,QAAQ,EAAE,SAAS,EAAE,QAAQ,KAAK,EAAE,SAAS,sDAAsD;AAAA,UAChH,UAAUA,IAAE,OAAO,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,SAAS,qCAAqC;AAAA,QAC3F,CAAC;AAAA,QACD,SAAS,OAAO,EAAE,MAAM,WAAW,SAAS,MAAM;AAChD,cAAI;AACF,kBAAM,eAAeE,YAAW,IAAI,IAChC,OACAD,SAAQ,kBAAkB,IAAI;AAElC,gBAAI,CAACE,aAAW,YAAY,GAAG;AAC7B,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,wBAAwB,IAAI;AAAA,cACrC;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAMC,MAAK,YAAY;AACrC,gBAAI,CAAC,MAAM,YAAY,GAA
G;AACxB,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,OAAO,oBAAoB,IAAI;AAAA,cACjC;AAAA,YACF;AAEA,gBAAI,WAAW;AAEb,oBAAM,EAAE,OAAO,IAAI,MAAMV;AAAA,gBACvB,oBAAoB,QAAQ;AAAA,gBAC5B;AAAA,kBACE,KAAK;AAAA,kBACL,SAAS;AAAA,gBACX;AAAA,cACF;AAEA,oBAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AACtD,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,MAAMY,UAAS,kBAAkB,YAAY,KAAK;AAAA,gBAClD;AAAA,gBACA,OAAO,MAAM;AAAA,gBACb,WAAW;AAAA,cACb;AAAA,YACF,OAAO;AACL,oBAAM,UAAU,MAAMC,SAAQ,cAAc,EAAE,eAAe,KAAK,CAAC;AACnE,oBAAM,QAAQ,QAAQ,MAAM,GAAG,GAAG,EAAE,IAAI,QAAM;AAAA,gBAC5C,MAAM,EAAE;AAAA,gBACR,MAAM,EAAE,YAAY,IAAI,cAAc;AAAA,cACxC,EAAE;AAEF,qBAAO;AAAA,gBACL,SAAS;AAAA,gBACT,MAAMD,UAAS,kBAAkB,YAAY,KAAK;AAAA,gBAClD;AAAA,gBACA,OAAO,MAAM;AAAA,cACf;AAAA,YACF;AAAA,UACF,SAAS,OAAY;AACnB,mBAAO;AAAA,cACL,SAAS;AAAA,cACT,OAAO,MAAM;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,MACD,YAAY,oBAAoB;AAAA,QAC9B;AAAA,MACF,CAAC;AAAA,IACH;AAGA,QAAI;AACF,UAAI,0BAA0B,GAAG;AAC/B,cAAM,WAAW,MAAM,iBAAiB,gBAAgB;AACxD,YAAI,UAAU;AACZ,gBAAM,EAAE,0BAAAE,0BAAyB,IAAI,MAAM;AAC3C,gBAAM,kBAAkBA,0BAAyB;AAAA,YAC/C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA,EAIU,SAAS,SAAsC;AAEvD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,SAA6F;AAErG,UAAM,aAAa,MAAM,KAAK,cAAc,OAAO;AAGnD,UAAM,mBAAmB,KAAK,SAAS,KAAK,IAAI;AAChD,SAAK,WAAW,MAAM;AAEtB,QAAI;AACF,aAAO,MAAM,MAAM,IAAI,OAAO;AAAA,IAChC,UAAE;AACA,WAAK,WAAW;AAAA,IAClB;AAAA,EACF;AAAA,EAEU,YAAY,MAAc,OAAqC;AAUvE,UAAM,WAAqB,CAAC;AAE5B,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,UAAU,KAAK,SAAS;AACxC,iBAAS,KAAK,KAAK,OAAO;AAAA,MAC5B;AAAA,IACF;AAGA,UAAM,cAAc,SAAS,SAAS,IAClC,SAAS,KAAK,MAAM,IACpB;AAGJ,UAAM,WAA4B,CAAC;AACnC,QAAI,gBAAgB;AACpB,QAAI,aAAa;AAEjB,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,iBAAiB,KAAK,YAAY;AAClD,cAAM,SAAS,KAAK;AAEpB,YAAI,KAAK,aAAa,UAAU,OAAO,SAAS;AAC9C,wBAAc,OAAO,cAAc;AAGnC,gBAAM,SAAS,OAAO,UAAU,IAAI,MAAM,IAAI,EAAE,OAAO,OAAO;AAC9D,qBAAW,QAAQ,MAAM,MAAM,GAAG,EAAE,GAAG;AAErC,kBAAM,QAAQ,KAAK,MAAM,sBAAsB;AAC/C,gBAAI,OAAO;AACT,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,MA
AM,MAAM,CAAC;AAAA,gBACb,YAAY,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,gBACjC,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,gBACvB,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,WAAW,KAAK,aAAa,UAAU,OAAO,SAAS;AACrD,2BAAiB,OAAO,SAAS;AAEjC,qBAAW,SAAS,OAAO,SAAS,CAAC,GAAG,MAAM,GAAG,EAAE,GAAG;AACpD,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM;AAAA,cACN,WAAW;AAAA,YACb,CAAC;AAAA,UACH;AAAA,QACF,WAAW,KAAK,aAAa,eAAe,OAAO,SAAS;AAE1D,gBAAM,UAAU,OAAO,UACnB,eAAe,OAAO,SAAS,GAAG,IAClC;AAEJ,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,OAAO;AAAA,YACb,SAAS;AAAA,YACT,WAAW;AAAA,YACX,SAAS,GAAG,OAAO,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,WAAW,KAAK,aAAa,qBAAqB,OAAO,SAAS;AAEhE,gBAAM,UAAU,OAAO,WAAW,CAAC;AACnC,qBAAW,KAAK,QAAQ,MAAM,GAAG,EAAE,GAAG;AACpC,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,EAAE;AAAA,cACR,YAAY,EAAE;AAAA,cACd,SAAS,EAAE,UAAU,eAAe,EAAE,SAAS,GAAG,IAAI;AAAA,cACtD,WAAW,EAAE,QAAQ,MAAM,SAAS,EAAE,QAAQ,MAAM,WAAW;AAAA,cAC/D,SAAS,EAAE,cAAc,EAAE;AAAA,YAC7B,CAAC;AAAA,UACH;AAAA,QACF,WAAW,KAAK,aAAa,gBAAgB,OAAO,SAAS;AAE3D,wBAAc,OAAO,kBAAkB;AAEvC,cAAI,OAAO,UAAU;AACnB,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,OAAO;AAAA,cACb,YAAY,OAAO;AAAA,cACnB,SAAS,OAAO,WAAW,eAAe,OAAO,UAAU,GAAG,IAAI;AAAA,cAClE,WAAW;AAAA,cACX,SAAS,GAAG,OAAO,QAAQ,QAAQ,GAAG,OAAO,iBAAiB,KAAK,OAAO,cAAc,UAAU,EAAE;AAAA,YACtG,CAAC;AAAA,UACH;AAGA,qBAAW,SAAS,OAAO,SAAS,CAAC,GAAG,MAAM,GAAG,EAAE,GAAG;AACpD,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,KAAK;AAAA,cACX,WAAW;AAAA,cACX,SAAS,KAAK,QAAQ,UAAU,KAAK,KAAK,KAAK;AAAA,YACjD,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,eAAe,aAAa,iBAAiB;AAEnE,WAAO;AAAA,MACL,OAAO;AAAA;AAAA,MACP,SAAS;AAAA,MACT,UAAU,SAAS,MAAM,GAAG,YAAY;AAAA,MACxC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,qBAAqB,OAAgC;AACnE,SAAO,IAAI,eAAe,KAAK;AACjC;;;AFtgBA,IAAM,mBAAmB;AAwClB,SAAS,iBAAiB,SAA4B;AAC3D,SAAOC,OAAK;AAAA,IACV,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAab,aAAaC,IAAE,OAAO;AAAA,MACpB,OAAOA,IAAE,OAAO,EAAE,SAAS,gEAAiE;AAAA,MAC5F,SAASA,IAAE,OAAO,EAAE,SAAS,0YAA0Y;AAAA,IACza,CAAC;AAAA,IAED,SAAS,OAAO,EAAE,OAAO,QAAQ,GAAG,gBAAgB
;AAClD,YAAM,aAAc,YAAoB,cAAc,iBAAiB,KAAK,IAAI,CAAC;AAGjF,YAAM,QAAQ,aAAa;AAAA,QACzB,QAAQ;AAAA,QACR,YAAY;AAAA,MACd,CAAC;AAED,UAAI;AACF,cAAM,WAAW,qBAAqB;AAItC,cAAM,WAAW,UACb,GAAG,KAAK;AAAA;AAAA,WAAgB,OAAO,KAC/B;AAIJ,cAAM,SAAS,MAAM,SAAS,IAAI;AAAA,UAChC,MAAM;AAAA,UACN,WAAW,QAAQ;AAAA,UACnB;AAAA,UACA,kBAAkB,QAAQ;AAAA,UAC1B,YAAY,OAAO,UAAiC;AAElD,gBAAI,MAAM,SAAS,UAAU,MAAM,MAAM;AACvC,oBAAM,QAAQ,aAAa;AAAA,gBACzB,QAAQ;AAAA,gBACR,YAAY,MAAM;AAAA,gBAClB,UAAU,MAAM,KAAK;AAAA,gBACrB,aAAa,MAAM,KAAK;AAAA,gBACxB,UAAU,MAAM,KAAK;AAAA,gBACrB,WAAW,MAAM,KAAK;AAAA,gBACtB,YAAY,MAAM,KAAK;AAAA,cACzB,CAAC;AAAA,YACH,WAAW,MAAM,SAAS,YAAY;AACpC,oBAAM,QAAQ,aAAa;AAAA,gBACzB,QAAQ;AAAA,gBACR,YAAY,MAAM;AAAA,gBAClB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH,WAAW,MAAM,SAAS,SAAS;AACjC,oBAAM,QAAQ,aAAa;AAAA,gBACzB,QAAQ;AAAA,gBACR,YAAY,MAAM;AAAA,gBAClB,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC;AAED,YAAI,CAAC,OAAO,SAAS;AACnB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,OAAO,SAAS;AAAA,YACvB,aAAa,OAAO;AAAA,UACtB;AAAA,QACF;AAEA,cAAM,eAAe,OAAO;AAG5B,qBAAa,QAAQ;AASrB,YAAI,kBAAkB;AAItB,YAAI,aAAa,SAAS;AACxB,6BAAmB,aAAa;AAAA,QAClC;AAGA,YAAI,aAAa,SAAS,SAAS,GAAG;AACpC,6BAAmB;AAAA;AAAA,6BAAkC,aAAa,SAAS,MAAM;AAAA;AAEjF,qBAAW,WAAW,aAAa,UAAU;AAC3C,gBAAI,QAAQ,SAAS,SAAS;AAC5B,iCAAmB;AAAA,MAAS,QAAQ,IAAI,IAAI,QAAQ,UAAU,UAAU,eAAe,QAAQ,WAAW,IAAI,GAAG,CAAC;AAAA,YACpH,WAAW,QAAQ,SAAS,YAAY;AACtC,iCAAmB;AAAA,MAAS,QAAQ,IAAI,IAAI,QAAQ,UAAU,iBAAiB,QAAQ,UAAU,IAAI,QAAQ,OAAO,MAAM,EAAE;AAC5H,kBAAI,QAAQ,SAAS;AACnB,mCAAmB;AAAA;AAAA,IAAiB,eAAe,QAAQ,SAAS,GAAG,CAAC;AAAA;AAAA,cAC1E;AAAA,YACF,WAAW,QAAQ,SAAS,QAAQ;AAClC,iCAAmB;AAAA,MAAS,QAAQ,IAAI,MAAM,QAAQ,UAAU,IAAI,QAAQ,OAAO,MAAM,EAAE;AAAA,YAC7F;AAAA,UACF;AAAA,QACF;AAEA,YAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,4BAAkB;AAAA,QACpB;AAEA,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,aAAa;AAAA,UACpB,SAAS,aAAa;AAAA,UACtB,UAAU,aAAa;AAAA,UACvB,YAAY,aAAa;AAAA,UACzB,eAAe,aAAa;AAAA,UAC5B,iBAAiB,eAAe,iBAAiB,gBAAgB;AAAA,UACjE,aAAa,OAAO;AAAA,UACpB,YAAY,OAAO,MAAM;AAAA,QAC3B;AAAA,MACF,SAAS,OAAY;AACnB,cAAM,QAAQ,aAAa;AAAA,UACzB,QAAQ;AAAA
,UACR,OAAO,MAAM;AAAA,QACf,CAAC;AAED,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AIxLA;;;ACRA,SAAS,QAAAC,cAAY;AACrB,SAAS,KAAAC,WAAS;AAClB,OAAO,SAAS;AAEhB,IAAM,MAAM,IAAI,IAAI,EAAE,WAAW,KAAK,CAAC;AAahC,SAAS,uBAAuB,SAAiC;AACtE,QAAM,WAAW,IAAI,QAAQ,QAAQ,YAAY;AAEjD,SAAOD,OAAK;AAAA,IACV,aACE;AAAA,IACF,aAAaC,IAAE,OAAO;AAAA,MACpB,QAAQA,IACL,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAC9B,SAAS,6DAA6D;AAAA,IAC3E,CAAC;AAAA,IACD,SAAS,OAAO,UAAU;AACxB,YAAM,QAAQ,SAAS,MAAM,MAAM;AACnC,UAAI,CAAC,OAAO;AACV,cAAM,SAAS,SAAS,QAAQ,IAAI,CAAC,OAAO;AAAA,UAC1C,MAAM,EAAE,gBAAgB;AAAA,UACxB,SAAS,EAAE;AAAA,UACX,QAAQ,EAAE;AAAA,QACZ,EAAE;AACF,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,SACE;AAAA,UACF;AAAA,UACA,gBAAgB,QAAQ;AAAA,QAC1B;AAAA,MACF;AAEA,cAAQ,WAAW,EAAE,QAAQ,aAAa,QAAQ,MAAM,OAAO,CAAC;AAChE,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,SAAS,qBAAqB,SAAqD;AACxF,SAAOD,OAAK;AAAA,IACV,aACE;AAAA,IACF,aAAaC,IAAE,OAAO;AAAA,MACpB,QAAQA,IAAE,OAAO,EAAE,SAAS,iDAAiD;AAAA,IAC/E,CAAC;AAAA,IACD,SAAS,OAAO,UAAU;AACxB,cAAQ,WAAW,EAAE,QAAQ,UAAU,OAAO,MAAM,OAAO,CAAC;AAC5D,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,0BAA0B,MAAM,MAAM;AAAA,MACjD;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ACrEA,SAAS,QAAAC,cAAY;AACrB,SAAS,KAAAC,WAAS;AAClB,SAAS,YAAAC,WAAU,QAAAC,aAAY;AAC/B,SAAS,QAAAC,OAAM,YAAAC,WAAU,WAAAC,gBAAe;AAOxC,IAAM,aAAqC;AAAA,EACzC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AACT;AAEO,SAAS,qBAAqB,SAAgC;AACnE,SAAON,OAAK;AAAA,IACV,aAAa;AAAA,IACb,aAAaC,IAAE,OAAO;AAAA,MACpB,MAAMA,IAAE,OAAO,EAAE,SAAS,wEAAwE;AAAA,MAClG,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,SAAS,sDAAsD;AAAA,IAC7F,CAAC;AAAA,IACD,SAAS,OAAO,UAAU;AACxB,UAAI;AACF,cAAM,EAAE,oBAAAM,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,YAAI,CAACD,oBAAmB,GAAG;AAC
zB,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO;AAAA,UACT;AAAA,QACF;AAEA,cAAM,WAAW,MAAM,KAAK,WAAW,GAAG,IACtC,MAAM,OACNH,MAAK,QAAQ,kBAAkB,MAAM,IAAI;AAG7C,YAAI;AACF,gBAAMD,MAAK,QAAQ;AAAA,QACrB,QAAQ;AACN,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,mBAAmB,MAAM,IAAI;AAAA,UACtC;AAAA,QACF;AAEA,cAAM,WAAW,MAAM,QAAQE,UAAS,QAAQ;AAChD,cAAM,MAAMC,SAAQ,QAAQ,EAAE,YAAY;AAC1C,cAAM,cAAc,WAAW,GAAG,KAAK;AAGvC,cAAM,aAAa,MAAME,gBAAe;AAAA,UACtC,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAGA,cAAM,WAAW,MAAMN,UAAS,QAAQ;AACxC,cAAM,SAAS,MAAM,MAAM,WAAW,WAAW;AAAA,UAC/C,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,YAAY;AAAA,UACvC,MAAM;AAAA,QACR,CAAC;AAED,YAAI,CAAC,OAAO,IAAI;AACd,iBAAO;AAAA,YACL,SAAS;AAAA,YACT,OAAO,kBAAkB,OAAO,MAAM,IAAI,OAAO,UAAU;AAAA,UAC7D;AAAA,QACF;AAGA,cAAMM,gBAAe,WAAW,WAAW,QAAQ,EAAE,WAAW,SAAS,OAAO,CAAC;AAGjF,cAAM,eAAe,MAAMA,gBAAe,eAAe,WAAW,MAAM;AAE1E,eAAO;AAAA,UACL,SAAS;AAAA,UACT,QAAQ,WAAW;AAAA,UACnB;AAAA,UACA,WAAW,SAAS;AAAA,UACpB;AAAA,UACA,aAAa,aAAa;AAAA,UAC1B,WAAW,aAAa;AAAA,QAC1B;AAAA,MACF,SAAS,KAAU;AACjB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,OAAO,kBAAkB,IAAI,OAAO;AAAA,QACtC;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AFzGA;AACA;AA6GA;AArFA,eAAsB,YAAY,SAA+C;AAC/E,QAAM,QAAiB;AAAA,IACrB,MAAM,eAAe;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,WAAW,QAAQ;AAAA,MACnB,UAAU,QAAQ;AAAA,MAClB,YAAY,QAAQ;AAAA,IACtB,CAAC;AAAA,IAED,WAAW,mBAAmB;AAAA,MAC5B,kBAAkB,QAAQ;AAAA,IAC5B,CAAC;AAAA,IAED,YAAY,oBAAoB;AAAA,MAC9B,kBAAkB,QAAQ;AAAA,MAC1B,WAAW,QAAQ;AAAA,MACnB,WAAW,QAAQ,aAAa;AAAA,MAChC,YAAY,QAAQ;AAAA,IACtB,CAAC;AAAA,IAED,MAAM,eAAe;AAAA,MACnB,WAAW,QAAQ;AAAA,IACrB,CAAC;AAAA,IAED,YAAY,oBAAoB;AAAA,MAC9B,WAAW,QAAQ;AAAA,MACnB,mBAAmB,QAAQ;AAAA,IAC7B,CAAC;AAAA,IAED,QAAQ,iBAAiB;AAAA,MACvB,kBAAkB,QAAQ;AAAA,IAC5B,CAAC;AAAA,IAED,eAAe,iBAAiB;AAAA,MAC9B,WAAW,QAAQ;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,YAAY,QAAQ;AAAA,IACtB,CAAC;AAAA,IAED,YAAY,oBAAoB;AAAA,MAC9B,kBAAkB,QAAQ;AAAA,IAC5B,CAAC;AAAA,EACH;AAGA,MAAI,mBAAmB,GAAG;AACxB,UAAM,cAAc,qBAAqB;AAAA,MACvC,kBAAkB,QAAQ;AAAA,MAC1B,WAAW,QAAQ;AAAA,IACrB,CAAC;AAAA,EACH;AAGA,MAAI,QAAQ,yBAAyB,OAAO;AAC1C,QAAI;AACF,UAAI,0BA
A0B,GAAG;AAC/B,cAAM,WAAW,MAAM,iBAAiB,QAAQ,gBAAgB;AAChE,YAAI,UAAU;AACZ,gBAAM,kBAAkB,yBAAyB;AAAA,YAC/C,kBAAkB,QAAQ;AAAA,UAC5B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI,QAAQ,WAAW;AACrB,UAAM,gBAAgB,uBAAuB,QAAQ,SAAS;AAC9D,UAAM,cAAc,qBAAqB,QAAQ,SAAS;AAAA,EAC5D;AAEA,SAAO;AACT;;;AGhHA,SAAS,gBAAAC,qBAAyD;AAElE;;;ACDA;AASA;AAVA,OAAO,QAAQ;AAgBf,SAAS,wBAAgC;AACvC,QAAMC,YAAW,QAAQ;AAEzB,QAAM,SAAS;AAAA;AAAA;AAIf,MAAIA,cAAa,SAAS;AACxB,WAAO,GAAG,MAAM;AAAA;AAAA;AAAA;AAAA,EAIlB;AAGA,SAAO,GAAG,MAAM;AAAA;AAAA;AAAA;AAIlB;AAKA,eAAsB,kBAAkB,SAOpB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc,CAAC;AAAA,IACf;AAAA,EACF,IAAI;AAGJ,MAAI,sBAAsB;AAC1B,MAAI,qBAAqB;AACzB,MAAI,kBAAkB;AACtB,MAAI,wBAAwB;AAE5B,MAAI,kBAAkB;AAEpB,UAAM,EAAE,QAAQ,UAAU,IAAI,IAAI,MAAM,4BAA4B,gBAAgB;AAGpF,0BAAsB,yBAAyB,MAAM;AAGrD,4BAAwB,uBAAuB,QAAQ;AAGvD,UAAM,WAAW,MAAM,aAAa,iBAAiB,YAAY;AACjE,sBAAkB,sBAAsB,QAAQ;AAGhD,QAAI,YAAY,SAAS,GAAG;AAC1B,YAAM,cAAc,MAAM,qBAAqB,KAAK,aAAa,gBAAgB;AACjF,2BAAqB,wBAAwB,WAAW;AAAA,IAC1D;AAAA,EACF,OAAO;AAEL,UAAM,EAAE,eAAAC,eAAc,IAAI,MAAM;AAChC,UAAM,SAAS,MAAMA,eAAc,iBAAiB;AACpD,4BAAwB,uBAAuB,MAAM;AAAA,EACvD;AAGA,QAAM,QAAQ,MAAM,YAAY,aAAa,SAAS;AACtD,QAAM,eAAe,sBAAsB,KAAK;AAGhD,QAAMD,YAAW,QAAQ,aAAa,UAAU,YAAY,QAAQ,aAAa,WAAW,UAAU;AACtG,QAAM,eAAc,oBAAI,KAAK,GAAE,mBAAmB,SAAS,EAAE,SAAS,QAAQ,MAAM,WAAW,OAAO,QAAQ,KAAK,UAAU,CAAC;AAC9H,QAAM,qBAAqB,sBAAsB;AAEjD,QAAM,eAAe;AAAA;AAAA;AAAA,kBAGLA,SAAQ,KAAK,GAAG,QAAQ,CAAC;AAAA,cAC7B,WAAW;AAAA,2BACE,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmLzC,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoDlB,eAAe;AAAA;AAAA,EAEf,mBAAmB;AAAA;AAAA,EAEnB,kBAAkB;AAAA;AAAA;AAAA,EAGlB,qBAAqB;AAAA;AAAA;AAAA,EAGrB,YAAY;AAAA;AAAA,EAEZ,qBAAqB;AAAA,EAA2B,kBAAkB,KAAK,EAAE;AAAA;AAAA;AAIzE,SAAO;AACT;AAKA,SAAS,sBAAsB,OAA2B;AACxD,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO;AAAA,EACT;AAEA,QAAM,cAAsC;AAAA,IAC1C,SAAS;AAAA,IACT,aAAa;AAAA,IACb,WAAW;AAAA,IACX,WAAW;AAAA,EACb;AAEA,QAAM,QAAQ,CAAC,gBAAgB;AAC/B,aAAW,QAAQ,OAAO;AACxB,UAAM,QAAQ,YAAY,KAAK,MAAM,KAAK;AAC1C,UAAM,KAAK,GAAG,KAAK,KAAK,KAAK,EAAE,KAAK,KAAK,OAAO,EAAE;AAAA,EACpD;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAMO,SAAS,wBAAwB,cAA+C;AACrF,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA+DP,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAOvC;AAKO,SAAS,oBAAoB,qBAAqC;AACvE,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASP,mBAAmB;AAAA;AAAA;AAGrB;;;AC9bA,SAAS,0BAA6C;AAMtD,SAAS,sBAAsB,OAAyB;AACtD,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,iBAAiB,KAAM,QAAO,MAAM,YAAY;AACpD,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO,MAAM,IAAI,qBAAqB;AAChE,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,SAAkC,CAAC;AACzC,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAgC,GAAG;AACrE,aAAO,CAAC,IAAI,sBAAs
B,CAAC;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAUA,SAAS,gBAAgB,KAAuB;AAC9C,MAAI,QAAQ,QAAQ,QAAQ,UAAa,OAAO,QAAQ,SAAU,QAAO;AAEzE,QAAM,UAAU;AAGhB,MAAI,CAAC,QAAQ,QAAQ,OAAO,QAAQ,SAAS,SAAU,QAAO;AAG9D,SAAO,sBAAsB,OAAO;AACtC;AAWA,SAAS,iBACP,GACA,GACyC;AACzC,QAAM,YAAY,OAAO,MAAM;AAC/B,QAAM,YAAY,OAAO,MAAM;AAG/B,MAAI,aAAa,WAAW;AAC1B,WAAO,GAAG,CAAC;AAAA;AAAA,EAAO,CAAC;AAAA,EACrB;AAGA,QAAM,SAAyC,YAC3C,CAAC,EAAE,MAAM,QAAQ,MAAM,EAAE,CAAC,IAC1B,MAAM,QAAQ,CAAC,IACZ,IACD,CAAC;AAEP,QAAM,SAAyC,YAC3C,CAAC,EAAE,MAAM,QAAQ,MAAM,EAAE,CAAC,IAC1B,MAAM,QAAQ,CAAC,IACZ,IACD,CAAC;AAEP,SAAO,CAAC,GAAG,QAAQ,GAAG,MAAM;AAC9B;AAeA,SAAS,yBAAyB,UAA0C;AAC1E,MAAI,SAAS,UAAU,EAAG,QAAO;AAEjC,QAAM,SAAyB,CAAC;AAEhC,aAAW,OAAO,UAAU;AAC1B,UAAM,OAAO,OAAO,OAAO,SAAS,CAAC;AAErC,QAAI,CAAC,QAAS,KAAa,SAAU,IAAY,MAAM;AAErD,aAAO,KAAK,GAAG;AACf;AAAA,IACF;AAGA,UAAM,OAAQ,IAAY;AAE1B,QAAI,SAAS,QAAQ;AACnB,YAAM,gBAAgB,iBAAkB,KAAa,SAAU,IAAY,OAAO;AAClF,aAAO,OAAO,SAAS,CAAC,IAAI,EAAE,MAAM,QAAQ,SAAS,cAAc;AACnE,cAAQ,KAAK,sDAAsD;AAAA,IACrE,WAAW,SAAS,aAAa;AAE/B,YAAM,YAAY,OAAQ,KAAa,YAAY,WAC/C,CAAC,EAAE,MAAM,QAAQ,MAAO,KAAa,QAAQ,CAAC,IAC9C,MAAM,QAAS,KAAa,OAAO,IAChC,KAAa,UACd,CAAC;AACP,YAAM,WAAW,OAAQ,IAAY,YAAY,WAC7C,CAAC,EAAE,MAAM,QAAQ,MAAO,IAAY,QAAQ,CAAC,IAC7C,MAAM,QAAS,IAAY,OAAO,IAC/B,IAAY,UACb,CAAC;AACP,aAAO,OAAO,SAAS,CAAC,IAAI,EAAE,MAAM,aAAa,SAAS,CAAC,GAAG,WAAW,GAAG,QAAQ,EAAE;AACtF,cAAQ,KAAK,2DAA2D;AAAA,IAC1E,WAAW,SAAS,QAAQ;AAE1B,YAAM,cAAc,MAAM,QAAS,KAAa,OAAO,IAAK,KAAa,UAAU,CAAC;AACpF,YAAM,aAAa,MAAM,QAAS,IAAY,OAAO,IAAK,IAAY,UAAU,CAAC;AACjF,aAAO,OAAO,SAAS,CAAC,IAAI,EAAE,MAAM,QAAQ,SAAS,CAAC,GAAG,aAAa,GAAG,UAAU,EAAE;AACrF,cAAQ,KAAK,sDAAsD;AAAA,IACrE,OAAO;AAEL,aAAO,KAAK,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AACT;AAeO,SAAS,sBAAsB,UAA0C;AAG9E,MAAI,WAAW;AACf,aAAW,OAAO,UAAU;AAC1B,QAAI;AACF,yBAAmB,MAAM,GAAG;AAAA,IAC9B,QAAQ;AACN,iBAAW;AACX;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AAEJ,MAAI,UAAU;AACZ,aAAS;AAAA,EACX,OAAO;AAEL,YAAQ,KAAK,0EAA0E;AAEvF,UAAM,YAA4B,CAAC;AACnC,QAAI,cAAc;AAElB,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,MAAM,SAAS,CAAC;AAGtB,UAAI;A
ACF,2BAAmB,MAAM,GAAG;AAC5B,kBAAU,KAAK,GAAG;AAClB;AAAA,MACF,QAAQ;AAAA,MAER;AAGA,YAAM,QAAQ,gBAAgB,GAAG;AACjC,UAAI;AACF,2BAAmB,MAAM,KAAK;AAC9B,kBAAU,KAAK,KAAK;AACpB;AACA,gBAAQ,KAAK,wCAAwC,CAAC,UAAW,IAAY,IAAI,uCAAuC;AACxH;AAAA,MACF,QAAQ;AAAA,MAER;AAIA,UAAK,IAAY,SAAS,UAAU,MAAM,QAAS,IAAY,OAAO,GAAG;AACvE,cAAM,eAAiB,IAAY,QAAkB,IAAI,CAAC,SAAc;AACtE,cAAI,KAAK,SAAS,iBAAiB,KAAK,WAAW,QAAW;AAC5D,kBAAM,SAAS,sBAAsB,KAAK,MAAM;AAEhD,gBAAI,UAAU,OAAO,WAAW,YAAY,CAAE,OAAe,MAAM;AACjE,qBAAO,EAAE,GAAG,MAAM,QAAQ,EAAE,MAAM,QAAQ,OAAO,OAAO,EAAE;AAAA,YAC5D;AAEA,kBAAM,aAAa,CAAC,QAAQ,QAAQ,oBAAoB,cAAc,cAAc,SAAS;AAC7F,gBAAI,UAAU,OAAO,WAAW,YAAY,CAAC,WAAW,SAAU,OAAe,IAAI,GAAG;AACtF,qBAAO,EAAE,GAAG,MAAM,QAAQ,EAAE,MAAM,QAAQ,OAAO,OAAO,EAAE;AAAA,YAC5D;AACA,mBAAO,EAAE,GAAG,MAAM,OAAO;AAAA,UAC3B;AACA,iBAAO,sBAAsB,IAAI;AAAA,QACnC,CAAC;AAED,cAAM,aAAa,EAAE,GAAI,KAAa,SAAS,aAAa;AAC5D,YAAI;AACF,6BAAmB,MAAM,UAAU;AACnC,oBAAU,KAAK,UAAU;AACzB;AACA,kBAAQ,KAAK,wCAAwC,CAAC,gDAAgD;AACtG;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAKA,cAAQ;AAAA,QACN,gDAAgD,CAAC,UAAW,IAAY,IAAI,oDAC3B,KAAK,UAAU,OAAO,KAAK,GAAU,CAAC,CAAC;AAAA,MAC1F;AACA,gBAAU,KAAK,GAAG;AAAA,IACpB;AAEA,QAAI,cAAc,GAAG;AACnB,cAAQ,KAAK,mDAAmD,WAAW,IAAI,SAAS,MAAM,WAAW;AAAA,IAC3G;AAEA,aAAS;AAAA,EACX;AAIA,WAAS,yBAAyB,MAAM;AAExC,SAAO;AACT;;;AClRA,IAAM,eAA4C;AAAA,EAChD,6BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,6BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,+BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,4BAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,iCAAiC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACpF,yBAAiC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACpF,2BAAiC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACpF,iBAAiC,EAAE,eAAe,OAAS,eAAe,KAAO;AAAA,EACjF,kBAAgC,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACnF,aAAiC,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAClF,cAAiC,EAAE,eAAe,QAAS,eAAe,IAAO;AACnF;AAEA,IAAM,iBAA8B,EAAE,eAAe,KAAS,eAAe,KAAQ;AAErF,IAAM,kBAA+C;AAAA,EACnD,cAAc,EAAE,eAAe,KAAS,eAAe,KAAQ;AAAA,EAC/D,WAAc,EAAE,eAAe,KAAW,eAAe,KAAQ;AAAA,EACjE,WAAc,EAAE,eAAe,OAAS,eAAe,KAAO;AAAA,EAC9D,QAAc,EAAE,eAAe,QAAS,eAAe,IAAO;AAChE;AAMO,SAAS,eAAe,SAA8B;AAC3D
,QAAM,aAAa,QAAQ,KAAK,EAAE,YAAY;AAE9C,QAAM,QAAQ,aAAa,UAAU;AACrC,MAAI,MAAO,QAAO;AAElB,aAAW,CAAC,QAAQ,MAAM,KAAK,OAAO,QAAQ,eAAe,GAAG;AAC9D,QAAI,WAAW,WAAW,MAAM,EAAG,QAAO;AAAA,EAC5C;AAEA,SAAO;AACT;AAEO,IAAM,sBAAsB;AAG5B,IAAM,uBAAuB;AAM7B,IAAM,uBAAuB;;;AH/BpC,IAAM,yBAAyB;AAE/B,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAgBM,IAAM,iBAAN,MAAqB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAsB,CAAC;AAAA,EAE/B,YAAY,SAAgC;AAC1C,SAAK,YAAY,QAAQ;AACzB,SAAK,UAAU,QAAQ;AACvB,SAAK,kBAAkB,QAAQ;AAC/B,SAAK,qBAAqB,QAAQ;AAClC,SAAK,gBAAgB,QAAQ;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAyC;AAC7C,QAAI,WAAY,MAAM,eAAe,iBAAiB,KAAK,SAAS;AACpE,eAAW,sBAAsB,QAAQ;AAGzC,eAAW,KAAK,qBAAqB,UAAU,KAAK,kBAAkB;AAEtE,QAAI,KAAK,eAAe;AACtB,YAAM,EAAE,cAAc,IAAI,eAAe,KAAK,OAAO;AACrD,YAAM,gBAAgB,KAAK,MAAM,gBAAgB,oBAAoB;AAGrE,iBAAW,MAAM,KAAK,eAAe,UAAU,aAAa;AAG5D,YAAM,KAAK,cAAc,aAAa;AAAA,IACxC;AAGA,QAAI,KAAK,UAAU,SAAS,GAAG;AAC7B,YAAM,iBAAiB,KAAK,UAAU,KAAK,aAAa;AACxD,iBAAW;AAAA,QACT;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,EAAoC,cAAc;AAAA,QAC7D;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAGA,eAAW,kBAAkB,QAAQ;AAErC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,qBACE,UACA,aACkB;AAClB,QAAI,SAAS,UAAU,YAAa,QAAO;AAE3C,UAAM,WAAW,SAAS,SAAS;AACnC,UAAM,gBAAgB,SAAS,MAAM,GAAG,QAAQ;AAChD,UAAM,iBAAiB,SAAS,MAAM,QAAQ;AAG9C,UAAM,qBAAqB,oBAAI,IAAY;AAC3C,UAAM,YAA8B,CAAC;AAErC,eAAW,OAAO,eAAe;AAC/B,YAAM,YAAY,KAAK,eAAe,KAAK,kBAAkB;AAC7D,UAAI,UAAW,WAAU,KAAK,SAAS;AAAA,IACzC;AAGA,QAAI,mBAAmB,OAAO,GAAG;AAC/B,YAAM,UAA4B,CAAC;AACnC,iBAAW,OAAO,WAAW;AAC3B,cAAM,SAAS,yBAAyB,KAAK,kBAAkB;AAC/D,YAAI,OAAQ,SAAQ,KAAK,MAAM;AAAA,MACjC;AACA,aAAO,CAAC,GAAG,SAAS,GAAG,cAAc;AAAA,IACvC;AAEA,WAAO,CAAC,GAAG,WAAW,GAAG,cAAc;AAAA,EACzC;AAAA,EAEQ,eACN,KACA,oBACuB;AACvB,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG,QAAO;AAExC,UAAM,QAAe,CAAC;AACtB,eAAW,QAAQ,IAAI,SAAkB;AAEvC,UAAI,KAAK,SAAS,eAAe,KAAK,aAAa,QAAQ;AACzD,YAAI,KAAK,WAAY,oBAAmB,IAAI,KAAK,UAAU;AAC3D;AAAA,MACF;AAGA,UAAI,KAAK,SAAS,iBAAiB,KAAK,aAAa,QAAQ;AAC3D,YAAI,
KAAK,WAAY,oBAAmB,IAAI,KAAK,UAAU;AAC3D;AAAA,MACF;AAGA,UAAI,KAAK,SAAS,eAAe,KAAK,SAAS,WAAY;AAG3D,UAAI,KAAK,SAAS,iBAAiB,kBAAkB,IAAI,KAAK,QAAQ,GAAG;AACvE,cAAM,KAAK,KAAK,eAAe,IAAI,CAAC;AACpC;AAAA,MACF;AAEA,YAAM,KAAK,IAAI;AAAA,IACjB;AAEA,QAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,WAAO,EAAE,GAAG,KAAK,SAAS,MAAM;AAAA,EAClC;AAAA,EAEQ,eAAe,MAAgB;AACrC,UAAM,UAAU,MAAM,QAAQ,KAAK,MAAM,IAAI,KAAK,SAAS,CAAC,KAAK,MAAM;AACvE,UAAM,iBAAiB,QAAQ,IAAI,CAAC,MAAW;AAC7C,UAAI,OAAO,MAAM,YAAY,EAAE,SAAS,wBAAwB;AAC9D,cAAM,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAClD,eACE,EAAE,MAAM,GAAG,IAAI,IACf;AAAA,cAAiB,EAAE,SAAS,sBAAsB;AAAA,IAClD,EAAE,MAAM,CAAC,IAAI;AAAA,MAEjB;AACA,UAAI,KAAK,OAAO,MAAM,YAAY,OAAO,EAAE,SAAS,YAAY,EAAE,KAAK,SAAS,wBAAwB;AACtG,cAAM,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAClD,eAAO;AAAA,UACL,GAAG;AAAA,UACH,MACE,EAAE,KAAK,MAAM,GAAG,IAAI,IACpB;AAAA,cAAiB,EAAE,KAAK,SAAS,sBAAsB;AAAA,IACvD,EAAE,KAAK,MAAM,CAAC,IAAI;AAAA,QACtB;AAAA,MACF;AACA,aAAO;AAAA,IACT,CAAC;AAED,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ,MAAM,QAAQ,KAAK,MAAM,IAAI,iBAAiB,eAAe,CAAC;AAAA,IACxE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,eACZ,UACA,eAC2B;AAC3B,QAAI,cAAc,sBAAsB,QAAQ;AAEhD,WAAO,cAAc,iBAAiB,SAAS,SAAS,KAAK,oBAAoB;AAE/E,UAAI,cAAc;AAClB,UAAI,WAAW;AACf,YAAM,eAAe,SAAS,SAAS,KAAK;AAE5C,eAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,cAAM,YAAY,KAAK,cAAc,SAAS,CAAC,CAAC;AAChD,uBAAe;AACf,mBAAW,IAAI;AACf,YAAI,eAAe,qBAAsB;AAAA,MAC3C;AAEA,UAAI,aAAa,EAAG;AAEpB,YAAM,QAAQ,SAAS,MAAM,GAAG,QAAQ;AACxC,YAAM,YAAY,SAAS,MAAM,QAAQ;AAEzC,YAAM,UAAU,MAAM,KAAK,eAAe,KAAK;AAC/C,UAAI,SAAS;AACX,aAAK,UAAU,KAAK,OAAO;AAC3B,gBAAQ;AAAA,UACN,wBAAwB,MAAM,MAAM,eAAe,WAAW,iBAAiB,eAAe,OAAO,CAAC;AAAA,QACxG;AAAA,MACF;AAEA,iBAAW;AACX,oBAAc,sBAAsB,QAAQ;AAAA,IAC9C;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,eAAe,OAAiD;AAC5E,UAAM,cAAc,MACjB,IAAI,CAAC,QAAQ;AACZ,YAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,aAAO,IAAI,IAAI,IAAI,MAAM,OAAO;AAAA,IAClC,CAAC,EACA,KAAK,MAAM;AAEd,QAAI;AACF,YAAM,SAAS,MAAME,cAAa;AAAA,QAChC,OAAO,aAAa,mBAAmB;AAAA,QACvC,QAAQ,oBAAoB,WAAW;AAAA,MACzC,CAAC;AACD,aAAO,OAAO;A
AAA,IAChB,SAAS,OAAO;AACd,cAAQ,MAAM,yCAAyC,KAAK;AAC5D,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,cAAc,QAA+B;AACzD,QAAI,KAAK,UAAU,UAAU,EAAG;AAEhC,UAAM,qBAAqB,KAAK,UAAU;AAAA,MACxC,CAAC,GAAG,MAAM,IAAI,eAAe,CAAC;AAAA,MAC9B;AAAA,IACF;AAEA,QAAI,sBAAsB,OAAQ;AAElC,UAAM,WAAW,KAAK,UAAU,KAAK,aAAa;AAElD,QAAI;AACF,YAAM,SAAS,MAAMA,cAAa;AAAA,QAChC,OAAO,aAAa,mBAAmB;AAAA,QACvC,QAAQ,oBAAoB,QAAQ;AAAA,MACtC,CAAC;AAED,cAAQ;AAAA,QACN,oBAAoB,KAAK,UAAU,MAAM,eAAe,kBAAkB,iBAAiB,eAAe,OAAO,IAAI,CAAC;AAAA,MACxH;AAEA,WAAK,YAAY,CAAC,OAAO,IAAI;AAAA,IAC/B,SAAS,OAAO;AACd,cAAQ,MAAM,qCAAqC,KAAK;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,cAAc,KAA6B;AACjD,UAAM,UAAU,OAAO,IAAI,YAAY,WACnC,IAAI,UACJ,KAAK,UAAU,IAAI,OAAO;AAC9B,WAAO,eAAe,OAAO,IAAI;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eACJ,SAGe;AACf,UAAM,cAA4B;AAAA,MAChC,MAAM;AAAA,MACN;AAAA,IACF;AACA,UAAM,eAAe,OAAO,KAAK,WAAW,WAAW;AAAA,EACzD;AAAA,EAEA,MAAM,oBAAoB,UAA2C;AACnE,UAAM,eAAe,QAAQ,KAAK,WAAW,QAA0B;AAAA,EACzE;AAAA,EAEA,MAAM,WAMH;AACD,UAAM,WAAY,MAAM,eAAe,iBAAiB,KAAK,SAAS;AAEtE,WAAO;AAAA,MACL,cAAc,SAAS;AAAA,MACvB,cAAc,qBAAqB,QAAQ;AAAA,MAC3C,iBAAiB,sBAAsB,QAAQ;AAAA,MAC/C,YAAY,KAAK,UAAU,SAAS;AAAA,MACpC,cAAc,KAAK,UAAU;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,eAAe,gBAAgB,KAAK,SAAS;AACnD,SAAK,YAAY,CAAC;AAAA,EACpB;AACF;AASA,SAAS,yBACP,KACA,YACuB;AACvB,MAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG,QAAO;AAExC,QAAM,QAAS,IAAI,QAAkB,OAAO,CAAC,SAAS;AACpD,QAAI,KAAK,SAAS,iBAAiB,WAAW,IAAI,KAAK,UAAU,EAAG,QAAO;AAC3E,QAAI,KAAK,SAAS,eAAe,WAAW,IAAI,KAAK,UAAU,EAAG,QAAO;AACzE,WAAO;AAAA,EACT,CAAC;AAED,MAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,SAAO,EAAE,GAAG,KAAK,SAAS,MAAM;AAClC;AAQO,SAAS,kBAAkB,UAA8C;AAE9E,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAEtC,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,EAAG;AACjC,eAAW,QAAQ,IAAI,SAAkB;AACvC,UAAI,KAAK,SAAS,eAAe,KAAK,WAAY,aAAY,IAAI,KAAK,UAAU;AACjF,UAAI,KAAK,SAAS,iBAAiB,KAAK,WAAY,eAAc,IAAI,KAAK,UAAU;AAAA,IACvF;AAAA,EACF;AAGA,QAAM,gBAAgB,IAAI,IAAI,CAAC,GAAG,WAAW,EAAE,OAAO,CAAC,OAAO,CAAC,cAAc,IAAI,E
AAE,CAAC,CAAC;AACrF,QAAM,kBAAkB,IAAI,IAAI,CAAC,GAAG,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAEvF,MAAI,cAAc,SAAS,KAAK,gBAAgB,SAAS,EAAG,QAAO;AAEnE,MAAI,cAAc,OAAO,GAAG;AAC1B,YAAQ,KAAK,0BAA0B,cAAc,IAAI,gDAAgD;AAAA,EAC3G;AACA,MAAI,gBAAgB,OAAO,GAAG;AAC5B,YAAQ,KAAK,0BAA0B,gBAAgB,IAAI,gDAAgD;AAAA,EAC7G;AAEA,QAAM,WAA6B,CAAC;AAEpC,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,GAAG;AAC/B,eAAS,KAAK,GAAG;AACjB;AAAA,IACF;AAEA,UAAM,QAAS,IAAI,QAAkB,OAAO,CAAC,SAAS;AACpD,UAAI,KAAK,SAAS,eAAe,cAAc,IAAI,KAAK,UAAU,EAAG,QAAO;AAC5E,UAAI,KAAK,SAAS,iBAAiB,gBAAgB,IAAI,KAAK,UAAU,EAAG,QAAO;AAChF,aAAO;AAAA,IACT,CAAC;AAED,QAAI,MAAM,WAAW,EAAG;AACxB,aAAS,KAAK,EAAE,GAAG,KAAK,SAAS,MAAM,CAAmB;AAAA,EAC5D;AAEA,SAAO;AACT;;;AzBxaA;AAGA,IAAM,uBAAuB,IAAI;AACjC,IAAM,qBAAqB,IAAI;AAE/B,SAAS,uBAAuB,OAAyD;AACvF,QAAM,MAAM,EAAE,GAAG,MAAM;AACvB,aAAW,OAAO,CAAC,WAAW,cAAc,YAAY,GAAY;AAClE,UAAM,MAAM,IAAI,GAAG;AACnB,QAAI,OAAO,QAAQ,YAAY,IAAI,SAAS,sBAAsB;AAChE,UAAI,GAAG,IAAI,GAAG,IAAI,MAAM,GAAG,kBAAkB,CAAC;AAAA;AAC9C,UAAI,GAAG,GAAG,WAAW,IAAI;AACzB,UAAI,GAAG,GAAG,QAAQ,IAAI,IAAI;AAAA,IAC5B;AAAA,EACF;AACA,SAAO;AACT;AAGA,IAAM,oBAAoB,oBAAI,IAI3B;AAkDH,SAAS,mBAAmB,MAAsB;AAChD,QAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,MAAI,aAAa,MAAM,KAAK,WAAW,OAAO,GAAG;AAC/C,WAAO,KAAK,MAAM,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AAKO,IAAM,QAAN,MAAM,OAAM;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA,mBAA+C,oBAAI,IAAI;AAAA,EAEvD,YAAY,SAAkB,SAAyB,OAAgB;AAC7E,SAAK,UAAU;AACf,SAAK,UAAU;AACf,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,yBAAyB,SAElB;AACnB,UAAM,SAAS,UAAU;AACzB,WAAO,YAAY;AAAA,MACjB,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,gBAAgB,QAAQ,iBACpB,CAAC,aAAa,QAAQ,eAAgB,EAAE,UAAU,QAAQ,MAAM,SAAS,CAAC,IAC1E;AAAA,MACJ,qBAAqB,QAAQ,iBACzB,CAAC,aAAa,QAAQ,eAAgB,EAAE,UAAU,cAAc,MAAM,SAAS,CAAC,IAChF;AAAA,MACJ,kBAAkB,QAAQ,iBACtB,CAAC,aAAa,QAAQ,eAAgB,EAAE,UAAU,iBAAiB,MAAM,SAAS,CAAC,IACnF;AAAA,IACN,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,OAAO,UAAwB,CAAC,GAAmB;AAC9D,UAAM,SAAS,UAAU;AAGzB,QAAI;AAEJ,QAAI,QAAQ,WAAW;AACr
B,YAAM,WAAW,MAAM,eAAe,QAAQ,QAAQ,SAAS;AAC/D,UAAI,CAAC,UAAU;AACb,cAAM,IAAI,MAAM,sBAAsB,QAAQ,SAAS,EAAE;AAAA,MAC3D;AACA,gBAAU;AAAA,IACZ,OAAO;AACL,gBAAU,MAAM,eAAe,OAAO;AAAA,QACpC,MAAM,QAAQ;AAAA,QACd,kBAAkB,QAAQ,oBAAoB,OAAO;AAAA,QACrD,OAAO,QAAQ,SAAS,OAAO;AAAA,QAC/B,QAAQ,QAAQ;AAAA,MAClB,CAAC;AAAA,IACH;AAGA,UAAM,UAAU,IAAI,eAAe;AAAA,MACjC,WAAW,QAAQ;AAAA,MACnB,SAAS,QAAQ,SAAS,OAAO;AAAA,MACjC,iBAAiB,OAAO,SAAS,YAAY;AAAA,MAC7C,oBAAoB,OAAO,SAAS,sBAAsB;AAAA,MAC1D,eAAe,OAAO,SAAS,iBAAiB;AAAA,IAClD,CAAC;AAGD,UAAM,QAAQ,MAAM,YAAY;AAAA,MAC9B,WAAW,QAAQ;AAAA,MACnB,kBAAkB,QAAQ;AAAA,MAC1B,mBAAmB,OAAO;AAAA,IAC5B,CAAC;AAED,WAAO,IAAI,OAAM,SAAS,SAAS,KAAK;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,wBACN,QACA,aAC2I;AAC3I,QAAI,CAAC,eAAe,YAAY,WAAW,GAAG;AAC5C,aAAO;AAAA,IACT;AAGA,UAAM,eAAiJ,CAAC;AAIxJ,UAAM,yBAAyB,YAC5B,IAAI,CAAC,GAAG,MAAM;AACb,YAAM,OAAO,EAAE,YAAY,cAAc,IAAI,CAAC;AAC9C,YAAM,YAAY,EAAE,SAAS,UAAU,UAAU;AACjD,YAAM,WAAW,EAAE,aAAa;AAChC,aAAO,GAAG,IAAI,CAAC,KAAK,SAAS,MAAM,IAAI,eAAe,QAAQ;AAAA,IAChE,CAAC,EACA,KAAK,IAAI;AAEZ,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN,MAAM;AAAA,EAA2F,sBAAsB;AAAA;AAAA;AAAA,IACzH,CAAC;AAGD,QAAI,QAAQ;AACV,mBAAa,KAAK,EAAE,MAAM,QAAQ,MAAM;AAAA;AAAA,EAAqB,MAAM,GAAG,CAAC;AAAA,IACzE;AAGA,eAAW,cAAc,aAAa;AACpC,UAAI,WAAW,SAAS,SAAS;AAC/B,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,mBAAmB,WAAW,IAAI;AAAA,UACzC,WAAW,WAAW;AAAA,UACtB,UAAU,WAAW;AAAA,UACrB,WAAW,WAAW;AAAA,QACxB,CAAC;AAAA,MACH,OAAO;AACL,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,MAAM,mBAAmB,WAAW,IAAI;AAAA,UACxC,WAAW,WAAW,aAAa;AAAA,UACnC,UAAU,WAAW;AAAA,UACrB,WAAW,WAAW;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,SAAsD;AACjE,UAAM,SAAS,UAAU;AAGzB,UAAM,cAAc,KAAK,wBAAwB,QAAQ,QAAQ,QAAQ,WAAW;AAGpF,QAAI,CAAC,QAAQ,qBAAqB;AAChC,WAAK,QAAQ,eAAe,WAAW;AAAA,IACzC;AAGA,UAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,QAAQ;AAG3D,UAAM,eAAe,MAAM,kBAAkB;AAAA,MAC3C,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MA
C1B,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,OAAO;AAAA;AAAA,MAEzB,aAAa,CAAC;AAAA,IAChB,CAAC;AAGD,UAAM,WAAW,MAAM,KAAK,QAAQ,YAAY;AAGhD,UAAM,QAAQ,QAAQ,iBAClB,MAAM,KAAK,yBAAyB,EAAE,gBAAgB,QAAQ,eAAe,CAAC,IAC9E,KAAK;AAGT,UAAM,eAAe,KAAK,sBAAsB,SAAS,KAAK;AAG9D,UAAM,eAAe,iBAAiB,KAAK,QAAQ,KAAK;AACxD,UAAM,SAASC,YAAW;AAAA,MACxB,OAAO,aAAa,KAAK,QAAQ,KAAK;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA,OAAO;AAAA,MACP,UAAUC,aAAY,GAAG;AAAA;AAAA,MAEzB,aAAa,QAAQ;AAAA;AAAA,MAErB,iBAAiB,eACb;AAAA,QACE,WAAW;AAAA,UACT,eAAe;AAAA,UACf,UAAU;AAAA,YACR,MAAM;AAAA,YACN,cAAc;AAAA,UAChB;AAAA,QACF;AAAA,MACF,IACA;AAAA,MACJ,cAAc,OAAO,SAAS;AAC5B,gBAAQ,eAAe,IAAW;AAAA,MACpC;AAAA,MACA,SAAS,CAAC,EAAE,MAAM,MAAM;AACtB,gBAAQ,UAAU,EAAE,MAAM,CAAC;AAAA,MAC7B;AAAA,IACF,CAAC;AAGD,UAAM,uBAAuB,YAAY;AACvC,YAAM,SAAS,MAAM;AACrB,YAAM,WAAW,MAAM,OAAO;AAC9B,YAAM,mBAAmB,SAAS;AAClC,WAAK,QAAQ,oBAAoB,gBAAgB;AAAA,IACnD;AAEA,WAAO;AAAA,MACL,WAAW,KAAK,QAAQ;AAAA,MACxB;AAAA,MACA,kBAAkB,MAAM,KAAK,iBAAiB;AAAA,MAC9C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,SAAuF;AAC/F,UAAM,SAAS,UAAU;AAGzB,SAAK,QAAQ,eAAe,QAAQ,MAAM;AAG1C,UAAM,eAAe,MAAM,kBAAkB;AAAA,MAC3C,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,OAAO;AAAA,MACzB,aAAa,CAAC;AAAA,IAChB,CAAC;AAGD,UAAM,WAAW,MAAM,KAAK,QAAQ,YAAY;AAGhD,UAAM,QAAQ,QAAQ,iBAClB,MAAM,KAAK,yBAAyB,EAAE,gBAAgB,QAAQ,eAAe,CAAC,IAC9E,KAAK;AAGT,UAAM,eAAe,KAAK,sBAAsB,SAAS,KAAK;AAE9D,UAAM,eAAe,iBAAiB,KAAK,QAAQ,KAAK;AACxD,UAAM,SAAS,MAAMC,cAAa;AAAA,MAChC,OAAO,aAAa,KAAK,QAAQ,KAAK;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA,OAAO;AAAA,MACP,UAAUD,aAAY,GAAG;AAAA;AAAA,MAEzB,iBAAiB,eACb;AAAA,QACE,WAAW;AAAA,UACT,UAAU;AAAA,YACR,MAAM;AAAA,YACN,cAAc;AAAA,UAChB;AAAA,QACF;AAAA,MACF,IACA;AAAA,IACN,CAAC;AAGD,UAAM,mBAAmB,OAAO,SAAS;AACzC,SAAK,QAAQ,oBAAoB,gBAAgB;AAEjD,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,QAAQ,SAUwF;AACpG,UAAM,SAAS,UAAU;AACzB,UAAM,gBAAgB,QAAQ,WAAW,iBAAiB;AAC1D,UAAM,aAAa,QAAQ,WAAW;AAEtC,UAAM,cAAc,CAAC,MAA4B,SAAkB;AACjE,UAAI,CAAC,WAAY;AA
CjB,kBAAY,YAAY;AAAA,QACtB;AAAA,QACA,QAAQ,KAAK,QAAQ;AAAA,QACrB,WAAW,KAAK,QAAQ;AAAA,QACxB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,aAAsD,EAAE,QAAQ,KAAK;AAC3E,UAAM,aAAa,CAAC,WAAiC;AACnD,iBAAW,SAAS;AAAA,IACtB;AAGA,QAAI,eAAsE;AAC1E,UAAM,YAAY,KAAK,QAAQ;AAE/B,UAAM,OAAO,QAAQ;AAErB,UAAM,sBAAsB,CAAC,aAA+B;AAC1D,cAAQ,iBAAiB,EAAE,UAAU,QAAQ,MAAM,SAAS,CAAC;AAC7D,UAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,UAAU,QAAQ,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAG1G,YAAM,OAAO,SAAS;AACtB,UAAI,QAAQ,SAAS,WAAW,WAAW;AACzC,kFAAqC,KAAK,CAAC,EAAE,kBAAAE,kBAAiB,MAAM;AAClE,gBAAM,QAAQA,kBAAiB,WAAW,IAAI;AAC9C,cAAI,CAAC,cAAc;AACjB,8EAAiC,KAAK,CAAC,EAAE,eAAAC,eAAc,MAAM;AAC3D,6BAAe,IAAIA,eAAc,SAAS;AAC1C,2BAAa,MAAM;AAAA,YACrB,CAAC;AAAA,UACH;AACA,cAAI,MAAM,cAAc,OAAO,MAAM,GAAG;AACtC,kBAAM,GAAG,SAAS,CAAC,UAAU;AAC3B,4BAAc,SAAS,KAAK;AAC5B,kBAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,MAAM,MAAM,MAAM,UAAU,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,cAAC,CAAC;AAAA,YACtH,CAAC;AACD,kBAAM,GAAG,UAAU,CAAC,MAAW;AAC7B,kBAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,kBAAkB,GAAG,EAAE,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,cAAC,CAAC;AAAA,YACjF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,YAAY,MAAM,YAAY;AAAA,MAClC,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,gBAAgB;AAAA,MAChB,qBAAqB,CAAC,aAAa;AACjC,gBAAQ,iBAAiB,EAAE,UAAU,cAAc,MAAM,SAAS,CAAC;AACnE,YAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,UAAU,cAAc,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,QAAC,CAAC;AAAA,MAClH;AAAA,MACA,kBAAkB,CAAC,aAAa;AAC9B,gBAAQ,iBAAiB,EAAE,UAAU,iBAAiB,MAAM,SAAS,CAAC;AACtE,YAAI,KAAM,MAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,UAAU,iBAAiB,MAAM,SAAS,CAAC,CAAC,EAAE,MAAM,MAAM;AAAA,QAAC,CAAC;AAAA,MACrH;AAAA,MACA,WAAW;AAAA,QACT,cAAc,QAAQ,WAAW;AAAA,QACjC;AAAA,MACF;AAAA,IACF,CAAC;AAGD,UAAM,mBAAmB,MAAM,kBAAkB;AAAA,MAC/C,kBAAkB,KAAK,QAAQ;AAAA,MAC/B,mBAAmB,OAAO;AAAA,MAC1B,WAAW,KAAK,QAAQ;AAAA,MACxB,kBAAkB,OAAO;AAAA,MACzB,aAAa,CAAC;AAAA,IAChB,CAAC;AACD,UAAM,eAAe,wBAAwB,QAAQ,WAAW,YAAY;AAC5E,UAAM,eAAe,GAAG,gBAAgB;AAAA;AAAA
,EAAO,YAAY;AAE3D,gBAAY,gBAAgB,EAAE,QAAQ,QAAQ,OAAO,CAAC;AAEtD,QAAI,MAAM;AACR,YAAM,KAAK,KAAK,UAAU,EAAE,MAAM,qBAAqB,MAAM,EAAE,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,SAAS,QAAQ,OAAO,EAAE,CAAC,CAAC;AAAA,IACvH;AAGA,UAAM,KAAK,QAAQ,eAAe,QAAQ,MAAM;AAEhD,QAAI,YAAY;AAEhB,WAAO,YAAY,eAAe;AAChC;AAEA,UAAI,QAAQ,aAAa,SAAS;AAChC,cAAM,cAAc;AACpB,oBAAY,eAAe,EAAE,QAAQ,UAAU,OAAO,aAAa,YAAY,UAAU,CAAC;AAC1F,eAAO,EAAE,QAAQ,UAAU,OAAO,aAAa,YAAY,UAAU;AAAA,MACvE;AAEA,YAAM,WAAW,MAAM,KAAK,QAAQ,YAAY;AAChD,YAAM,eAAe,iBAAiB,KAAK,QAAQ,KAAK;AAGxD,UAAI,MAAM;AACR,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,IAAI,CAAC,GAAG,CAAC,CAAC;AAAA,MAC9E;AAEA,UAAI,cAAc;AAClB,UAAI,SAAS,QAAQ,KAAK,IAAI,CAAC;AAC/B,UAAI,cAAc,aAAa,KAAK,IAAI,CAAC;AACzC,UAAI,mBAAmB;AACvB,YAAM,iBAAiB,oBAAI,IAAY;AAEvC,YAAM,aAAaJ,YAAW;AAAA,QAC5B,OAAO,aAAa,KAAK,QAAQ,KAAK;AAAA,QACtC,QAAQ;AAAA,QACR;AAAA,QACA,OAAO;AAAA,QACP,UAAUC,aAAY,GAAG;AAAA,QACzB,aAAa,QAAQ;AAAA,QACrB,iBAAiB,eACb;AAAA,UACE,WAAW;AAAA,YACT,eAAe;AAAA,YACf,UAAU,EAAE,MAAM,WAAW,cAAc,IAAM;AAAA,UACnD;AAAA,QACF,IACA;AAAA,QACJ,cAAc,OAAO,SAAc;AACjC,kBAAQ,eAAe,IAAI;AAC3B,sBAAY,sBAAsB,EAAE,WAAW,MAAM,KAAK,KAAK,CAAC;AAChE,cAAI,MAAM;AACR,gBAAI,aAAa;AACf,oBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAC3D,4BAAc;AACd,uBAAS,QAAQ,KAAK,IAAI,CAAC;AAAA,YAC7B;AACA,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AAAA,UACpD;AAAA,QACF;AAAA,MACF,CAAC;AAGD,uBAAiB,QAAQ,WAAW,YAAY;AAC9C,YAAI,KAAK,SAAS,cAAc;AAC9B,cAAI,MAAM;AACR,gBAAI,CAAC,aAAa;AAChB,oBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,OAAO,CAAC,CAAC;AAC7D,4BAAc;AAAA,YAChB;AACA,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UACjF;AAAA,QACF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,YAAY,CAAC,CAAC;AACvE,+BAAmB;AAAA,UACrB;AAAA,QACF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,aAAa,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UAC3F;AAAA,QACF,WAAW,KAAK,SAAS,iBAAiB;AACxC,cAAI,QAAQ,kBAAkB;AAC5B,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AACrE,+BAAm
B;AACnB,0BAAc,aAAa,KAAK,IAAI,CAAC;AAAA,UACvC;AAAA,QACF,WAAY,KAAa,SAAS,6BAA6B;AAC7D,cAAI,MAAM;AACR,kBAAM,IAAI;AACV,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,oBAAoB,YAAY,EAAE,YAAY,UAAU,EAAE,SAAS,CAAC,CAAC;AACvG,2BAAe,IAAI,EAAE,UAAU;AAAA,UACjC;AAAA,QACF,WAAY,KAAa,SAAS,mBAAmB;AACnD,cAAI,MAAM;AACR,kBAAM,IAAI;AACV,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,oBAAoB,YAAY,EAAE,YAAY,eAAe,EAAE,cAAc,CAAC,CAAC;AAAA,UACnH;AAAA,QACF,WAAW,KAAK,SAAS,aAAa;AACpC,cAAI,MAAM;AACR,gBAAI,CAAC,eAAe,IAAI,KAAK,UAAU,GAAG;AACxC,oBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,oBAAoB,YAAY,KAAK,YAAY,UAAU,KAAK,SAAS,CAAC,CAAC;AAC7G,6BAAe,IAAI,KAAK,UAAU;AAAA,YACpC;AACA,kBAAM,YAAY,KAAK,aAAa,gBAAgB,KAAK,SAAS,OAAO,KAAK,UAAU,WACpF,uBAAuB,KAAK,KAAgC,IAC5D,KAAK;AACT,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,wBAAwB,YAAY,KAAK,YAAY,UAAU,KAAK,UAAU,OAAO,UAAU,CAAC,CAAC;AAAA,UACrI;AAAA,QACF,WAAW,KAAK,SAAS,eAAe;AACtC,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,yBAAyB,YAAY,KAAK,YAAY,QAAQ,KAAK,OAAO,CAAC,CAAC;AAAA,UAChH;AAAA,QACF,WAAW,KAAK,SAAS,SAAS;AAChC,kBAAQ,MAAM,sBAAsB,KAAK,KAAK;AAC9C,cAAI,MAAM;AACR,kBAAM,KAAK,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,KAAK,EAAE,CAAC,CAAC;AAAA,UAC7E;AAAA,QACF;AAAA,MACF;AAGA,UAAI,QAAQ,aAAa;AACvB,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAAA,MAC7D;AACA,UAAI,QAAQ,kBAAkB;AAC5B,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AAAA,MACvE;AAGA,YAAM,eAAe,MAAM,WAAW;AACtC,YAAM,mBAAmB,aAAa;AACtC,YAAM,KAAK,QAAQ,oBAAoB,gBAAgB;AAEvD,YAAM,aAAa,MAAM,WAAW;AACpC,YAAM,cAAc,MAAM,WAAW;AAGrC,UAAI,YAAY;AACd,gBAAQ,SAAS,UAAU;AAC3B,oBAAY,gBAAgB,EAAE,WAAW,MAAM,WAAW,CAAC;AAAA,MAC7D;AAGA,iBAAW,QAAQ,aAAa;AAC9B,YAAK,KAAa,WAAW;AAC3B,qBAAW,MAAO,KAAa,WAAW;AACxC,oBAAQ,aAAa,EAAE,YAAY,GAAG,YAAY,UAAU,GAAG,UAAU,OAAO,GAAG,KAAK,CAAC;AACzF,wBAAY,kBAAkB,EAAE,WAAW,UAAU,GAAG,UAAU,YAAY,GAAG,YAAY,OAAO,GAAG,KAAK,CAAC;AAAA,UAC/G;AAAA,QACF;AACA,YAAK,KAAa,aAAa;AAC7B,qBAAW,MAAO,KAAa,aAAa;AAC1C,oBAAQ,eAAe,EAAE,YAAY,GAAG,YAAY,UAAU,GAAG,UAAU,QAAQ,GAAG,OAAO,CAAC;AAC9F,wBAAY,oBAAoB,EAAE,WAAW,UAAU,GAAG,UAAU,YAAY,GAAG,YAAY,QAAQ,GAAG,OAAO,CAAC;AAAA,UACpH;AAAA,QACF;AAAA
,MACF;AAGA,UAAI,WAAW,QAAQ;AACrB,cAAM,MAAM,WAAW;AACvB,cAAM,cAAc,IAAI;AAGxB,YAAI;AACJ,YAAI,gBAAgB,eAAe,IAAI,UAAU,OAAO,IAAI,WAAW,UAAU;AAC/E,gBAAM,YAAY,IAAI;AACtB,gBAAM,YAAY,MAAM,QAAQ,UAAU,KAAK,IAAI,UAAU,QAAoB,CAAC;AAClF,cAAI,UAAU,SAAS,GAAG;AACxB,uBAAW,MAAM,KAAK,gBAAgB,SAAS;AAAA,UACjD;AAAA,QACF;AAGA,cAAM,gBAAgB,MAAM,KAAK,oBAAoB,YAAY;AAEjE,cAAM,cAAc,CAAC,GAAI,YAAY,CAAC,GAAI,GAAG,aAAa;AAE1D,cAAM,YAAY,gBAAgB,cAAc,mBAAmB;AACnE,oBAAY,WAAmC;AAAA,UAC7C,QAAQ;AAAA,UACR,QAAQ,IAAI;AAAA,UACZ,OAAO,IAAI;AAAA,UACX,YAAY;AAAA,UACZ,UAAU,YAAY,SAAS,IAAI,cAAc;AAAA,UACjD,sBAAsB,cAAc,SAAS,IAAI,gBAAgB;AAAA,QACnE,CAAC;AAGD,cAAMI,eAA0B;AAAA,UAC9B,GAAG,QAAQ;AAAA,UACX,QAAQ;AAAA,UACR,QAAQ,IAAI;AAAA,UACZ,OAAO,IAAI;AAAA,UACX,YAAY;AAAA,QACd;AACA,cAAM,eAAe,OAAO,KAAK,QAAQ,IAAI;AAAA,UAC3C,QAAQ,EAAE,GAAG,KAAK,QAAQ,QAAQ,MAAMA,aAAY;AAAA,QACtD,CAAC;AAED,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,QAAQ,IAAI;AAAA,UACZ,OAAO,IAAI;AAAA,UACX,YAAY;AAAA,QACd;AAAA,MACF;AAGA,YAAM,qBAAqB;AAC3B,UAAI,MAAM;AACR,cAAM,KAAK,KAAK,UAAU,EAAE,MAAM,qBAAqB,MAAM,EAAE,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,SAAS,mBAAmB,EAAE,CAAC,CAAC;AAAA,MAC3H;AACA,YAAM,KAAK,QAAQ,eAAe,kBAAkB;AAAA,IACtD;AAGA,UAAM,eAAe,gCAAgC,aAAa;AAClE,UAAM,uBAAuB,MAAM,KAAK,oBAAoB,YAAY;AACxE,gBAAY,eAAe;AAAA,MACzB,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,sBAAsB,qBAAqB,SAAS,IAAI,uBAAuB;AAAA,IACjF,CAAC;AAED,UAAM,cAA0B;AAAA,MAC9B,GAAG,QAAQ;AAAA,MACX,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,YAAY;AAAA,IACd;AACA,UAAM,eAAe,OAAO,KAAK,QAAQ,IAAI;AAAA,MAC3C,QAAQ,EAAE,GAAG,KAAK,QAAQ,QAAQ,MAAM,YAAY;AAAA,IACtD,CAAC;AAED,WAAO,EAAE,QAAQ,UAAU,OAAO,cAAc,YAAY,UAAU;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,oBACZ,UACmB;AAEnB,QAAI;AACF,YAAM,EAAE,cAAAC,cAAa,IAAI,MAAM;AAC/B,MAAAA,cAAa,KAAK,QAAQ,EAAE;AAAA,IAC9B,QAAQ;AAAA,IAAC;AAET,QAAI,CAAC,YAAY,SAAS,eAAe,GAAG;AAC1C,gBAAU,MAAM;AAChB,aAAO,CAAC;AAAA,IACV;AAEA,aAAS,KAAK;AACd,QAAI;AACF,YAAM,EAAE,oBAAAC,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,UAAI,CAACD,oBAAmB,GAAG;AAAE,iBAAS,MAAM;AAAG,eAAO,CAAC;AAAA,MAAG;AAE1D,YAAM,SAAS,MAAM,SAAS,OAAO;AACrC,eAAS,MAAM;AACf,UAAI,CAAC,OAAQ,QAAO,
CAAC;AAErB,YAAM,EAAE,UAAAE,YAAU,QAAAC,QAAO,IAAI,MAAM,OAAO,aAAkB;AAE5D,YAAM,aAAa,MAAMF,gBAAe;AAAA,QACtC,KAAK,QAAQ;AAAA,QACb,qBAAqB,KAAK,IAAI,CAAC;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AAEA,YAAM,WAAW,MAAMC,WAAS,OAAO,IAAI;AAC3C,YAAM,MAAM,WAAW,WAAW;AAAA,QAChC,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,YAAY;AAAA,QACvC,MAAM;AAAA,MACR,CAAC;AACD,YAAMD,gBAAe,WAAW,WAAW,QAAQ,EAAE,WAAW,OAAO,UAAU,CAAC;AAElF,YAAM,SAAS,MAAMA,gBAAe,eAAe,WAAW,MAAM;AACpE,YAAME,QAAO,OAAO,IAAI,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAExC,cAAQ,IAAI,sCAAsC,OAAO,SAAS,SAAS;AAC3E,aAAO,CAAC,OAAO,WAAW;AAAA,IAC5B,SAAS,KAAU;AACjB,cAAQ,MAAM,8CAA8C,IAAI,OAAO;AACvE,eAAS,MAAM;AACf,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBAAgB,WAAwC;AACpE,QAAI;AACF,YAAM,EAAE,oBAAAH,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,UAAI,CAACD,oBAAmB,EAAG,QAAO,CAAC;AAEnC,YAAM,EAAE,UAAAE,WAAS,IAAI,MAAM,OAAO,aAAkB;AACpD,YAAM,EAAE,MAAAE,QAAM,UAAAC,UAAS,IAAI,MAAM,OAAO,MAAW;AAEnD,YAAM,OAAiB,CAAC;AAExB,iBAAW,YAAY,WAAW;AAChC,YAAI;AACF,gBAAM,WAAW,SAAS,WAAW,GAAG,IACpC,WACAD,OAAK,KAAK,QAAQ,kBAAkB,QAAQ;AAChD,gBAAM,WAAWC,UAAS,QAAQ;AAGlC,gBAAM,MAAM,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AACxD,gBAAM,UAAkC;AAAA,YACtC,KAAK;AAAA,YAAmB,MAAM;AAAA,YAAoB,KAAK;AAAA,YACvD,KAAK;AAAA,YAAc,IAAI;AAAA,YAAiB,MAAM;AAAA,YAC9C,KAAK;AAAA,YAAa,KAAK;AAAA,YAAc,MAAM;AAAA,YAC3C,KAAK;AAAA,YAAa,KAAK;AAAA,YAAiB,KAAK;AAAA,YAC7C,KAAK;AAAA,UACP;AACA,gBAAM,cAAc,QAAQ,GAAG,KAAK;AAEpC,gBAAM,aAAa,MAAMJ,gBAAe;AAAA,YACtC,KAAK,QAAQ;AAAA,YACb;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,gBAAM,WAAW,MAAMC,WAAS,QAAQ;AACxC,gBAAM,MAAM,WAAW,WAAW;AAAA,YAChC,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,YAAY;AAAA,YACvC,MAAM;AAAA,UACR,CAAC;AAED,gBAAMD,gBAAe,WAAW,WAAW,QAAQ,EAAE,WAAW,SAAS,OAAO,CAAC;AAEjF,gBAAM,eAAe,MAAMA,gBAAe,eAAe,WAAW,MAAM;AAC1E,eAAK,KAAK,aAAa,WAAW;AAElC,kBAAQ,IAAI,yBAAyB,QAAQ,KAAK,SAAS,MAAM,SAAS;AAAA,QAC5E,SAAS,KAAU;AACjB,kBAAQ,MAAM,gCAAgC,QAAQ,KAAK,IAAI,OAAO;AAAA,QACxE;AAAA,MACF;AAEA,aAAO;AAAA,IACT,SAAS,KAAU;AACjB,cAAQ,MAAM,8BAA8B,IAAI,OAAO;AACvD,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,SAA0B,
OAA0B;AAChF,UAAM,gBAAgB,KAAK,QAAQ;AACnC,UAAM,eAAwB,CAAC;AAC/B,UAAM,cAAc,SAAS,KAAK;AAElC,eAAW,CAAC,MAAM,YAAY,KAAK,OAAO,QAAQ,WAAW,GAAG;AAC9D,YAAM,gBAAgB,iBAAiB,MAAM,iBAAiB,MAAS;AAEvE,UAAI,CAAC,eAAe;AAClB,qBAAa,IAAI,IAAI;AACrB;AAAA,MACF;AAGA,mBAAa,IAAI,IAAIK,OAAK;AAAA,QACxB,aAAa,aAAa,eAAe;AAAA,QACzC,aAAc,aAAqB,eAAeC,IAAE,OAAO,CAAC,CAAC;AAAA,QAC7D,SAAS,OAAO,OAAgB,gBAAyC;AACvE,gBAAM,aAAa,YAAY,cAAcC,QAAO;AAGpD,gBAAM,YAAY,qBAAqB,OAAO;AAAA,YAC5C,WAAW,KAAK,QAAQ;AAAA,YACxB,UAAU;AAAA,YACV;AAAA,YACA;AAAA,YACA,kBAAkB;AAAA,YAClB,QAAQ;AAAA,UACV,CAAC;AAGD,eAAK,iBAAiB,IAAI,YAAY,MAAM,SAAS;AAGrD,kBAAQ,qBAAqB,MAAM,SAAS;AAG5C,gBAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,SAAS;AAG5D,gBAAM,WAAW,MAAM,IAAI,QAAiB,CAACC,cAAY;AACvD,8BAAkB,IAAI,YAAY,EAAE,SAAAA,WAAS,WAAW,KAAK,QAAQ,GAAG,CAAC;AAAA,UAC3E,CAAC;AAGD,gBAAM,eAAe,kBAAkB,IAAI,UAAU;AACrD,4BAAkB,OAAO,UAAU;AACnC,eAAK,iBAAiB,OAAO,UAAU;AAEvC,gBAAMC,QAAO,MAAM;AACnB,cAAI,CAAC,UAAU;AAEb,kBAAM,SAAS,cAAc,UAAU;AACvC,kBAAM,qBAAqB,OAAOA,MAAK,EAAE;AACzC,kBAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,QAAQ;AAE3D,mBAAO;AAAA,cACL,QAAQ;AAAA,cACR;AAAA,cACA,UAAU;AAAA,cACV;AAAA,cACA,SAAS,SAAS,IAAI,uCAAuC,MAAM;AAAA,YACrE;AAAA,UACF;AAGA,gBAAM,qBAAqB,QAAQA,MAAK,EAAE;AAC1C,gBAAM,eAAe,aAAa,KAAK,QAAQ,IAAI,QAAQ;AAE3D,cAAI;AACF,kBAAM,SAAS,MAAO,aAAqB,QAAQ,OAAO,WAAW;AACrE,kBAAM,qBAAqB,SAASA,MAAK,IAAI,MAAM;AACnD,mBAAO;AAAA,UACT,SAAS,OAAY;AACnB,kBAAM,qBAAqB,SAASA,MAAK,IAAI,MAAM,MAAM,OAAO;AAChE,kBAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,mBAA6C;AACjD,WAAO,MAAM,KAAK,KAAK,iBAAiB,OAAO,CAAC;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,YAAiD;AAE7D,UAAM,WAAW,kBAAkB,IAAI,UAAU;AACjD,QAAI,UAAU;AACZ,eAAS,QAAQ,IAAI;AACrB,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,UAAM,gBAAgB,MAAM,qBAAqB,oBAAoB,KAAK,QAAQ,EAAE;AACpF,UAAM,YAAY,cAAc,KAAK,CAAC,MAAqB,EAAE,eAAe,UAAU;AAEtF,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,sCAAsC,UAAU,EAAE;AAAA,IACpE;AAGA,UAAM,qBAAqB,QAAQ,UAAU,EAAE;AAC/C,WAAO,EAAE,UAAU,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,YAAoB,QAA8C;AAE7E,UAAM,WAAW,kBAAkB
,IAAI,UAAU;AACjD,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,eAAS,QAAQ,KAAK;AACtB,aAAO,EAAE,UAAU,KAAK;AAAA,IAC1B;AAGA,UAAM,gBAAgB,MAAM,qBAAqB,oBAAoB,KAAK,QAAQ,EAAE;AACpF,UAAM,YAAY,cAAc,KAAK,CAAC,MAAqB,EAAE,eAAe,UAAU;AAEtF,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,sCAAsC,UAAU,EAAE;AAAA,IACpE;AAGA,UAAM,qBAAqB,OAAO,UAAU,EAAE;AAC9C,WAAO,EAAE,UAAU,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBAAgD;AACpD,WAAO,qBAAqB,oBAAoB,KAAK,QAAQ,EAAE;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB;AAChB,WAAO,KAAK,QAAQ,SAAS;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,eAAqB;AACnB,SAAK,QAAQ,MAAM;AAAA,EACrB;AACF;;;ADz/BA;;;A8BMA,IAAM,uBAAuB,oBAAI,IAA6B;AAKvD,SAAS,mBAAmB,WAAmB,SAAgC;AACpF,uBAAqB,IAAI,WAAW,OAAO;AAC7C;AAKO,SAAS,mBAAmB,WAA2C;AAC5E,uBAAqB;AACrB,SAAO,qBAAqB,IAAI,SAAS,KAAK;AAChD;AAKO,SAAS,uBAA6B;AAC3C,QAAM,MAAM,KAAK,IAAI;AACrB,aAAW,CAAC,WAAW,GAAG,KAAK,sBAAsB;AACnD,QAAI,MAAM,IAAI,cAAc,QAAQ,IAAI,KAAK,KAAM;AACjD,2BAAqB,OAAO,SAAS;AAAA,IACvC;AAAA,EACF;AACF;;;A9B7BA,IAAM,WAAW,IAAI,KAAK;AAM1B,IAAM,oBAAoB,oBAAI,IAA+C;AAG7E,SAAS,uBAAuB;AAC9B,QAAM,MAAM,KAAK,IAAI;AACrB,aAAW,CAAC,WAAW,KAAK,KAAK,mBAAmB;AAClD,QAAI,MAAM,MAAM,UAAU,QAAQ,IAAI,IAAI,KAAK,KAAM;AACnD,wBAAkB,OAAO,SAAS;AAAA,IACpC;AAAA,EACF;AACF;AAGA,IAAM,sBAAsBC,IAAE,OAAO;AAAA,EACnC,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AAAA,EACtC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,eAAeA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAAE,SAAS;AAC5D,CAAC;AAED,IAAM,wBAAwBA,IAAE,OAAO;AAAA,EACrC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,QAAQA,IAAE,OAAO,EAAE,SAAS;AAC9B,CAAC;AAED,IAAM,sBAAsBA,IAAE,OAAO;AAAA,EACnC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAC7B,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,SAAS,qBAAqB;AAAA,EACzC,OAAO,MAAM;AACX,UAAM,QAAQ,EAAE,IAAI,MAAM,OAAO;AACjC,UAAM,QAAQ,SAAS,MAAM,SAAS,IAAI;AAC1C,UAAM,SAAS,SAAS,MAAM,UAAU,GAAG;AAE3C,UAAM,cAAc,MAAM,eAAe,KAAK,OAAO,MAAM;AAG3D,UAAM,yBAAyB,MAAM,QAAQ,IAAI,YAAY,IAAI,OAAO,MAAM;AAC5E,YAAM,eAAe,MAAM,oBAAoB,eAAe,EAAE,EAAE;AAClE,aAAO;AAAA,QACL,IAAI,EAAE;AAAA,QACN,MAAM,EAAE;AAAA,QACR,kBAAkB,EAAE;AAAA,QACpB,OAAO,EAAE;AAAA,QACT,QAAQ,EAAE;A
AAA,QACV,QAAQ,EAAE;AAAA,QACV,aAAa,CAAC,CAAC;AAAA,QACf,WAAW,EAAE,UAAU,YAAY;AAAA,QACnC,WAAW,EAAE,UAAU,YAAY;AAAA,MACrC;AAAA,IACF,CAAC,CAAC;AAEF,WAAO,EAAE,KAAK;AAAA,MACZ,UAAU;AAAA,MACV,OAAO,YAAY;AAAA,MACnB;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAGA,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,mBAAmB;AAAA,EACtC,OAAO,MAAM;AACX,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,SAAS,UAAU;AAEzB,UAAM,QAAQ,MAAM,MAAM,OAAO;AAAA,MAC/B,MAAM,KAAK;AAAA,MACX,kBAAkB,KAAK,oBAAoB,OAAO;AAAA,MAClD,OAAO,KAAK,SAAS,OAAO;AAAA,MAC5B,eAAe,KAAK,gBAAgB,EAAE,eAAe,KAAK,cAAc,IAAI;AAAA,IAC9E,CAAC;AAED,UAAM,UAAU,MAAM,WAAW;AAEjC,WAAO,EAAE,KAAK;AAAA,MACZ,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,kBAAkB,QAAQ;AAAA,MAC1B,OAAO,QAAQ;AAAA,MACf,QAAQ,QAAQ;AAAA,MAChB,WAAW,QAAQ,UAAU,YAAY;AAAA,IAC3C,GAAG,GAAG;AAAA,EACR;AACF;AAGA,SAAS,IAAI,QAAQ,OAAO,MAAM;AAChC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAE/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,eAAe,OAAO,YAAY;AACtC,UAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,WAAW,GAAG,CAAC;AAClD,WAAO,MAAM,gBAAgB;AAAA,EAC/B,GAAG;AAEH,QAAM,QAAQ,MAAM,YAAY,aAAa,EAAE;AAC/C,QAAM,mBAAmB,MAAM,qBAAqB,oBAAoB,EAAE;AAE1E,SAAO,EAAE,KAAK;AAAA,IACZ,IAAI,QAAQ;AAAA,IACZ,MAAM,QAAQ;AAAA,IACd,kBAAkB,QAAQ;AAAA,IAC1B,OAAO,QAAQ;AAAA,IACf,QAAQ,QAAQ;AAAA,IAChB,QAAQ,QAAQ;AAAA,IAChB,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,SAAS;AAAA,IACT,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,MACvB,IAAI,EAAE;AAAA,MACN,SAAS,EAAE;AAAA,MACX,QAAQ,EAAE;AAAA,MACV,OAAO,EAAE;AAAA,IACX,EAAE;AAAA,IACF,kBAAkB,iBAAiB,IAAI,CAAC,OAAO;AAAA,MAC7C,IAAI,EAAE;AAAA,MACN,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,MACZ,OAAO,EAAE;AAAA,IACX,EAAE;AAAA,EACJ,CAAC;AACH,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,SAAS,mBAAmB;AAAA,EACvC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,QAAQ,EAAE,IAAI,MAAM,OAAO;AACjC,UAAM,QAAQ,SAAS,MAAM,SAAS,KAAK;AAE3C,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,WAAW,MAAM,eAAe,mBAAmB,IAAI,KAAK;AAElE,WAAO,EA
AE,KAAK;AAAA,MACZ,WAAW;AAAA,MACX,UAAU,SAAS,IAAI,CAAC,OAAO;AAAA,QAC7B,IAAI,EAAE;AAAA,QACN,GAAG,EAAE;AAAA;AAAA,QACL,WAAW,EAAE,UAAU,YAAY;AAAA,MACrC,EAAE;AAAA,MACF,OAAO,SAAS;AAAA,IAClB,CAAC;AAAA,EACH;AACF;AAGA,SAAS,IAAI,cAAc,OAAO,MAAM;AACtC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,aAAa,MAAM,qBAAqB,aAAa,EAAE;AAE7D,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,YAAY,WAAW,IAAI,CAAC,OAAO;AAAA,MACjC,IAAI,EAAE;AAAA,MACN,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,MACZ,OAAO,EAAE;AAAA,MACT,QAAQ,EAAE;AAAA,MACV,QAAQ,EAAE;AAAA,MACV,kBAAkB,EAAE;AAAA,MACpB,OAAO,EAAE;AAAA,MACT,WAAW,EAAE,UAAU,YAAY;AAAA,MACnC,aAAa,EAAE,aAAa,YAAY;AAAA,IAC1C,EAAE;AAAA,IACF,OAAO,WAAW;AAAA,EACpB,CAAC;AACH,CAAC;AAGD,IAAM,sBAAsBA,IAAE,OAAO;AAAA,EACnC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,eAAeA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAAE,SAAS;AAC5D,CAAC;AAED,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,mBAAmB;AAAA,EACtC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAE/B,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,UAAM,UAAqE,CAAC;AAC5E,QAAI,KAAK,MAAO,SAAQ,QAAQ,KAAK;AACrC,QAAI,KAAK,SAAS,OAAW,SAAQ,OAAO,KAAK;AAGjD,QAAI,KAAK,kBAAkB,QAAW;AACpC,YAAM,iBAAiB,QAAQ,UAAU,CAAC;AAC1C,YAAM,wBAAwB,eAAe,iBAAiB,CAAC;AAC/D,cAAQ,SAAS;AAAA,QACf,GAAG;AAAA,QACH,eAAe;AAAA,UACb,GAAG;AAAA,UACH,GAAG,KAAK;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAEA,UAAM,iBAAiB,OAAO,KAAK,OAAO,EAAE,SAAS,IAChD,MAAM,eAAe,OAAO,IAAI,OAAO,KAAM,UAC9C;AAEJ,WAAO,EAAE,KAAK;AAAA,MACZ,IAAI,eAAe;AAAA,MACnB,MAAM,eAAe;AAAA,MACrB,OAAO,eAAe;AAAA,MACtB,QAAQ,eAAe;AAAA,MACvB,kBAAkB,eAAe;AAAA,MACjC,QAAQ,eAAe;AAAA,MACvB,WAAW,eAAe,UAAU,YAAY;AAAA,IAClD,CAAC;AAAA,EACH;AACF;AAGA,SAAS,OAAO,QAAQ,OAAO,MAAM;AACnC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAG3B,MAAI;AACF,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,SAAS;AACX,YAAM,cAAc,MAAW,aAAa;AAC5C,iBAAW,OAAO,aAAa;AAC7B,cAAM,OAA
O,MAAW,QAAQ,KAAK,QAAQ,gBAAgB;AAC7D,YAAI,QAAQ,KAAK,cAAc,IAAI;AACjC,gBAAW,aAAa,GAAG;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAAA,EACF,SAAS,GAAG;AAAA,EAEZ;AAGA,yBAAuB,EAAE;AAEzB,QAAM,UAAU,MAAM,eAAe,OAAO,EAAE;AAC9C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,GAAG,CAAC;AACrC,CAAC;AAGD,SAAS,KAAK,cAAc,OAAO,MAAM;AACvC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,WAAW,GAAG,CAAC;AAClD,QAAM,MAAM,aAAa;AAEzB,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,WAAW,GAAG,CAAC;AAChD,CAAC;AAMD,IAAM,qBAAqBA,IAAE,OAAO;AAAA,EAClC,MAAMA,IAAE,OAAO;AACjB,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,kBAAkB;AAAA,EACrC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,EAAE,KAAK,IAAI,EAAE,IAAI,MAAM,MAAM;AAGnC,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,sBAAkB,IAAI,IAAI,EAAE,MAAM,WAAW,oBAAI,KAAK,EAAE,CAAC;AAGzD,yBAAqB;AAErB,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,WAAW,GAAG,CAAC;AAAA,EAChD;AACF;AAGA,SAAS,IAAI,sBAAsB,OAAO,MAAM;AAC9C,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAG3B,uBAAqB;AAErB,QAAM,UAAU,kBAAkB,IAAI,EAAE;AACxC,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,iBAAiB,OAAO,MAAM,KAAK,CAAC;AAAA,EACtD;AAGA,oBAAkB,OAAO,EAAE;AAE3B,SAAO,EAAE,KAAK;AAAA,IACZ,iBAAiB;AAAA,IACjB,MAAM,QAAQ;AAAA,IACd,WAAW,QAAQ,UAAU,YAAY;AAAA,EAC3C,CAAC;AACH,CAAC;AAMD,IAAM,wBAAwBA,IAAE,OAAO;AAAA,EACrC,KAAKA,IAAE,OAAO;AAAA,EACd,MAAMA,IAAE,OAAO;AAAA,EACf,UAAUA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,aAAaA,IAAE,OAAO,EAAE,SAAS;AAAA,EACjC,cAAcA,IAAE,OAAO,EAAE,SAAS;AAAA,EAClC,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAGD,SAAS;AAAA,EACP;AAAA,EACA,WAAW,QAAQ,qBAAqB;AAAA,EACxC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,uBAAmB,IAAI;AAAA,MACrB,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,UAAU,KAAK,YAAY,KAAK;AAAA,MAChC,aAAa,KAAK;AAAA,MAClB,cAAc,KAAK;AAAA,MACnB,kBAAkB,KAAK;AAAA,MACvB,eAAe,oB
AAI,KAAK;AAAA,IAC1B,CAAC;AAED,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,WAAW,GAAG,CAAC;AAAA,EAChD;AACF;AAGA,SAAS,IAAI,yBAAyB,OAAO,MAAM;AACjD,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,MAAM,mBAAe,EAAE;AAC7B,MAAI,CAAC,KAAK;AACR,WAAO,EAAE,KAAK,EAAE,WAAW,OAAO,SAAS,KAAK,CAAC;AAAA,EACnD;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,SAAS;AAAA,MACP,KAAK,IAAI;AAAA,MACT,MAAM,IAAI;AAAA,MACV,UAAU,IAAI;AAAA,MACd,aAAa,IAAI;AAAA,MACjB,cAAc,IAAI;AAAA,MAClB,kBAAkB,IAAI;AAAA,MACtB,eAAe,IAAI,cAAc,YAAY;AAAA,IAC/C;AAAA,EACF,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,cAAc,OAAO,MAAM;AACtC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,QAAQ,MAAM,YAAY,aAAa,EAAE;AAC/C,QAAM,UAAU,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,SAAS;AAC9E,QAAM,aAAa,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,aAAa;AACrF,QAAM,YAAY,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,WAAW;AAClF,QAAM,YAAY,MAAM,OAAO,CAAC,MAA0B,EAAE,WAAW,WAAW;AAGlF,QAAM,WAAW,WAAW,CAAC,KAAK,QAAQ,CAAC,KAAK;AAEhD,SAAO,EAAE,KAAK;AAAA,IACZ,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,MACvB,IAAI,EAAE;AAAA,MACN,SAAS,EAAE;AAAA,MACX,QAAQ,EAAE;AAAA,MACV,OAAO,EAAE;AAAA,MACT,WAAW,EAAE,UAAU,YAAY;AAAA,MACnC,WAAW,EAAE,UAAU,YAAY;AAAA,IACrC,EAAE;AAAA,IACF,OAAO;AAAA,MACL,OAAO,MAAM;AAAA,MACb,SAAS,QAAQ;AAAA,MACjB,YAAY,WAAW;AAAA,MACvB,WAAW,UAAU;AAAA,MACrB,WAAW,UAAU;AAAA,IACvB;AAAA,IACA,UAAU,WAAW;AAAA,MACnB,IAAI,SAAS;AAAA,MACb,SAAS,SAAS;AAAA,MAClB,QAAQ,SAAS;AAAA,IACnB,IAAI;AAAA,EACN,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,oBAAoB,OAAO,MAAM;AAC5C,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,cAAc,MAAM,eAAe,EAAE;AAE3C,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,aAAa,YAAY,IAAI,CAAC,QAAQ;AAAA,MACpC,IAAI,GAAG;AAAA,MACP,iBAAiB,GAAG;AAAA,MACpB,SAAS,GAAG;AAAA,MACZ,WAAW,GAAG,UAAU,YAAY;AAAA,IACtC,EAAE;AAAA,IACF,OAAO,YAAY;AAAA,EACrB,CAAC;AACH,CAAC;AAGD,SAAS,KAAK,6BAA6B,OAAO,MAAM;AACtD,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,eAAe,EAAE,IAAI,MAAM,cAAc;AAE/C,QAAM,
UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AACvE,MAAI,cAAc;AAChB,WAAO,EAAE,KAAK;AAAA,MACZ,OAAO;AAAA,MACP,UAAU,aAAa;AAAA,IACzB,GAAG,GAAG;AAAA,EACR;AAEA,QAAM,SAAS,MAAM,mBAAmB,WAAW,YAAY;AAE/D,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,KAAK,EAAE,OAAO,OAAO,MAAM,GAAG,GAAG;AAAA,EAC5C;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA,eAAe,OAAO;AAAA,IACtB,cAAc,OAAO;AAAA,IACrB,iBAAiB,OAAO;AAAA,IACxB,oBAAoB,OAAO;AAAA,EAC7B,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,aAAa,OAAO,MAAM;AACrC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAM,eAAe,EAAE;AAEpC,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,OAAO,KAAK,MAAM,IAAI,CAAC,OAAO;AAAA,MAC5B,MAAM,EAAE;AAAA,MACR,QAAQ,EAAE;AAAA,MACV,aAAa,EAAE,oBAAoB;AAAA,MACnC,YAAY,EAAE,mBAAmB;AAAA;AAAA;AAAA;AAAA,IAInC,EAAE;AAAA,IACF,SAAS;AAAA,MACP,SAAS,KAAK,MAAM,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAAA,MACxD,UAAU,KAAK,MAAM,OAAO,OAAK,EAAE,WAAW,UAAU,EAAE;AAAA,MAC1D,SAAS,KAAK,MAAM,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAAA,MACxD,OAAO,KAAK,MAAM;AAAA,IACpB;AAAA,EACF,CAAC;AACH,CAAC;AAGD,SAAS,IAAI,uBAAuB,OAAO,MAAM;AAC/C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,WAAW,mBAAmB,EAAE,IAAI,MAAM,UAAU,CAAC;AAE3D,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAM,eAAe,SAAS;AAC3C,QAAM,WAAW,KAAK,MAAM,KAAK,OAAK,EAAE,SAAS,QAAQ;AAEzD,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,KAAK,EAAE,OAAO,yBAAyB,GAAG,GAAG;AAAA,EACxD;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,MAAM,SAAS;AAAA,IACf,QAAQ,SAAS;AAAA,IACjB,iBAAiB,SAAS;AAAA,IAC1B,gBAAgB,SAAS;AAAA,EAC3B,CAAC;AACH,CAAC;AASD,SAAS,kBAAkB,WAA2B;AACpD,QAAM,aAAa,oBAAoB;AACvC,SAAOC,MAAK,YAAY,eAAe,SAAS;AAClD;AAKA,SAAS,qBAAqB,WAA2B;AACvD,QAAM,MAAM,kBAAkB,SAAS;AACvC,MAAI,CAACC,aAAW,GAAG,GAAG;AACpB,IAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAGA,SAAS,IAAI,oBAAoB,OAAO,MAAM;AAC5C
,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,MAAM,kBAAkB,SAAS;AACvC,MAAI,CAACD,aAAW,GAAG,GAAG;AACpB,WAAO,EAAE,KAAK,EAAE,WAAW,aAAa,CAAC,GAAG,OAAO,EAAE,CAAC;AAAA,EACxD;AAEA,QAAM,QAAQ,YAAY,GAAG;AAC7B,QAAM,cAAc,MAAM,IAAI,CAAC,aAAa;AAC1C,UAAM,WAAWD,MAAK,KAAK,QAAQ;AACnC,UAAM,QAAQG,UAAS,QAAQ;AAC/B,WAAO;AAAA,MACL,IAAI,SAAS,MAAM,GAAG,EAAE,CAAC;AAAA;AAAA,MACzB;AAAA,MACA,MAAM;AAAA,MACN,MAAM,MAAM;AAAA,MACZ,WAAW,MAAM,UAAU,YAAY;AAAA,IACzC;AAAA,EACF,CAAC;AAED,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA;AAAA,IACA,OAAO,YAAY;AAAA,EACrB,CAAC;AACH,CAAC;AAGD,SAAS,KAAK,oBAAoB,OAAO,MAAM;AAC7C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,cAAc,EAAE,IAAI,OAAO,cAAc,KAAK;AAGpD,MAAI,YAAY,SAAS,qBAAqB,GAAG;AAC/C,QAAI;AACF,YAAM,WAAW,MAAM,EAAE,IAAI,SAAS;AACtC,YAAM,OAAO,SAAS,IAAI,MAAM;AAEhC,UAAI,CAAC,QAAQ,EAAE,gBAAgB,OAAO;AACpC,eAAO,EAAE,KAAK,EAAE,OAAO,mBAAmB,GAAG,GAAG;AAAA,MAClD;AAEA,YAAM,MAAM,qBAAqB,SAAS;AAC1C,YAAM,KAAKC,QAAO,EAAE;AACpB,YAAM,MAAMC,SAAQ,KAAK,IAAI,KAAK;AAClC,YAAM,eAAe,GAAG,EAAE,IAAIC,UAAS,KAAK,IAAI,EAAE,QAAQ,oBAAoB,GAAG,CAAC;AAClF,YAAM,WAAWN,MAAK,KAAK,YAAY;AAEvC,YAAM,cAAc,MAAM,KAAK,YAAY;AAC3C,MAAAO,eAAc,UAAU,OAAO,KAAK,WAAW,CAAC;AAEhD,aAAO,EAAE,KAAK;AAAA,QACZ;AAAA,QACA,UAAU,KAAK;AAAA,QACf,UAAU;AAAA,QACV,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,WAAW,KAAK;AAAA,QAChB;AAAA,MACF,GAAG,GAAG;AAAA,IACR,SAAS,KAAK;AACZ,cAAQ,MAAM,gCAAgC,GAAG;AACjD,aAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,IACvD;AAAA,EACF;AAGA,MAAI;AACF,UAAM,OAAO,MAAM,EAAE,IAAI,KAAK;AAM9B,QAAI,CAAC,KAAK,YAAY,CAAC,KAAK,MAAM;AAChC,aAAO,EAAE,KAAK,EAAE,OAAO,2BAA2B,GAAG,GAAG;AAAA,IAC1D;AAEA,UAAM,MAAM,qBAAqB,SAAS;AAC1C,UAAM,KAAKH,QAAO,EAAE;AACpB,UAAM,MAAMC,SAAQ,KAAK,QAAQ,KAAK;AACtC,UAAM,eAAe,GAAG,EAAE,IAAIC,UAAS,KAAK,QAAQ,EAAE,QAAQ,oBAAoB,GAAG,CAAC;AACtF,UAAM,WAAWN,MAAK,KAAK,YAAY;AAGvC,QAAI,aAAa,KAAK;AACtB,QAAI,WAAW,SAAS,GAAG,GA
AG;AAC5B,mBAAa,WAAW,MAAM,GAAG,EAAE,CAAC;AAAA,IACtC;AAEA,UAAM,SAAS,OAAO,KAAK,YAAY,QAAQ;AAC/C,IAAAO,eAAc,UAAU,MAAM;AAE9B,WAAO,EAAE,KAAK;AAAA,MACZ;AAAA,MACA,UAAU,KAAK;AAAA,MACf,UAAU;AAAA,MACV,MAAM;AAAA,MACN,MAAM,OAAO;AAAA,MACb,WAAW,KAAK;AAAA,MAChB;AAAA,IACF,GAAG,GAAG;AAAA,EACR,SAAS,KAAK;AACZ,YAAQ,MAAM,gCAAgC,GAAG;AACjD,WAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,EACvD;AACF,CAAC;AAGD,SAAS,OAAO,kCAAkC,OAAO,MAAM;AAC7D,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,eAAe,EAAE,IAAI,MAAM,cAAc;AAE/C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,MAAM,kBAAkB,SAAS;AACvC,MAAI,CAACN,aAAW,GAAG,GAAG;AACpB,WAAO,EAAE,KAAK,EAAE,OAAO,uBAAuB,GAAG,GAAG;AAAA,EACtD;AAGA,QAAM,QAAQ,YAAY,GAAG;AAC7B,QAAM,OAAO,MAAM,KAAK,OAAK,EAAE,WAAW,eAAe,GAAG,CAAC;AAE7D,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,KAAK,EAAE,OAAO,uBAAuB,GAAG,GAAG;AAAA,EACtD;AAEA,QAAM,WAAWD,MAAK,KAAK,IAAI;AAC/B,aAAW,QAAQ;AAEnB,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,IAAI,aAAa,CAAC;AACnD,CAAC;AAMD,IAAM,mBAAmBD,IAAE,OAAO;AAAA,EAChC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAC3B,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA;AAC7B,CAAC;AAGD,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGD,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAUD,IAAM,sBAAsB;AAS5B,SAAS,aAAa,MAAc,cAAsB,OAAwB;AAChF,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,YAAY,KAAK,YAAY;AACnC,QAAM,YAAY,aAAa,YAAY;AAG3C,MAAI,UAAU,SAAS,UAAU,KAAK,UAAU,SAAS,UAAU,GAAG;AACpE,WAAO;AAAA,EACT;AAGA,QAAM,eAAe,UAAU,MAAM,GAAG;AACxC,aAAW,WAAW,cAAc;AAClC,QAAI,QAAQ,SAAS,UAAU,KAAK,QAAQ,WAAW,UAAU,GAAG;AAClE,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,kBAAkB,MAAc,cAAsB,OAAuB;AACpF,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,aAAa,MAAM,YAAY;AACrC,QAAM,YAAY,KAAK,YAAY;AACnC,QAAM,YAAY,aAAa,YAAY;AAG3C,MAAI,cAAc,WAAY,QAAO;AAGrC,MAAI,UAAU,WAAW,UAAU,EAA
G,QAAO;AAG7C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAG3C,QAAM,cAAc,UAAU,MAAM,GAAG,EAAE,IAAI,KAAK;AAClD,MAAI,gBAAgB,WAAY,QAAO;AACvC,MAAI,YAAY,WAAW,UAAU,EAAG,QAAO;AAC/C,MAAI,YAAY,SAAS,UAAU,EAAG,QAAO;AAG7C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAG3C,QAAM,WAAW,UAAU,MAAM,GAAG;AACpC,aAAW,WAAW,UAAU;AAC9B,QAAI,QAAQ,WAAW,UAAU,EAAG,QAAO;AAAA,EAC7C;AAEA,SAAO;AACT;AAMA,eAAe,mBACb,SACA,YACA,OACA,OACA,UAA2B,CAAC,GAC5B,QAAgB,GACU;AAE1B,MAAI,QAAQ,uBAAuB,QAAQ,UAAU,QAAQ,GAAG;AAC9D,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,MAAMS,SAAQ,YAAY,EAAE,eAAe,KAAK,CAAC;AAEjE,eAAW,SAAS,SAAS;AAE3B,UAAI,QAAQ,UAAU,QAAQ,EAAG;AAEjC,YAAM,WAAWR,MAAK,YAAY,MAAM,IAAI;AAC5C,YAAM,eAAeS,UAAS,SAAS,QAAQ;AAG/C,UAAI,MAAM,YAAY,KAAK,oBAAoB,IAAI,MAAM,IAAI,GAAG;AAC9D;AAAA,MACF;AAGA,UAAI,MAAM,KAAK,WAAW,GAAG,GAAG;AAC9B;AAAA,MACF;AAGA,YAAM,MAAMJ,SAAQ,MAAM,IAAI,EAAE,YAAY;AAC5C,UAAI,mBAAmB,IAAI,GAAG,GAAG;AAC/B;AAAA,MACF;AAEA,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,aAAa,MAAM,MAAM,cAAc,KAAK,GAAG;AACjD,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,MAAM;AAAA,YACZ,MAAM;AAAA,UACR,CAAC;AAAA,QACH;AAGA,cAAM,mBAAmB,SAAS,UAAU,OAAO,OAAO,SAAS,QAAQ,CAAC;AAAA,MAC9E,WAAW,MAAM,OAAO,GAAG;AAEzB,YAAI,aAAa,MAAM,MAAM,cAAc,KAAK,GAAG;AACjD,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,MAAM;AAAA,YACZ,MAAM;AAAA,YACN,WAAW,OAAO;AAAA,UACpB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAGA,SAAS;AAAA,EACP;AAAA,EACA,WAAW,SAAS,gBAAgB;AAAA,EACpC,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,UAAM,EAAE,QAAQ,IAAI,OAAO,WAAW,KAAK,IAAI,EAAE,IAAI,MAAM,OAAO;AAClE,UAAM,QAAQ,KAAK,IAAI,SAAS,QAAQ,KAAK,IAAI,GAAG;AAEpD,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,mBAAmB,QAAQ;AAEjC,QAAI,CAACJ,aAAW,gBAAgB,GAAG;AACjC,aAAO,EAAE,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,QACP,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAEA,QAAI;AACF,YAAM,WAAW,MAAM;AAAA,QACrB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAGA,eAAS,KAAK,CAAC,GAAG,MAAM;AAEtB,YAAI,OAAO;AACT,gBAAM,SAAS,kBAAkB,EAAE,MAAM,EAAE,MAAM,KAAK;AACt
D,gBAAM,SAAS,kBAAkB,EAAE,MAAM,EAAE,MAAM,KAAK;AACtD,cAAI,WAAW,QAAQ;AACrB,mBAAO,SAAS;AAAA,UAClB;AAAA,QACF;AAGA,YAAI,EAAE,SAAS,EAAE,MAAM;AACrB,iBAAO,EAAE,SAAS,WAAW,KAAK;AAAA,QACpC;AAGA,eAAO,EAAE,KAAK,cAAc,EAAE,IAAI;AAAA,MACpC,CAAC;AAGD,YAAM,QAAQ,SAAS,MAAM,GAAG,KAAK;AAErC,aAAO,EAAE,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,OAAO,MAAM;AAAA,QACb;AAAA,MACF,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,cAAQ,MAAM,mCAAmC,GAAG;AACpD,aAAO,EAAE,KAAK;AAAA,QACZ,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,MACT,GAAG,GAAG;AAAA,IACR;AAAA,EACF;AACF;AAMA,SAAS,IAAI,sBAAsB,OAAO,MAAM;AAC9C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,MAAI;AACF,UAAM,EAAE,oBAAAS,oBAAmB,IAAI,MAAM;AACrC,QAAI,CAACA,oBAAmB,GAAG;AACzB,aAAO,EAAE,KAAK,EAAE,OAAO,CAAC,EAAE,CAAC;AAAA,IAC7B;AAEA,UAAM,EAAE,gBAAAC,gBAAe,IAAI,MAAM;AACjC,UAAM,QAAQ,MAAMA,gBAAe,gBAAgB,SAAS;AAC5D,WAAO,EAAE,KAAK,EAAE,WAAW,MAAM,CAAC;AAAA,EACpC,SAAS,KAAU;AACjB,YAAQ,MAAM,gCAAgC,IAAI,OAAO;AACzD,WAAO,EAAE,KAAK,EAAE,WAAW,OAAO,CAAC,EAAE,CAAC;AAAA,EACxC;AACF,CAAC;AAED,SAAS,IAAI,2BAA2B,OAAO,MAAM;AACnD,QAAM,SAAS,EAAE,IAAI,MAAM,QAAQ;AAEnC,MAAI;AACF,UAAM,EAAE,oBAAAD,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,QAAI,CAACD,oBAAmB,GAAG;AACzB,aAAO,EAAE,KAAK,EAAE,OAAO,+BAA+B,GAAG,GAAG;AAAA,IAC9D;AAEA,UAAM,SAAS,MAAMC,gBAAe,eAAe,MAAM;AACzD,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,SAAS,KAAU;AACjB,WAAO,EAAE,KAAK,EAAE,OAAO,IAAI,QAAQ,GAAG,GAAG;AAAA,EAC3C;AACF,CAAC;AAMD,SAAS,IAAI,0BAA0B,OAAO,MAAM;AAClD,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,MAAI;AACF,UAAM,EAAE,oBAAAD,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,QAAI,CAACD,oBAAmB,GAAG;AACzB,aAAO,EAAE,KAAK,EAAE,WAAW,YAAY,CAAC,EAAE,CAAC;AAAA,IAC7C;AAEA,UAAM,QAAQ,MAAMC,gBAAe,gBAAgB,SAAS;AAC5D,UAAM,aAAa,MAAM,OAAO,CAAC,MAAM,EAAE,aAAa,mBAAmB;AAEzE,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,EAAE,KAAK,EAAE,WAAW,YAAY,CAAC,GAAG,SAAS,yCAAyC,CAAC;AAAA,IAChG;AAEA,WAAO,EAAE,KAAK;AAAA,MACZ;AAAA,MACA,YAAY,WAAW,IAAI,CAAC,OAAO;AAAA,QACjC,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,QACZ,WAAW,EAAE;AAAA,QACb,WAAW,EAAE;AAAA,QACb,aAAa,EAAE;AAAA,QACf,WAAW,EAAE;AAAA,MACf,EAAE;AAAA,IACJ,CAAC;AAAA
,EACH,SAAS,KAAU;AACjB,YAAQ,MAAM,qCAAqC,IAAI,OAAO;AAC9D,WAAO,EAAE,KAAK,EAAE,WAAW,YAAY,CAAC,GAAG,OAAO,IAAI,QAAQ,CAAC;AAAA,EACjE;AACF,CAAC;;;A+BpkCD;AALA,SAAS,QAAAC,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,cAAAC,cAAY,aAAAC,YAAW,iBAAAC,sBAAqB;AACrD,SAAS,QAAAC,aAAY;AAGrB;;;ACIA,SAAS,oCAAoC;AAI7C,IAAM,QAAQ,oBAAI,IAAmD;AAGrE,IAAM,WAAW,oBAAI,IAA4C;AAIjE,IAAM,kBAAkB,YAAY,MAAM;AACxC,QAAM,MAAM,KAAK,IAAI;AACrB,aAAW,CAAC,KAAK,IAAI,KAAK,MAAM,QAAQ,GAAG;AACzC,QAAI,KAAK,aAAa,KAAK,YAAY,KAAK;AAC1C,YAAM,OAAO,GAAG;AAAA,IAClB;AAAA,EACF;AACF,GAAG,GAAK;AACR,gBAAgB,MAAM;AAEtB,IAAI,eAAe;AACnB,IAAI,iBAAiB;AAKrB,IAAM,YAAuB;AAAA,EAC3B,SAAS,YAAY;AAAA,EAErB;AAAA,EAEA,SAAS,OAAO,SAAiB,YAAoB;AACnD,UAAM,cAAc,SAAS,IAAI,OAAO;AACxC;AACA,UAAM,MAAM,KAAK,IAAI;AACrB,QAAI,MAAM,iBAAiB,KAAO;AAChC,cAAQ,IAAI,0CAA0C,YAAY,cAAc,SAAS,IAAI,WAAW,MAAM,IAAI,EAAE;AACpH,uBAAiB;AAAA,IACnB;AACA,QAAI,aAAa;AACf,iBAAW,YAAY,aAAa;AAClC,qBAAa,MAAM,SAAS,OAAO,CAAC;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,OAAO,KAAa,OAAe,YAA8B;AACpE,UAAM,YAAY,SAAS,KAAK,KAAK,IAAI,IAAI,QAAQ,KAAK,MAAO;AACjE,UAAM,IAAI,KAAK,EAAE,OAAO,UAAU,CAAC;AACnC,QAAI,SAAS,IAAI;AACf,iBAAW,MAAM,MAAM,OAAO,GAAG,GAAG,QAAQ,KAAK,GAAI;AAAA,IACvD;AAAA,EACF;AAAA,EAEA,KAAK,OAAO,QAAgB;AAC1B,UAAM,OAAO,MAAM,IAAI,GAAG;AAC1B,QAAI,CAAC,KAAM,QAAO;AAElB,QAAI,KAAK,aAAa,KAAK,YAAY,KAAK,IAAI,GAAG;AACjD,YAAM,OAAO,GAAG;AAChB,aAAO;AAAA,IACT;AAEA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,OAAO,QAAgB;AAC3B,UAAM,OAAO,MAAM,IAAI,GAAG;AAC1B,UAAM,UAAU,OAAO,SAAS,KAAK,OAAO,EAAE,IAAI;AAClD,UAAM,QAAQ,MAAM,OAAO,IAAI,IAAI,WAAW;AAC9C,UAAM,IAAI,KAAK,EAAE,OAAO,OAAO,IAAI,GAAG,WAAW,MAAM,UAAU,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAKA,IAAM,aAAyB;AAAA,EAC7B,SAAS,YAAY;AAAA,EAErB;AAAA,EAEA,WAAW,OAAO,SAAiB,aAAwC;AACzE,QAAI,CAAC,SAAS,IAAI,OAAO,GAAG;AAC1B,eAAS,IAAI,SAAS,oBAAI,IAAI,CAAC;AAAA,IACjC;AACA,aAAS,IAAI,OAAO,EAAG,IAAI,QAAQ;AACnC,YAAQ,IAAI,2CAA2C,OAAO,yBAAyB,SAAS,IAAI,OAAO,EAAG,IAAI,GAAG;AAAA,EACvH;AAAA,EAEA,aAAa,OAAO,YAAoB;AACtC,UAAM,QAAQ,SAAS,IAAI,OAAO,GAAG,QAAQ;AAC7C,aAAS,OAAO,OAAO;AACvB,YAAQ,IAAI
,+CAA+C,OAAO,cAAc,KAAK,eAAe;AAAA,EACtG;AACF;AAKO,IAAM,gBAAgB,6BAA6B;AAAA;AAAA,EAExD,WAAW,CAAC,YAA8B;AACxC,YAAQ,MAAM,CAAC,QAAQ;AACrB,cAAQ,MAAM,4CAA4C,GAAG;AAAA,IAC/D,CAAC;AAAA,EACH;AAAA,EACA;AAAA,EACA;AACF,CAAC;;;ADjHD,SAAS,UAAAC,eAAc;AAGvB;AACA;AACA;AAIA,IAAM,mBAAmB,oBAAI,IAA2B;AAExD,IAAM,wBAAwB,IAAI;AAClC,IAAM,yBAAyB,IAAI;AACnC,IAAM,sBAAsB,IAAI;AAEhC,SAAS,kBAAkB,UAAkB,OAAyB;AACpE,MAAI,aAAa,gBAAgB,CAAC,SAAS,OAAO,UAAU,UAAU;AACpE,WAAO;AAAA,EACT;AAEA,QAAM,OAAO;AACb,MAAI,UAAU;AACd,QAAM,OAAgC,EAAE,GAAG,KAAK;AAEhD,QAAM,UAAU,OAAO,KAAK,YAAY,WAAW,KAAK,UAAU;AAClE,MAAI,WAAW,QAAQ,SAAS,uBAAuB;AACrD,SAAK,UAAU,GAAG,QAAQ,MAAM,GAAG,sBAAsB,CAAC;AAAA;AAC1D,SAAK,gBAAgB,QAAQ;AAC7B,SAAK,mBAAmB;AACxB,cAAU;AAAA,EACZ;AAEA,QAAM,YAAY,OAAO,KAAK,eAAe,WAAW,KAAK,aAAa;AAC1E,MAAI,aAAa,UAAU,SAAS,uBAAuB;AACzD,SAAK,aAAa,GAAG,UAAU,MAAM,GAAG,sBAAsB,CAAC;AAAA;AAC/D,SAAK,kBAAkB,UAAU;AACjC,SAAK,qBAAqB;AAC1B,cAAU;AAAA,EACZ;AAEA,QAAM,YAAY,OAAO,KAAK,eAAe,WAAW,KAAK,aAAa;AAC1E,MAAI,aAAa,UAAU,SAAS,uBAAuB;AACzD,SAAK,aAAa,GAAG,UAAU,MAAM,GAAG,sBAAsB,CAAC;AAAA;AAC/D,SAAK,kBAAkB,UAAU;AACjC,SAAK,qBAAqB;AAC1B,cAAU;AAAA,EACZ;AAEA,MAAI,SAAS;AACX,YAAQ,IAAI,oEAAoE;AAAA,EAClF;AAEA,SAAO,UAAU,OAAO;AAC1B;AAEA,SAAS,kBAAkB,OAAwB;AACjD,MAAI;AACF,WAAO,KAAK,UAAU,SAAS,CAAC,CAAC;AAAA,EACnC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAe,2BACb,UACA,gBACA,YACA,UACA,OACe;AACf,MAAI,eAAe,IAAI,UAAU,EAAG;AAEpC,iBAAe,IAAI,UAAU;AAC7B,QAAM,SAAS,KAAK,UAAU;AAAA,IAC5B,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF,CAAC,CAAC;AAEF,MAAI,aAAa,aAAc;AAE/B,QAAM,WAAW,kBAAkB,KAAK;AACxC,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK,qBAAqB;AAC7D,UAAM,QAAQ,SAAS,MAAM,GAAG,IAAI,mBAAmB;AACvD,UAAM,SAAS,KAAK,UAAU;AAAA,MAC5B,MAAM;AAAA,MACN;AAAA,MACA,eAAe;AAAA,IACjB,CAAC,CAAC;AACF,UAAM,IAAI,QAAQ,CAACC,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,EACvD;AACF;AAOA,SAAS,wBAAwB,WAA2B;AAC1D,QAAM,MAAM,mBAAmB,SAAS;AACxC,MAAI,CAAC,IAAK,QAAO;AAEjB,QAAM,QAAkB,CAAC;AACzB,QAAM,KAAK,oBAAoB;AAC/B,QAAM,KAAK,gBAAgB,IAAI,GAAG,WAAW,IAAI,IAAI,WAAW,IAAI,QAAQ,MAAM;AAClF,MAAI,IAAI,eAAe,IAAI,cAAc;AACvC,UAAM,KAAK,sBAAsB,IAAI,WAAW,aAA
a,IAAI,YAAY,IAAI,IAAI,mBAAmB,SAAS,IAAI,gBAAgB,MAAM,EAAE,KAAK;AAAA,EACpJ;AACA,QAAM,KAAK,qBAAqB;AAChC,SAAO,MAAM,KAAK,IAAI;AACxB;AAMA,SAAS,gCAAgC,WAAmB,QAAwB;AAClF,QAAM,MAAM,wBAAwB,SAAS;AAC7C,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,GAAG,GAAG;AAAA;AAAA,EAAO,MAAM;AAC5B;AAsDA,IAAM,SAAS,IAAIC,MAAK;AAGxB,IAAM,mBAAmBC,IAAE,OAAO;AAAA,EAChC,MAAMA,IAAE,KAAK,CAAC,SAAS,MAAM,CAAC;AAAA,EAC9B,MAAMA,IAAE,OAAO;AAAA;AAAA,EACf,WAAWA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,UAAUA,IAAE,OAAO,EAAE,SAAS;AAChC,CAAC;AAED,IAAM,kBAAkBA,IAAE,OAAO;AAAA,EAC/B,QAAQA,IAAE,OAAO;AAAA;AAAA,EACjB,aAAaA,IAAE,MAAM,gBAAgB,EAAE,SAAS;AAClD,CAAC,EAAE;AAAA,EACD,CAAC,SAAS,KAAK,OAAO,KAAK,EAAE,SAAS,KAAM,KAAK,eAAe,KAAK,YAAY,SAAS;AAAA,EAC1F,EAAE,SAAS,gDAAgD;AAC7D;AAEA,IAAM,mBAAmBA,IAAE,OAAO;AAAA,EAChC,QAAQA,IAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACxB,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AAAA,EACtC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,eAAeA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC,EAAE,SAAS;AAC5D,CAAC;AAED,IAAM,eAAeA,IAAE,OAAO;AAAA,EAC5B,QAAQA,IAAE,OAAO,EAAE,SAAS;AAC9B,CAAC,EAAE,SAAS;AAGZ,IAAM,yBAAyB,oBAAI,IAA6B;AAehE,SAAS,wBAAwB,WAA2B;AAC1D,QAAM,aAAa,oBAAoB;AACvC,SAAOC,MAAK,YAAY,eAAe,SAAS;AAClD;AAMA,eAAe,qBACb,WACA,YACA,OACiB;AACjB,QAAM,iBAAiB,wBAAwB,SAAS;AAExD,MAAI,CAACC,aAAW,cAAc,GAAG;AAC/B,IAAAC,WAAU,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAAA,EAC/C;AAEA,MAAI,WAAW,WAAW;AAC1B,MAAI,CAAC,UAAU;AACb,UAAM,MAAM,0BAA0B,WAAW,WAAW,WAAW,IAAI;AAC3E,eAAW,cAAc,QAAQ,CAAC,GAAG,GAAG;AAAA,EAC1C;AAEA,MAAI,aAAa,WAAW;AAC5B,MAAI,WAAW,SAAS,GAAG,GAAG;AAC5B,iBAAa,WAAW,MAAM,GAAG,EAAE,CAAC;AAAA,EACtC;AAEA,MAAI,SAAiB,OAAO,KAAK,YAAY,QAAQ;AAErD,MAAI,WAAW,SAAS,SAAS;AAC/B,UAAM,UAAU,MAAM,oBAAoB,QAAQ,WAAW,SAAS;AACtE,aAAS,QAAQ;AACjB,eAAW,YAAY,QAAQ;AAC/B,eAAW,OAAO,OAAO,SAAS,QAAQ;AAAA,EAC5C;AAEA,QAAM,WAAWF,MAAK,gBAAgB,QAAQ;AAC9C,EAAAG,eAAc,UAAU,MAAM;AAE9B,SAAO;AACT;AAWA,SAASC,oBAAmB,MAAsB;AAChD,QAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,MAAI,aAAa,MAAM,KAAK,WAAW,OAAO,GAAG;AAC/C,WAAO,KAAK,MAAM,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AAEA,SAAS,0BAA0B,WAAoB,MAAiC;AACtF,MAAI,CAAC
,WAAW;AACd,WAAO,SAAS,UAAU,SAAS;AAAA,EACrC;AAEA,QAAM,YAAoC;AAAA,IACxC,aAAa;AAAA,IACb,cAAc;AAAA,IACd,aAAa;AAAA,IACb,aAAa;AAAA,IACb,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,oBAAoB;AAAA,IACpB,0BAA0B;AAAA,IAC1B,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,aAAa;AAAA,IACb,YAAY;AAAA,EACd;AAEA,SAAO,UAAU,SAAS,KAAK;AACjC;AAaA,SAAS,0BACP,WACA,QACA,UACA,aAC8B;AAC9B,SAAO,MAAM;AAEX,UAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAAgC;AACnE,UAAM,SAAS,SAAS,UAAU;AAClC,QAAI,eAAe;AACnB,UAAM,iBAAiB,oBAAI,IAAY;AAGvC,UAAM,kBAAkB,IAAI,gBAAgB;AAC5C,2BAAuB,IAAI,UAAU,eAAe;AAEpD,QAAI,gBAAgB;AACpB,QAAI,uBAAuB;AAC3B,QAAI,iBAAiB;AAGrB,UAAM,WAAW,OAAO,SAAiB;AACvC,UAAI,aAAc;AAClB,UAAI;AACF;AACA,cAAM,OAAO,MAAM,SAAS,IAAI;AAAA;AAAA,CAAM;AAAA,MACxC,SAAS,KAAU;AACjB;AACA,YAAI,mBAAmB,GAAG;AACxB,kBAAQ,IAAI,QAAQ,QAAQ,6DAA6D,aAAa,qBAAqB,oBAAoB,EAAE;AAAA,QACnJ;AACA,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,UAAM,YAAY,YAAY;AAC5B,UAAI,aAAc;AAClB,UAAI;AACF,gBAAQ,IAAI,QAAQ,QAAQ,mCAAmC,aAAa,qBAAqB,oBAAoB,mBAAmB,cAAc,EAAE;AACxJ,uBAAe;AACf,cAAM,OAAO,MAAM;AAAA,MACrB,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,UAAM,yBAAyB,MAAM;AACnC,6BAAuB,OAAO,QAAQ;AAAA,IACxC;AAGA,KAAC,YAAY;AACX,UAAI,YAAY;AAEhB,UAAI;AACF,cAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,UAAU,CAAC;AAG9C,cAAM,SAAS,KAAK,UAAU,EAAE,MAAM,kBAAkB,SAAS,CAAC,CAAC;AAKnE,YAAI;AACJ,YAAI,eAAe,YAAY,SAAS,GAAG;AACzC,gBAAM,eAAiJ,CAAC;AAGxJ,gBAAM,yBAAyB,YAC5B,IAAI,CAAC,GAAG,MAAM;AACb,kBAAM,OAAO,EAAE,YAAY,cAAc,IAAI,CAAC;AAC9C,kBAAM,YAAY,EAAE,SAAS,UAAU,UAAU;AACjD,kBAAM,WAAW,EAAE,aAAa;AAChC,mBAAO,GAAG,IAAI,CAAC,KAAK,SAAS,MAAM,IAAI,eAAe,QAAQ;AAAA,UAChE,CAAC,EACA,KAAK,IAAI;AACZ,uBAAa,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,MAAM;AAAA,EAA2F,sBAAsB;AAAA;AAAA;AAAA,UACzH,CAAC;AAGD,cAAI,QAAQ;AACV,yBAAa,KAAK,EAAE,MAAM,QAAQ,MAAM;AAAA;AAAA,EAAqB,MAAM,GAAG,CAAC;AAAA,UACzE;AAGA,qBAAW,cAAc,aAAa;AACpC,gBAAI,WAAW,SAAS,SAAS;AAC/B,2BAAa,KAAK;AAAA,gBAChB,MAAM;AAAA,gBACN,OAAOA,oBAAmB,WAAW,IAAI;AAAA,gBACzC,WAAW,WAAW;AAAA,gBACtB,UAAU,WAAW;AAAA,gBACrB,WAAW,WAAW;AAAA,cACxB,CAAC;AAAA,YACH,OAAO;AACL,2BAAa,KAAK;AAAA,gBAChB,MAAM;AAAA,g
BACN,MAAMA,oBAAmB,WAAW,IAAI;AAAA,gBACxC,WAAW,WAAW,aAAa;AAAA,gBACnC,UAAU,WAAW;AAAA,gBACrB,WAAW,WAAW;AAAA,cACxB,CAAC;AAAA,YACH;AAAA,UACF;AACA,6BAAmB;AAAA,QACrB,OAAO;AACL,6BAAmB;AAAA,QACrB;AAGA,cAAM,SAAS,KAAK,UAAU;AAAA,UAC5B,MAAM;AAAA,UACN,MAAM,EAAE,IAAI,QAAQ,KAAK,IAAI,CAAC,IAAI,SAAS,iBAAiB;AAAA,QAC9D,CAAC,CAAC;AAGF,cAAM,YAAY,OAAO,KAAK,IAAI,CAAC;AACnC,cAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,UAAU,CAAC,CAAC;AAE3D,YAAI,SAAS,QAAQ,KAAK,IAAI,CAAC;AAC/B,YAAI,cAAc;AAElB,cAAM,SAAS,MAAM,MAAM,OAAO;AAAA,UAChC;AAAA,UACA;AAAA;AAAA,UACA,aAAa,gBAAgB;AAAA;AAAA,UAC7B,qBAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAKrB,YAAY,YAAY;AAAA,UAExB;AAAA,UACA,cAAc,YAAY;AAAA,UAE1B;AAAA,UACA,oBAAoB,OAAO,cAAc;AACvC,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,MAAM;AAAA,gBACJ,IAAI,UAAU;AAAA,gBACd,YAAY,UAAU;AAAA,gBACtB,UAAU,UAAU;AAAA,gBACpB,OAAO,UAAU;AAAA,cACnB;AAAA,YACF,CAAC,CAAC;AAAA,UACJ;AAAA,UACA,gBAAgB,OAAO,aAAa;AAClC,kBAAM,SAAU,SAAS,MAAc,UAAU;AACjD,kBAAM,gBAAgB,OAAQ,SAAS,MAAc,YAAY,WAC5D,SAAS,KAAa,QAAQ,SAC/B;AACJ,kBAAM,aAAc,SAAS,MAAc;AAC3C,kBAAM,aAAc,SAAS,MAAc;AAC3C,oBAAQ;AAAA,cACN;AAAA,cACA,SAAS;AAAA,cACT;AAAA,cACA,kBAAkB,SAAY,iBAAiB,aAAa,KAAK;AAAA,cACjE,eAAe,UAAa,eAAe,SAAY,SAAS,UAAU,IAAI,UAAU,KAAK;AAAA,YAC/F;AACA,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,UAAU,SAAS;AAAA,cACnB,MAAM,SAAS;AAAA,YACjB,CAAC,CAAC;AACF,gBAAI,SAAS,aAAa,gBAAgB,WAAW,WAAW;AAC9D,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,OAAO;AAAA,gBACP;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,CAAC,CAAC;AACF,oBAAM,IAAI,QAAQ,CAACC,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,YACvD;AAGA,kBAAM,cAAe,SAAS,MAAc;AAC5C,kBAAM,gBAAiB,SAAS,MAAc;AAE9C,gBAAI,SAAS,aAAa,UAAU,eAAe;AACjD,sBAAQ,IAAI,mBAAmB,QAAQ,gEAAgE,SAAS,EAAE;AAClH,2BAAa,SAAS;AAAA,YACxB,WAAW,SAAS,aAAa,UAAU,aAAa;AACtD,sBAAQ,IAAI,mBAAmB,QAAQ,4DAA4D,WAAW,gBAAgB,SAAS,EAAE;AACzI,oBAAM,QAAQ,iBAAiB,WAAW,WAAW;AACrD,sBAAQ,IAAI,mBAAmB,QAAQ,4BAA4B,MAAM,SAAS,oBAAoB,MAAM,cAAc,OAAO,CAAC,qBAAqB,MAAM,cAAc,QAAQ,CAAC,EAAE;AAEtL,kBAAI,CAAC,iBAAiB,IAAI,SAAS,GAAG;AACpC,sBAAM,WAAW,IAAI,cAAc,SAAS;AAC5C,yBAAS,MAAM;AACf,iCAAiB,IAAI,WAAW,QA
AQ;AAAA,cAC1C;AAKA,oBAAM,oBAAoB,MAAM,cAAc,OAAO;AACrD,kBAAI,oBAAoB,GAAG;AACzB,wBAAQ,IAAI,mBAAmB,QAAQ,eAAe,iBAAiB,+CAA+C;AACtH,sBAAM,mBAAmB,OAAO;AAChC,sBAAM,mBAAmB,QAAQ;AAAA,cACnC;AACA,sBAAQ,IAAI,mBAAmB,QAAQ,6CAA6C;AACpF,oBAAM,GAAG,SAAS,CAAC,UAAwB;AACzC;AACA,oBAAI,yBAAyB,GAAG;AAC9B,0BAAQ,IAAI,mBAAmB,QAAQ,4CAA4C,MAAM,MAAM,UAAU,CAAC,iBAAiB,YAAY,EAAE;AAAA,gBAC3I,WAAW,uBAAuB,OAAO,GAAG;AAC1C,0BAAQ,IAAI,mBAAmB,QAAQ,oBAAoB,oBAAoB,kBAAkB,YAAY,GAAG;AAAA,gBAClH;AACA,sBAAM,MAAM,iBAAiB,IAAI,SAAS;AAC1C,qBAAK,SAAS,KAAK;AACnB,yBAAS,KAAK,UAAU;AAAA,kBACtB,MAAM;AAAA,kBACN,MAAM,MAAM;AAAA,kBACZ,UAAU,MAAM;AAAA,gBAClB,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,0BAAQ,KAAK,mBAAmB,QAAQ,2CAA2C,GAAG;AAAA,gBACxF,CAAC;AAAA,cACH,CAAC;AACD,oBAAM,GAAG,UAAU,CAAC,MAAqB;AACvC,wBAAQ,IAAI,mBAAmB,QAAQ,qCAAqC,EAAE,SAAS,kBAAkB,EAAE,aAAa,aAAa,EAAE,aAAa,IAAI,EAAE,cAAc,EAAE;AAC1K,yBAAS,KAAK,UAAU;AAAA,kBACtB,MAAM;AAAA,kBACN,GAAG;AAAA,gBACL,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,0BAAQ,KAAK,mBAAmB,QAAQ,4CAA4C,GAAG;AAAA,gBACzF,CAAC;AAAA,cACH,CAAC;AAED,oBAAM,cAAc;AAAA,YACtB;AAAA,UACF;AAAA,UACA,cAAc,YAAY;AACxB,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AACtD,gBAAI,aAAa;AACf,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAC/D,4BAAc;AACd,uBAAS,QAAQ,KAAK,IAAI,CAAC;AAAA,YAC7B;AAAA,UACF;AAAA,UACA,SAAS,OAAO,EAAE,MAAM,MAAM;AAC5B,wBAAY;AACZ,oBAAQ,IAAI,wBAAwB,MAAM,MAAM,QAAQ;AAAA,UAC1D;AAAA,QACF,CAAC;AAGD,YAAI,cAAc,aAAa,KAAK,IAAI,CAAC;AACzC,YAAI,mBAAmB;AAEvB,yBAAiB,QAAQ,OAAO,OAAO,YAAY;AACjD,cAAI,KAAK,SAAS,cAAc;AAC9B,gBAAI,CAAC,aAAa;AAChB,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,OAAO,CAAC,CAAC;AACjE,4BAAc;AAAA,YAChB;AACA,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UACrF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,YAAY,CAAC,CAAC;AAC3E,+BAAmB;AAAA,UACrB,WAAW,KAAK,SAAS,mBAAmB;AAC1C,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,aAAa,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,UAC/F,WAAW,KAAK,SAAS,iBAAiB;AACxC,gBAAI,kBAAkB;AACpB,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;A
ACzE,iCAAmB;AACnB,4BAAc,aAAa,KAAK,IAAI,CAAC;AAAA,YACvC;AAAA,UACF,WAAY,KAAa,SAAS,6BAA6B;AAE7D,kBAAM,IAAI;AACV,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,EAAE;AAAA,cACd,UAAU,EAAE;AAAA,YACd,CAAC,CAAC;AACF,2BAAe,IAAI,EAAE,UAAU;AAAA,UACjC,WAAY,KAAa,SAAS,mBAAmB;AAEnD,kBAAM,IAAI;AACV,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,EAAE;AAAA,cACd,eAAe,EAAE;AAAA,YACnB,CAAC,CAAC;AAAA,UACJ,WAAW,KAAK,SAAS,aAAa;AACpC,kBAAM;AAAA,cACJ;AAAA,cACA;AAAA,cACA,KAAK;AAAA,cACL,KAAK;AAAA,cACL,KAAK;AAAA,YACP;AACA,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,KAAK;AAAA,cACjB,UAAU,KAAK;AAAA,cACf,OAAO,kBAAkB,KAAK,UAAU,KAAK,KAAK;AAAA,YACpD,CAAC,CAAC;AACF,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,OAAO;AAAA,cACP,UAAU,KAAK;AAAA,YACjB,CAAC,CAAC;AAAA,UACJ,WAAW,KAAK,SAAS,eAAe;AACtC,kBAAM,SAAS,KAAK,UAAU;AAAA,cAC5B,MAAM;AAAA,cACN,YAAY,KAAK;AAAA,cACjB,QAAQ,KAAK;AAAA,YACf,CAAC,CAAC;AAAA,UACJ,WAAW,KAAK,SAAS,SAAS;AAChC,oBAAQ,MAAM,iBAAiB,KAAK,KAAK;AACzC,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,KAAK,EAAE,CAAC,CAAC;AAAA,UACjF;AAAA,QACF;AAGA,YAAI,aAAa;AACf,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAAA,QACjE;AAGA,YAAI,kBAAkB;AACpB,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AAAA,QAC3E;AAGA,YAAI,CAAC,WAAW;AACd,gBAAM,OAAO,qBAAqB;AAAA,QACpC;AAGA,YAAI,WAAW;AACb,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,QAClD,OAAO;AACL,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,QACnD;AAGA,YAAI;AACF,gBAAM,oBAAoB,OAAO,QAAQ;AAAA,QAC3C,QAAQ;AAAA,QAER;AAAA,MACF,SAAS,OAAY;AACnB,YAAI,MAAM,SAAS,gBAAgB,MAAM,SAAS,SAAS,SAAS,GAAG;AAErE,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,QAClD,OAAO;AAEL,kBAAQ,MAAM,gBAAgB,KAAK;AACnC,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,MAAM,QAAQ,CAAC,CAAC;AAC1E,cAAI;AACF,kBAAM,oBAAoB,UAAU,QAAQ;AAAA,UAC9C,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF,UAAE;AACA,+BAAuB;AAIvB,cAAM,SAAS,QAAQ;AACvB,cAAM,UAAU;AAAA,MAClB;AAAA,IACF,GAAG;AAEH,WAAO;AAAA,EACT;AACF;AAGA,OAAO;AAAA,EACL;AAAA,EACAC,YAAW,QAAQ,eAAe;AAAA,EAClC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI
,MAAM,IAAI;AAC3B,UAAM,EAAE,QAAQ,WAAW,YAAY,IAAI,EAAE,IAAI,MAAM,MAAM;AAG7D,UAAM,SAAS,gCAAgC,IAAI,SAAS;AAE5D,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,UAAM,eAAe,MAAM,eAAe,gBAAgB,EAAE;AAI5D,UAAM,iBAAiB,IAAI,QAAQ,kBAAkB,YAAY;AAGjE,QAAI;AAGJ,UAAM,oBAAoD;AAE1D,QAAI,qBAAqB,kBAAkB,SAAS,GAAG;AAErD,eAAS,IAAI,GAAG,IAAI,kBAAkB,QAAQ,KAAK;AACjD,cAAM,aAAa,kBAAkB,CAAC;AACtC,YAAI;AACF,gBAAM,YAAY,MAAM,qBAAqB,IAAI,YAAY,CAAC;AAC9D,qBAAW,YAAY;AAAA,QACzB,SAAS,KAAK;AACZ,kBAAQ,MAAM,6BAA6B,CAAC,KAAK,GAAG;AAAA,QACtD;AAAA,MACF;AAGA,YAAM,eAAiJ,CAAC;AAGxJ,YAAM,yBAAyB,kBAC5B,IAAI,CAAC,GAAG,MAAM;AACb,cAAM,OAAO,EAAE,YAAY,cAAc,IAAI,CAAC;AAC9C,cAAM,YAAY,EAAE,SAAS,UAAU,UAAU;AACjD,cAAM,WAAW,EAAE,aAAa;AAChC,eAAO,GAAG,IAAI,CAAC,KAAK,SAAS,MAAM,IAAI,eAAe,QAAQ;AAAA,MAChE,CAAC,EACA,KAAK,IAAI;AACZ,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,MAAM;AAAA,EAA2F,sBAAsB;AAAA;AAAA;AAAA,MACzH,CAAC;AAGD,UAAI,QAAQ;AACV,qBAAa,KAAK,EAAE,MAAM,QAAQ,MAAM;AAAA;AAAA,EAAqB,MAAM,GAAG,CAAC;AAAA,MACzE;AAGA,iBAAW,cAAc,mBAAmB;AAC1C,YAAI,WAAW,SAAS,SAAS;AAC/B,uBAAa,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,OAAOF,oBAAmB,WAAW,IAAI;AAAA,YACzC,WAAW,WAAW;AAAA,YACtB,UAAU,WAAW;AAAA,YACrB,WAAW,WAAW;AAAA,UACxB,CAAC;AAAA,QACH,OAAO;AACL,uBAAa,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,MAAMA,oBAAmB,WAAW,IAAI;AAAA,YACxC,WAAW,WAAW,aAAa;AAAA,YACnC,UAAU,WAAW;AAAA,YACrB,WAAW,WAAW;AAAA,UACxB,CAAC;AAAA,QACH;AAAA,MACF;AAEA,2BAAqB;AAAA,IACvB,OAAO;AACL,2BAAqB;AAAA,IACvB;AAIA,UAAM,eAAe,OAAO,IAAI,EAAE,MAAM,QAAQ,SAAS,mBAA0B,CAAC;AAGpF,UAAM,WAAW,UAAU,EAAE,IAAIG,QAAO,EAAE,CAAC;AAC3C,YAAQ,IAAI,4BAA4B,QAAQ,gBAAgB,EAAE,EAAE;AAGpE,UAAM,oBAAoB,OAAO,IAAI,QAAQ;AAI7C,UAAM,SAAS,MAAM,cAAc;AAAA,MACjC;AAAA,MACA,0BAA0B,IAAI,QAAQ,UAAU,iBAAiB;AAAA,IACnE;AAEA,QAAI,CAAC,QAAQ;AACX,cAAQ,MAAM,8CAA8C,QAAQ,EAAE;AACtE,aAAO,EAAE,KAAK,EAAE,OAAO,0BAA0B,GAAG,GAAG;AAAA,IACzD;AACA,YAAQ,IAAI,mBAAmB,QAAQ,uBAAuB;AAG9D,UAAM,gBAAgB,OAAO,YAAY,IAAI,kBAAkB,CAAC;AAEhE,WAAO,IAAI,SAAS,eAAwD;AAAA,MAC1E,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AA
AA,QACd,iCAAiC;AAAA,QACjC,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAGA,OAAO,IAAI,cAAc,OAAO,MAAM;AACpC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,WAAW,EAAE,IAAI,MAAM,UAAU;AACvC,QAAM,mBAAmB,EAAE,IAAI,MAAM,UAAU;AAE/C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,MAAI,WAA+B;AACnC,MAAI,CAAC,UAAU;AACb,UAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AACvE,QAAI,CAAC,cAAc;AACjB,aAAO,EAAE,KAAK,EAAE,OAAO,qCAAqC,MAAM,4CAA4C,GAAG,GAAG;AAAA,IACtH;AACA,eAAW,aAAa;AAAA,EAC1B;AAEA,UAAQ,IAAI,sCAAsC,SAAS,cAAc,QAAQ,cAAc,YAAY,MAAM,EAAE;AAGnH,QAAM,SAAS,MAAM,cAAc;AAAA,IACjC;AAAA,IACA,WAAW,SAAS,UAAU,EAAE,IAAI;AAAA,EACtC;AAEA,MAAI,CAAC,QAAQ;AACX,YAAQ,IAAI,uCAAkC,QAAQ,sBAAsB;AAC5E,WAAO,EAAE,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,MAAM;AAAA,IACR,GAAG,GAAG;AAAA,EACR;AACA,UAAQ,IAAI,mCAAmC,QAAQ,EAAE;AAEzD,QAAM,gBAAgB,OAAO,YAAY,IAAI,kBAAkB,CAAC;AAEhE,SAAO,IAAI,SAAS,eAAwD;AAAA,IAC1E,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,iBAAiB;AAAA,MACjB,cAAc;AAAA,MACd,iCAAiC;AAAA,MACjC,eAAe,YAAY;AAAA,IAC7B;AAAA,EACF,CAAC;AACH,CAAC;AAGD,OAAO,IAAI,eAAe,OAAO,MAAM;AACrC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AAEvE,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,iBAAiB,CAAC,CAAC;AAAA,IACnB,QAAQ,eAAe;AAAA,MACrB,IAAI,aAAa;AAAA,MACjB,UAAU,aAAa;AAAA,MACvB,QAAQ,aAAa;AAAA,MACrB,WAAW,aAAa,UAAU,YAAY;AAAA,IAChD,IAAI;AAAA,EACN,CAAC;AACH,CAAC;AAGD,OAAO;AAAA,EACL;AAAA,EACAD,YAAW,QAAQ,eAAe;AAAA,EAClC,OAAO,MAAM;AACX,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,EAAE,QAAQ,UAAU,IAAI,EAAE,IAAI,MAAM,MAAM;AAGhD,UAAM,SAAS,gCAAgC,IAAI,SAAS;AAE5D,UAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAC/C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,QAAI;AACF,YAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,WAAW,GAAG,CAAC;AAClD,YAAM,SAAS,MAAM,MAAM,IAAI,EAAE,OAAO,CAAC;AAEzC,aAAO,EAAE,KAAK;AAAA,QACZ,WAAW;AAAA,QACX,MAAM,OAAO;AAAA,QACb,WAAW,OAAO,MAAM;AAAA,MAC1B,CAAC;AAAA,IACH,SAAS,
OAAY;AACnB,aAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,IAC7C;AAAA,EACF;AACF;AAGA,OAAO,KAAK,4BAA4B,OAAO,MAAM;AACnD,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,MAAI;AACF,UAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,UAAU,CAAC;AAC9C,UAAM,SAAS,MAAM,MAAM,QAAQ,UAAU;AAE7C,WAAO,EAAE,KAAK;AAAA,MACZ,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAY;AACnB,WAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,EAC7C;AACF,CAAC;AAGD,OAAO;AAAA,EACL;AAAA,EACAA,YAAW,QAAQ,YAAY;AAAA,EAC/B,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAC3C,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAE/B,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,QAAI;AACF,YAAM,QAAQ,MAAM,MAAM,OAAO,EAAE,UAAU,CAAC;AAC9C,YAAM,OAAO,YAAY,MAAM,MAAM;AAErC,aAAO,EAAE,KAAK;AAAA,QACZ,SAAS;AAAA,QACT;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,SAAS,OAAY;AACnB,aAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,IAC7C;AAAA,EACF;AACF;AAGA,OAAO,IAAI,kBAAkB,OAAO,MAAM;AACxC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,mBAAmB,MAAM,qBAAqB,oBAAoB,SAAS;AAEjF,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,kBAAkB,iBAAiB,IAAI,CAAC,OAAO;AAAA,MAC7C,IAAI,EAAE;AAAA,MACN,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,MACZ,OAAO,EAAE;AAAA,MACT,WAAW,EAAE,UAAU,YAAY;AAAA,IACrC,EAAE;AAAA,IACF,OAAO,iBAAiB;AAAA,EAC1B,CAAC;AACH,CAAC;AAID,OAAO,KAAK,cAAc,OAAO,MAAM;AACrC,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAElC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,eAAe,MAAM,oBAAoB,eAAe,SAAS;AACvE,MAAI,CAAC,cAAc;AACjB,WAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,GAAG,GAAG;AAAA,EACnE;AAGA,QAAM,kBAAkB,uBAAuB,IAAI,aAAa,QAAQ;AACxE,MAAI,iBAAiB;AACnB,oBAAgB,MAAM;AACtB,2BAAuB,OAAO,aAAa,QAAQ;AACnD,WAAO,EAAE,KAAK,EAAE,S
AAS,MAAM,UAAU,aAAa,UAAU,SAAS,KAAK,CAAC;AAAA,EACjF;AAGA,SAAO,EAAE,KAAK;AAAA,IACZ,SAAS;AAAA,IACT,UAAU,aAAa;AAAA,IACvB,SAAS;AAAA,EACX,CAAC;AACH,CAAC;AAGD,OAAO;AAAA,EACL;AAAA,EACAA,YAAW,QAAQ,gBAAgB;AAAA,EACnC,OAAO,MAAM;AACX,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,SAAS,UAAU;AAGzB,UAAM,QAAQ,MAAM,MAAM,OAAO;AAAA,MAC/B,MAAM,KAAK;AAAA,MACX,kBAAkB,KAAK,oBAAoB,OAAO;AAAA,MAClD,OAAO,KAAK,SAAS,OAAO;AAAA,MAC5B,eAAe,KAAK,gBAAgB,EAAE,eAAe,KAAK,cAAc,IAAI;AAAA,IAC9E,CAAC;AAED,UAAM,UAAU,MAAM,WAAW;AAGjC,UAAM,iBAAiB,gCAAgC,QAAQ,IAAI,KAAK,MAAM;AAE9E,UAAM,WAAW,UAAU,QAAQ,EAAE,IAAIC,QAAO,EAAE,CAAC;AAInD,UAAM,iBAAiB,QAAQ,IAAI,QAAQ,kBAAkB,CAAC;AAG9D,UAAM,oBAAoB,OAAO,QAAQ,IAAI,QAAQ;AAIrD,UAAM,4BAA4B,MAA8B;AAC9D,YAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAAgC;AACnE,YAAM,SAAS,SAAS,UAAU;AAClC,UAAI,eAAe;AACnB,YAAM,iBAAiB,oBAAI,IAAY;AAGvC,YAAM,kBAAkB,IAAI,gBAAgB;AAC5C,6BAAuB,IAAI,UAAU,eAAe;AAEpD,UAAI,gBAAgB;AACpB,UAAI,uBAAuB;AAC3B,UAAI,iBAAiB;AAGrB,YAAM,WAAW,OAAO,SAAiB;AACvC,YAAI,aAAc;AAClB,YAAI;AACF;AACA,gBAAM,OAAO,MAAM,SAAS,IAAI;AAAA;AAAA,CAAM;AAAA,QACxC,SAAS,KAAU;AACjB;AACA,cAAI,mBAAmB,GAAG;AACxB,oBAAQ,IAAI,QAAQ,QAAQ,6DAA6D,aAAa,qBAAqB,oBAAoB,EAAE;AAAA,UACnJ;AACA,yBAAe;AAAA,QACjB;AAAA,MACF;AAEA,YAAM,YAAY,YAAY;AAC5B,YAAI,aAAc;AAClB,YAAI;AACF,kBAAQ,IAAI,QAAQ,QAAQ,mCAAmC,aAAa,qBAAqB,oBAAoB,mBAAmB,cAAc,EAAE;AACxJ,yBAAe;AACf,gBAAM,OAAO,MAAM;AAAA,QACrB,QAAQ;AAAA,QAER;AAAA,MACF;AAGA,YAAM,yBAAyB,MAAM;AACnC,+BAAuB,OAAO,QAAQ;AAAA,MACxC;AAEA,OAAC,YAAY;AACX,YAAI,YAAY;AAEhB,YAAI;AAEF,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,kBAAkB,SAAS,CAAC,CAAC;AACnE,gBAAM,SAAS,KAAK,UAAU;AAAA,YAC5B,MAAM;AAAA,YACN,MAAM;AAAA,cACJ,IAAI,QAAQ;AAAA,cACZ,MAAM,QAAQ;AAAA,cACd,kBAAkB,QAAQ;AAAA,cAC1B,OAAO,QAAQ;AAAA,YACjB;AAAA,UACF,CAAC,CAAC;AAEF,gBAAM,YAAY,OAAO,KAAK,IAAI,CAAC;AACnC,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,UAAU,CAAC,CAAC;AAE3D,cAAI,SAAS,QAAQ,KAAK,IAAI,CAAC;AAC/B,cAAI,cAAc;AAElB,gBAAM,SAAS,MAAM,MAAM,OAAO;AAAA,YAChC,QAAQ;AAAA,YACR,aAAa,gBAAgB;AAAA;AAAA,YAC7B,gBAAgB,OAAO,aAAa;AAClC,oBAAM,SAAU,SAAS,MAAc,UAAU;AACjD,oBAAM,gBAAgB,OAAQ,SAAS,MAAc,YAAY,WA
C5D,SAAS,KAAa,QAAQ,SAC/B;AACJ,oBAAM,aAAc,SAAS,MAAc;AAC3C,oBAAM,aAAc,SAAS,MAAc;AAC3C,sBAAQ;AAAA,gBACN;AAAA,gBACA,SAAS;AAAA,gBACT;AAAA,gBACA,kBAAkB,SAAY,iBAAiB,aAAa,KAAK;AAAA,gBACjE,eAAe,UAAa,eAAe,SAAY,SAAS,UAAU,IAAI,UAAU,KAAK;AAAA,cAC/F;AACA,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,UAAU,SAAS;AAAA,gBACnB,MAAM,SAAS;AAAA,cACjB,CAAC,CAAC;AACF,kBAAI,SAAS,aAAa,gBAAgB,WAAW,WAAW;AAC9D,sBAAM,SAAS,KAAK,UAAU;AAAA,kBAC5B,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC,CAAC;AACF,sBAAM,IAAI,QAAQ,CAACF,cAAY,WAAWA,WAAS,CAAC,CAAC;AAAA,cACvD;AAEA,oBAAM,cAAe,SAAS,MAAc;AAC5C,oBAAM,gBAAiB,SAAS,MAAc;AAE9C,kBAAI,SAAS,aAAa,UAAU,eAAe;AACjD,wBAAQ,IAAI,mBAAmB,QAAQ,gEAAgE,QAAQ,EAAE,EAAE;AACnH,6BAAa,QAAQ,EAAE;AAAA,cACzB,WAAW,SAAS,aAAa,UAAU,aAAa;AACtD,wBAAQ,IAAI,mBAAmB,QAAQ,0CAA0C,WAAW,gBAAgB,QAAQ,EAAE,EAAE;AACxH,sBAAM,QAAQ,iBAAiB,QAAQ,IAAI,WAAW;AACtD,wBAAQ,IAAI,mBAAmB,QAAQ,4BAA4B,MAAM,SAAS,oBAAoB,MAAM,cAAc,OAAO,CAAC,qBAAqB,MAAM,cAAc,QAAQ,CAAC,EAAE;AAEtL,oBAAI,CAAC,iBAAiB,IAAI,QAAQ,EAAE,GAAG;AACrC,wBAAM,WAAW,IAAI,cAAc,QAAQ,EAAE;AAC7C,2BAAS,MAAM;AACf,mCAAiB,IAAI,QAAQ,IAAI,QAAQ;AAAA,gBAC3C;AAGA,sBAAM,oBAAoB,MAAM,cAAc,OAAO;AACrD,oBAAI,oBAAoB,GAAG;AACzB,0BAAQ,IAAI,mBAAmB,QAAQ,eAAe,iBAAiB,+CAA+C;AACtH,wBAAM,mBAAmB,OAAO;AAChC,wBAAM,mBAAmB,QAAQ;AAAA,gBACnC;AACA,wBAAQ,IAAI,mBAAmB,QAAQ,6CAA6C;AACpF,sBAAM,GAAG,SAAS,CAAC,UAAwB;AACzC;AACA,sBAAI,yBAAyB,GAAG;AAC9B,4BAAQ,IAAI,mBAAmB,QAAQ,4CAA4C,MAAM,MAAM,UAAU,CAAC,iBAAiB,YAAY,EAAE;AAAA,kBAC3I,WAAW,uBAAuB,OAAO,GAAG;AAC1C,4BAAQ,IAAI,mBAAmB,QAAQ,oBAAoB,oBAAoB,kBAAkB,YAAY,GAAG;AAAA,kBAClH;AACA,wBAAM,MAAM,iBAAiB,IAAI,QAAQ,EAAE;AAC3C,uBAAK,SAAS,KAAK;AACnB,2BAAS,KAAK,UAAU;AAAA,oBACtB,MAAM;AAAA,oBACN,MAAM,MAAM;AAAA,oBACZ,UAAU,MAAM;AAAA,kBAClB,CAAC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,4BAAQ,KAAK,mBAAmB,QAAQ,2CAA2C,GAAG;AAAA,kBACxF,CAAC;AAAA,gBACH,CAAC;AACD,sBAAM,GAAG,UAAU,CAAC,MAAqB;AACvC,0BAAQ,IAAI,mBAAmB,QAAQ,qCAAqC,EAAE,SAAS,kBAAkB,EAAE,aAAa,aAAa,EAAE,aAAa,IAAI,EAAE,cAAc,EAAE;AAC1K,2BAAS,KAAK,UAAU;AAAA,oBACtB,MAAM;AAAA,oBACN,GAAG;AAAA,kBACL,CA
AC,CAAC,EAAE,MAAM,CAAC,QAAQ;AACjB,4BAAQ,KAAK,mBAAmB,QAAQ,4CAA4C,GAAG;AAAA,kBACzF,CAAC;AAAA,gBACH,CAAC;AACD,sBAAM,cAAc;AAAA,cACtB;AAAA,YACF;AAAA,YACA,cAAc,YAAY;AACxB,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,CAAC,CAAC;AACtD,kBAAI,aAAa;AACf,sBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAC/D,8BAAc;AACd,yBAAS,QAAQ,KAAK,IAAI,CAAC;AAAA,cAC7B;AAAA,YACF;AAAA,YACA,SAAS,OAAO,EAAE,MAAM,MAAM;AAC5B,0BAAY;AACZ,sBAAQ,IAAI,wBAAwB,MAAM,MAAM,QAAQ;AAAA,YAC1D;AAAA,UACF,CAAC;AAED,cAAI,cAAc,aAAa,KAAK,IAAI,CAAC;AACzC,cAAI,mBAAmB;AAEvB,2BAAiB,QAAQ,OAAO,OAAO,YAAY;AACjD,gBAAI,KAAK,SAAS,cAAc;AAC9B,kBAAI,CAAC,aAAa;AAChB,sBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,OAAO,CAAC,CAAC;AACjE,8BAAc;AAAA,cAChB;AACA,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,cAAc,IAAI,QAAQ,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,YACrF,WAAW,KAAK,SAAS,mBAAmB;AAC1C,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,YAAY,CAAC,CAAC;AAC3E,iCAAmB;AAAA,YACrB,WAAW,KAAK,SAAS,mBAAmB;AAC1C,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,mBAAmB,IAAI,aAAa,OAAO,KAAK,KAAK,CAAC,CAAC;AAAA,YAC/F,WAAW,KAAK,SAAS,iBAAiB;AACxC,kBAAI,kBAAkB;AACpB,sBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AACzE,mCAAmB;AACnB,8BAAc,aAAa,KAAK,IAAI,CAAC;AAAA,cACvC;AAAA,YACF,WAAY,KAAa,SAAS,6BAA6B;AAE7D,oBAAM,IAAI;AACV,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,EAAE;AAAA,gBACd,UAAU,EAAE;AAAA,cACd,CAAC,CAAC;AACF,6BAAe,IAAI,EAAE,UAAU;AAAA,YACjC,WAAY,KAAa,SAAS,mBAAmB;AAEnD,oBAAM,IAAI;AACV,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,EAAE;AAAA,gBACd,eAAe,EAAE;AAAA,cACnB,CAAC,CAAC;AAAA,YACJ,WAAW,KAAK,SAAS,aAAa;AAEpC,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA,KAAK;AAAA,gBACL,KAAK;AAAA,gBACL,KAAK;AAAA,cACP;AACA,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,KAAK;AAAA,gBACjB,UAAU,KAAK;AAAA,gBACf,OAAO,kBAAkB,KAAK,UAAU,KAAK,KAAK;AAAA,cACpD,CAAC,CAAC;AACF,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,OAAO;AAAA,gBACP,UAAU,KAAK;AAAA,cACjB,CAAC,CAAC;AAAA,YACJ,WAAW,KAAK,SAAS,eAAe;AACtC,oBAAM,SAAS,KAAK,UAAU;AAAA,gBAC5B,MAAM;AAAA,gBACN,YAAY,KAAK;AAAA,gBACjB,QAAQ,KAAK;AAAA,cACf,CAAC,CAAC;
AAAA,YACJ,WAAW,KAAK,SAAS,SAAS;AAChC,sBAAQ,MAAM,iBAAiB,KAAK,KAAK;AACzC,oBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,OAAO,KAAK,KAAK,EAAE,CAAC,CAAC;AAAA,YACjF;AAAA,UACF;AAEA,cAAI,aAAa;AACf,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,YAAY,IAAI,OAAO,CAAC,CAAC;AAAA,UACjE;AAEA,cAAI,kBAAkB;AACpB,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,iBAAiB,IAAI,YAAY,CAAC,CAAC;AAAA,UAC3E;AAEA,cAAI,CAAC,WAAW;AACd,kBAAM,OAAO,qBAAqB;AAAA,UACpC;AAEA,cAAI,WAAW;AACb,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,UAClD,OAAO;AACL,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,UACnD;AAEA,gBAAM,oBAAoB,OAAO,QAAQ;AAAA,QAC3C,SAAS,OAAY;AACnB,cAAI,MAAM,SAAS,gBAAgB,MAAM,SAAS,SAAS,SAAS,GAAG;AAErE,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,UAClD,OAAO;AAEL,oBAAQ,MAAM,gBAAgB,KAAK;AACnC,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,MAAM,QAAQ,CAAC,CAAC;AAC1E,kBAAM,oBAAoB,UAAU,QAAQ;AAAA,UAC9C;AAAA,QACF,UAAE;AACA,iCAAuB;AAEvB,gBAAM,SAAS,QAAQ;AACvB,gBAAM,UAAU;AAAA,QAClB;AAAA,MACF,GAAG;AAEH,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,MAAM,cAAc;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAEA,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,KAAK,EAAE,OAAO,0BAA0B,GAAG,GAAG;AAAA,IACzD;AAEA,UAAM,gBAAgB,OAAO,YAAY,IAAI,kBAAkB,CAAC;AAEhE,WAAO,IAAI,SAAS,eAAwD;AAAA,MAC1E,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,QACd,iCAAiC;AAAA,QACjC,eAAe;AAAA,QACf,gBAAgB,QAAQ;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAGA,IAAM,qBAAqBN,IAAE,OAAO;AAAA,EAClC,MAAMA,IAAE,KAAK,CAAC,eAAe,kBAAkB,aAAa,CAAC;AAAA,EAC7D,WAAWA,IAAE,OAAO;AAAA,EACpB,GAAGA,IAAE,OAAO,EAAE,SAAS;AAAA,EACvB,GAAGA,IAAE,OAAO,EAAE,SAAS;AAAA,EACvB,QAAQA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,YAAYA,IAAE,OAAO,EAAE,SAAS;AAAA,EAChC,QAAQA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,QAAQA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,KAAKA,IAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,WAAWA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,aAAaA,IAAE,MAAMA,IAAE,OAAO;AAAA,IAC5B,GAAGA,IAAE,OAAO;AAAA,IACZ,GAAGA,IAAE,OAAO;AAAA,IACZ,IAAIA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,CAAC,CAAC,EAAE,SAAS;AACf,CAAC;AAED,OAAO;AAAA,EACL;AAAA,EACA
O,YAAW,QAAQ,kBAAkB;AAAA,EACrC,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,UAAM,QAAQ,EAAE,IAAI,MAAM,MAAM;AAEhC,UAAM,QAAQ,SAAS,SAAS;AAChC,QAAI,CAAC,SAAS,CAAC,MAAM,WAAW;AAC9B,aAAO,EAAE,KAAK,EAAE,OAAO,4CAA4C,GAAG,GAAG;AAAA,IAC3E;AAEA,UAAM,YAAY,KAAK;AACvB,WAAO,EAAE,KAAK,EAAE,SAAS,KAAK,CAAC;AAAA,EACjC;AACF;AAGA,OAAO,IAAI,uBAAuB,OAAO,MAAM;AAC7C,QAAM,YAAY,EAAE,IAAI,MAAM,IAAI;AAClC,QAAM,QAAQ,SAAS,SAAS;AAEhC,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,QAAQ,CAAC,CAAC,OAAO;AAAA,IACjB,UAAU,CAAC,CAAC;AAAA,IACZ,aAAa,OAAO,cAAc;AAAA,MAChC,UAAU,MAAM,YAAY;AAAA,MAC5B,WAAW,MAAM,YAAY;AAAA,IAC/B,IAAI;AAAA,EACN,CAAC;AACH,CAAC;;;AEz5CD;AANA,SAAS,QAAAE,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,gBAAAC,qBAAoB;AAC7B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,cAAY;AAI9B,IAAM,aAAaF,eAAc,YAAY,GAAG;AAChD,IAAM,YAAYC,SAAQ,UAAU;AAKpC,IAAM,gBAAgB;AAAA,EACpBC,OAAK,WAAW,iBAAiB;AAAA;AAAA,EACjCA,OAAK,WAAW,oBAAoB;AAAA;AAAA,EACpCA,OAAK,WAAW,uBAAuB;AAAA;AAAA,EACvCA,OAAK,QAAQ,IAAI,GAAG,cAAc;AAAA;AACpC;AAEA,IAAI,iBAAiB;AACrB,IAAI,cAAc;AAElB,WAAW,mBAAmB,eAAe;AAC3C,MAAI;AACF,UAAM,cAAc,KAAK,MAAMH,cAAa,iBAAiB,OAAO,CAAC;AAErE,QAAI,YAAY,SAAS,eAAe;AACtC,uBAAiB,YAAY,WAAW;AACxC,oBAAc,YAAY,QAAQ;AAClC;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AACF;AAEA,IAAM,SAAS,IAAIH,MAAK;AAExB,OAAO,IAAI,KAAK,OAAO,MAAM;AAC3B,QAAM,SAAS,UAAU;AACzB,QAAM,eAAe,gBAAgB;AAGrC,QAAM,aAAa,aAAa,KAAK,OAAK,EAAE,aAAa,YAAY;AACrE,QAAM,YAAY,YAAY,cAAc;AAE5C,SAAO,EAAE,KAAK;AAAA,IACZ,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ,QAAQ,OAAO;AAAA,IACvB,kBAAkB;AAAA,IAClB,QAAQ;AAAA,MACN,kBAAkB,OAAO;AAAA,MACzB,cAAc,OAAO;AAAA,MACrB,sBAAsB,OAAO,iBAAiB,CAAC;AAAA,MAC/C,MAAM,OAAO,OAAO;AAAA,IACtB;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC,CAAC;AACH,CAAC;AAGD,OAAO,IAAI,YAAY,OAAO,MAAM;AAClC,MAAI,gBAAgB;AACpB,MAAI,kBAAkB;AACtB,MAAI;AAEJ,MAAI;AAEF,UAAM,cAAc,MAAM,MAAM,8BAA8B,WAAW,WAAW;AAAA,MAClF,SAAS,EAAE,UAAU,mBAAmB;AAAA,MACxC,QAAQ,YAAY,QAAQ,GAAI;AAAA;AAAA,IAClC,CAAC;AAED,QAAI,YAAY,IAAI;AAClB,YAAM,UAAU,MAAM,YAAY,KAAK;AACvC,sBAAgB,QAAQ,WAAW;AAGnC,YAAM,eAAe,CAAC,MAAc;
AAClC,cAAM,QAAQ,EAAE,QAAQ,MAAM,EAAE,EAAE,MAAM,GAAG,EAAE,IAAI,MAAM;AACvD,eAAO,EAAE,OAAO,MAAM,CAAC,KAAK,GAAG,OAAO,MAAM,CAAC,KAAK,GAAG,OAAO,MAAM,CAAC,KAAK,EAAE;AAAA,MAC5E;AAEA,YAAM,UAAU,aAAa,cAAc;AAC3C,YAAM,SAAS,aAAa,aAAa;AAEzC,wBACE,OAAO,QAAQ,QAAQ,SACtB,OAAO,UAAU,QAAQ,SAAS,OAAO,QAAQ,QAAQ,SACzD,OAAO,UAAU,QAAQ,SAAS,OAAO,UAAU,QAAQ,SAAS,OAAO,QAAQ,QAAQ;AAAA,IAChG,OAAO;AACL,cAAQ,yBAAyB,YAAY,MAAM;AAAA,IACrD;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,eAAe,QAAQ,IAAI,UAAU;AAAA,EAC/C;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe,kBAAkB,kBAAkB,WAAW,YAAY;AAAA,IAC1E;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC,CAAC;AACH,CAAC;AAED,OAAO,IAAI,UAAU,OAAO,MAAM;AAChC,MAAI;AAEF,cAAU;AAEV,WAAO,EAAE,KAAK;AAAA,MACZ,QAAQ;AAAA,MACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC,CAAC;AAAA,EACH,SAAS,OAAY;AACnB,WAAO,EAAE;AAAA,MACP;AAAA,QACE,QAAQ;AAAA,QACR,OAAO,MAAM;AAAA,QACb,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF,CAAC;AAOD,OAAO,IAAI,aAAa,OAAO,MAAM;AACnC,QAAM,SAAS,gBAAgB;AAE/B,SAAO,EAAE,KAAK;AAAA,IACZ,WAAW;AAAA,IACX,oBAAoB;AAAA,EACtB,CAAC;AACH,CAAC;AAGD,IAAM,kBAAkBE,IAAE,OAAO;AAAA,EAC/B,UAAUA,IAAE,OAAO;AAAA,EACnB,QAAQA,IAAE,OAAO,EAAE,IAAI,CAAC;AAC1B,CAAC;AAED,OAAO;AAAA,EACL;AAAA,EACAD,YAAW,QAAQ,eAAe;AAAA,EAClC,OAAO,MAAM;AACX,UAAM,EAAE,UAAU,OAAO,IAAI,EAAE,IAAI,MAAM,MAAM;AAE/C,QAAI;AACF,gBAAU,UAAU,MAAM;AAC1B,YAAM,SAAS,gBAAgB;AAC/B,YAAM,iBAAiB,OAAO,KAAK,OAAK,EAAE,aAAa,SAAS,YAAY,CAAC;AAE7E,aAAO,EAAE,KAAK;AAAA,QACZ,SAAS;AAAA,QACT,UAAU,SAAS,YAAY;AAAA,QAC/B,WAAW,gBAAgB;AAAA,QAC3B,SAAS,eAAe,QAAQ;AAAA,MAClC,CAAC;AAAA,IACH,SAAS,OAAY;AACnB,aAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,IAC7C;AAAA,EACF;AACF;AAGA,OAAO,OAAO,uBAAuB,OAAO,MAAM;AAChD,QAAM,WAAW,EAAE,IAAI,MAAM,UAAU;AAEvC,MAAI;AACF,iBAAa,QAAQ;AAErB,WAAO,EAAE,KAAK;AAAA,MACZ,SAAS;AAAA,MACT,UAAU,SAAS,YAAY;AAAA,MAC/B,SAAS,eAAe,QAAQ;AAAA,IAClC,CAAC;AAAA,EACH,SAAS,OAAY;AACnB,WAAO,EAAE,KAAK,EAAE,OAAO,MAAM,QAAQ,GAAG,GAAG;AAAA,EAC7C;AACF,CAAC;;;AC/LD,SAAS,QAAAM,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAElB;
AAEO,IAAM,YAAY,IAAIC,MAAK;AAGlC,IAAM,cAAcC,IAAE,OAAO;AAAA,EAC3B,SAASA,IAAE,OAAO;AAAA,EAClB,KAAKA,IAAE,OAAO,EAAE,SAAS;AAAA,EACzB,MAAMA,IAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,QAAQ,WAAW;AAAA,EAC9B,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAGA,UAAM,UAAU,MAAW,gBAAgB;AAC3C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,4DAA4D,GAAG,GAAG;AAAA,IAC3F;AAEA,UAAM,mBAAmB,KAAK,OAAO,QAAQ;AAC7C,UAAM,SAAS,MAAW,cAAc,KAAK,SAAS,kBAAkB;AAAA,MACtE;AAAA,MACA,MAAM,KAAK;AAAA,IACb,CAAC;AAED,WAAO,EAAE,KAAK;AAAA,MACZ,IAAI,OAAO;AAAA,MACX,MAAM,KAAK,QAAQ;AAAA,MACnB,SAAS,KAAK;AAAA,MACd,KAAK;AAAA,MACL,QAAQ,OAAO;AAAA,MACf,KAAK;AAAA;AAAA,IACP,GAAG,GAAG;AAAA,EACR;AACF;AAGA,UAAU,IAAI,yBAAyB,OAAO,MAAM;AAClD,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AAEzC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,mBAAmB,MAAW,qBAAqB,WAAW,QAAQ,gBAAgB;AAG5F,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,iBAAiB,IAAI,OAAO,SAAS;AACnC,YAAM,UAAU,MAAW,UAAU,KAAK,EAAE;AAC5C,aAAO;AAAA,QACL,IAAI,KAAK;AAAA,QACT,MAAM,KAAK,QAAQ;AAAA,QACnB,SAAS,KAAK;AAAA,QACd,KAAK,KAAK;AAAA,QACV,QAAQ,UAAU,YAAY;AAAA,QAC9B,WAAW,KAAK;AAAA,MAClB;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,KAAK;AAAA,IACZ;AAAA,IACA,WAAW;AAAA,IACX,OAAO,aAAa;AAAA,IACpB,SAAS,aAAa,OAAO,OAAK,EAAE,WAAW,SAAS,EAAE;AAAA,EAC5D,CAAC;AACH,CAAC;AAGD,UAAU,IAAI,qCAAqC,OAAO,MAAM;AAC9D,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,SAAS;AAC/E,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,EACpD;AAEA,QAAM,UAAU,MAAW,UAAU,UAAU;AAE/C,SAAO,EAAE,KAAK;AAAA,IACZ,IAAI;AAAA,IACJ,SAAS,KAAK;AAAA,IACd,KAAK,KAAK;AAAA,IACV,QAAQ,UAAU,YAAY;AAAA,IAC9B,WAAW,KAAK;AAAA,IAChB,UAAU,UAAU,OAAO;AAAA;AAAA,E
AC7B,CAAC;AACH,CAAC;AAGD,IAAM,kBAAkBD,IAAE,OAAO;AAAA,EAC/B,MAAMA,IAAE,OAAO,EAAE,SAAS,EAAE,UAAU,OAAK,IAAI,SAAS,GAAG,EAAE,IAAI,MAAS;AAC5E,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,SAAS,eAAe;AAAA,EACnC,OAAO,MAAM;AACX,UAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAC3C,UAAM,QAAQ,EAAE,IAAI,MAAM,OAAO;AAEjC,UAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,SAAS,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,EAAE,MAAM,MAAM,MAAM,UAAU,CAAC;AAEvG,QAAI,OAAO,WAAW,WAAW;AAC/B,aAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,IACpD;AAEA,WAAO,EAAE,KAAK;AAAA,MACZ;AAAA,MACA,MAAM,OAAO;AAAA,MACb,WAAW,OAAO,OAAO,MAAM,IAAI,EAAE;AAAA,IACvC,CAAC;AAAA,EACH;AACF;AAGA,IAAM,aAAaD,IAAE,OAAO;AAAA,EAC1B,QAAQA,IAAE,KAAK,CAAC,WAAW,SAAS,CAAC,EAAE,SAAS;AAClD,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,QAAQ,WAAW,SAAS,CAAC;AAAA,EACxC,OAAO,MAAM;AACX,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,UAAM,UAAU,MAAW,aAAa,UAAU;AAElD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,mDAAmD,GAAG,GAAG;AAAA,IAClF;AAEA,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,SAAS,kBAAkB,CAAC;AAAA,EAC7D;AACF;AAGA,IAAM,cAAcD,IAAE,OAAO;AAAA,EAC3B,OAAOA,IAAE,OAAO;AAClB,CAAC;AAED,UAAU;AAAA,EACR;AAAA,EACAC,YAAW,QAAQ,WAAW;AAAA,EAC9B,OAAO,MAAM;AACX,UAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAC3C,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAMC,aAAY,MAAW,UAAU,UAAU;AACjD,QAAI,CAACA,YAAW;AACd,aAAO,EAAE,KAAK,EAAE,OAAO,0BAA0B,GAAG,GAAG;AAAA,IACzD;AAIA,UAAM,UAAU,MAAW,UAAU,YAAY,KAAK,OAAO,EAAE,YAAY,MAAM,CAAC;AAElF,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,8BAA8B,GAAG,GAAG;AAAA,IAC7D;AAEA,WAAO,EAAE,KAAK,EAAE,SAAS,MAAM,SAAS,KAAK,MAAM,OAAO,CAAC;AAAA,EAC7D;AACF;AAGA,UAAU,KAAK,kCAAkC,OAAO,MAAM;AAC5D,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AAEzC,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAGA,QAAM,mBAAmB,MAAW,qBAAqB,WAAW,QAAQ,gBAAgB;AAC5F,MAAI,SAAS;AAEb,aAAW,YAAY,kBAAkB;AACvC,UAAMA,aAAY,MAAW,UAAU,SAAS,EAAE;AAClD,QAAIA,YAAW;AACb,YAAM,UAAU,MAAW,aAAa,SAAS,EAAE;AACnD,UAAI,QAAS;AAAA,IA
Cf;AAAA,EACF;AAEA,SAAO,EAAE,KAAK,EAAE,SAAS,MAAM,OAAO,CAAC;AACzC,CAAC;AAID,UAAU,IAAI,uBAAuB,OAAO,MAAM;AAChD,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAG3C,QAAMC,YAAW,MAAM,eAAe,KAAK;AAC3C,MAAI,eAAyD;AAC7D,MAAI,mBAAmB,QAAQ,IAAI;AACnC,MAAI;AAGJ,aAAW,WAAWA,WAAU;AAC9B,mBAAe,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,QAAQ,EAAE;AAClF,QAAI,cAAc;AAChB,yBAAmB,QAAQ;AAC3B,uBAAiB,QAAQ;AACzB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,CAAC,cAAc;AACjB,eAAW,WAAWA,WAAU;AAC9B,qBAAe,MAAW,QAAQ,YAAY,QAAQ,gBAAgB;AACtE,UAAI,cAAc;AAChB,2BAAmB,QAAQ;AAC3B,yBAAiB,aAAa;AAC9B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,WAAW,MAAW,UAAU,UAAU;AAChD,MAAI,CAAC,gBAAgB,CAAC,UAAU;AAC9B,WAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,EACpD;AAGA,SAAO,IAAI;AAAA,IACT,IAAI,eAAe;AAAA,MACjB,MAAM,MAAM,YAAY;AACtB,cAAM,UAAU,IAAI,YAAY;AAChC,YAAI,aAAa;AACjB,YAAID,aAAY;AAChB,YAAI,YAAY;AAChB,cAAM,WAAW;AAGjB,mBAAW;AAAA,UACT,QAAQ,OAAO;AAAA,QAAwB,KAAK,UAAU,EAAE,YAAY,QAAQ,YAAY,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,QAClG;AAEA,eAAOA,cAAa,YAAY,UAAU;AACxC,cAAI;AACF,kBAAM,SAAS,MAAW,QAAQ,YAAY,kBAAkB,EAAE,WAAW,eAAe,CAAC;AAG7F,gBAAI,OAAO,WAAW,YAAY;AAChC,oBAAM,aAAa,OAAO,OAAO,MAAM,WAAW,MAAM;AACxD,kBAAI,YAAY;AACd,2BAAW;AAAA,kBACT,QAAQ,OAAO;AAAA,QAAwB,KAAK,UAAU,EAAE,MAAM,WAAW,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,gBACnF;AAAA,cACF;AACA,2BAAa,OAAO;AAAA,YACtB;AAEA,YAAAA,aAAY,OAAO,WAAW;AAE9B,gBAAI,CAACA,YAAW;AACd,yBAAW;AAAA,gBACT,QAAQ,OAAO;AAAA,QAAsB,KAAK,UAAU,EAAE,QAAQ,UAAU,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,cAClF;AACA;AAAA,YACF;AAGA,kBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AACzC;AAAA,UACF,QAAQ;AACN;AAAA,UACF;AAAA,QACF;AAEA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,IACD;AAAA,MACE,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF,CAAC;AAGD,UAAU,IAAI,4CAA4C,OAAO,MAAM;AACrE,QAAM,YAAY,EAAE,IAAI,MAAM,WAAW;AACzC,QAAM,aAAa,EAAE,IAAI,MAAM,YAAY;AAE3C,QAAM,UAAU,MAAM,eAAe,QAAQ,SAAS;AACtD,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,EACnD;AAEA,QAAM,OAAO,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,SAAS;AAC/E,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG
;AAAA,EACpD;AAGA,SAAO,IAAI;AAAA,IACT,IAAI,eAAe;AAAA,MACjB,MAAM,MAAM,YAAY;AACtB,cAAM,UAAU,IAAI,YAAY;AAChC,YAAI,aAAa;AACjB,YAAIA,aAAY;AAEhB,eAAOA,YAAW;AAChB,cAAI;AACF,kBAAM,SAAS,MAAW,QAAQ,YAAY,QAAQ,kBAAkB,EAAE,UAAU,CAAC;AAGrF,gBAAI,OAAO,WAAW,YAAY;AAChC,oBAAM,aAAa,OAAO,OAAO,MAAM,WAAW,MAAM;AACxD,kBAAI,YAAY;AACd,2BAAW;AAAA,kBACT,QAAQ,OAAO;AAAA,QAAwB,KAAK,UAAU,EAAE,MAAM,WAAW,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,gBACnF;AAAA,cACF;AACA,2BAAa,OAAO;AAAA,YACtB;AAEA,YAAAA,aAAY,OAAO,WAAW;AAE9B,gBAAI,CAACA,YAAW;AACd,yBAAW;AAAA,gBACT,QAAQ,OAAO;AAAA,QAAsB,KAAK,UAAU,EAAE,QAAQ,UAAU,CAAC,CAAC;AAAA;AAAA,CAAM;AAAA,cAClF;AACA;AAAA,YACF;AAGA,kBAAM,IAAI,QAAQ,OAAK,WAAW,GAAG,GAAG,CAAC;AAAA,UAC3C,QAAQ;AACN;AAAA,UACF;AAAA,QACF;AAEA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,IACD;AAAA,MACE,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,QACjB,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AACF,CAAC;;;AC5XD;AAJA,SAAS,QAAAE,aAAY;AACrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,KAAAC,WAAS;AAClB,SAAS,UAAAC,eAAc;AAGvB;AAGA,IAAM,QAAQ,IAAIC,MAAK;AAGvB,IAAM,uBAAuB,oBAAI,IAA6B;AAE9D,IAAM,mBAAmBC,IAAE,OAAO;AAAA,EAChC,QAAQA,IAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACxB,cAAcA,IAAE,OAAOA,IAAE,OAAO,GAAGA,IAAE,QAAQ,CAAC;AAAA,EAC9C,YAAYA,IAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA,EACtC,OAAOA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,kBAAkBA,IAAE,OAAO,EAAE,SAAS;AAAA,EACtC,MAAMA,IAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,eAAeA,IAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG,EAAE,SAAS;AAC3D,CAAC;AAGD,MAAM;AAAA,EACJ;AAAA,EACAC,YAAW,QAAQ,gBAAgB;AAAA,EACnC,OAAO,MAAM;AACX,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,SAAS,UAAU;AAEzB,UAAM,aAAyB;AAAA,MAC7B,SAAS;AAAA,MACT,cAAc,KAAK;AAAA,MACnB,YAAY,KAAK;AAAA,MACjB,eAAe,KAAK,iBAAiB;AAAA,MACrC,QAAQ;AAAA,IACV;AAGA,UAAM,QAAQ,MAAM,MAAM,OAAO;AAAA,MAC/B,MAAM,KAAK,QAAQ;AAAA,MACnB,kBAAkB,KAAK,oBAAoB,OAAO;AAAA,MAClD,OAAO,KAAK,SAAS,OAAO;AAAA,MAC5B,eAAe;AAAA,QACb,eAAe,EAAE,MAAM,OAAO,YAAY,OAAO,WAAW,MAAM;AAAA,QAClE,MAAM;AAAA,MACR;AAAA,IACF,CAAC;AAED,UAAM,SAAS,MAAM;AACrB,UAAM,kBAAkB,IAAI,gBAAgB;AAC5C,yBAAqB,IAAI,QAAQ,eAAe;AAIhD,UAAM,WAAW,UAAU,MAAM,IAAIC,QAAO,EAAE,C
AAC;AAC/C,UAAM,oBAAoB,OAAO,QAAQ,QAAQ;AAEjD,UAAM,qBAAqB,MAAM;AAC/B,YAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAAgC;AACnE,YAAM,SAAS,SAAS,UAAU;AAClC,UAAI,eAAe;AAEnB,YAAM,WAAW,OAAO,SAAiB;AACvC,YAAI,aAAc;AAClB,YAAI;AACF,gBAAM,OAAO,MAAM,SAAS,IAAI;AAAA;AAAA,CAAM;AAAA,QACxC,QAAQ;AACN,yBAAe;AAAA,QACjB;AAAA,MACF;AAEA,OAAC,YAAY;AACX,cAAM,SAAS,KAAK,UAAU,EAAE,MAAM,kBAAkB,SAAS,CAAC,CAAC;AACnE,YAAI;AACF,gBAAM,MAAM,QAAQ;AAAA,YAClB,QAAQ,KAAK;AAAA,YACb;AAAA,YACA,aAAa,gBAAgB;AAAA,YAC7B;AAAA,UACF,CAAC;AACD,gBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC,CAAC;AAAA,QACnD,SAAS,KAAU;AACjB,cAAI,IAAI,SAAS,gBAAgB,gBAAgB,OAAO,SAAS;AAC/D,oBAAQ,IAAI,eAAe,MAAM,gBAAgB;AACjD,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,UAClD,OAAO;AACL,oBAAQ,MAAM,wBAAwB,MAAM,KAAK,IAAI,OAAO;AAC5D,kBAAM,WAAW,IAAI,WAAW;AAChC,kBAAM,SAAS,KAAK,UAAU,EAAE,MAAM,SAAS,WAAW,SAAS,CAAC,CAAC;AACrE,kBAAM,aAAyB;AAAA,cAC7B,GAAG;AAAA,cACH,QAAQ;AAAA,cACR,OAAO;AAAA,YACT;AACA,kBAAM,eAAe,OAAO,QAAQ;AAAA,cAClC,QAAQ;AAAA,gBACN,eAAe,EAAE,MAAM,OAAO,YAAY,OAAO,WAAW,MAAM;AAAA,gBAClE,MAAM;AAAA,cACR;AAAA,YACF,CAAC;AACD,gBAAI,WAAW,YAAY;AACzB,oBAAM,EAAE,aAAAC,aAAY,IAAI,MAAM;AAC9B,cAAAA,aAAY,WAAW,YAAY;AAAA,gBACjC,MAAM;AAAA,gBACN;AAAA,gBACA,WAAW;AAAA,gBACX,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,gBAClC,MAAM,EAAE,QAAQ,UAAU,OAAO,SAAS;AAAA,cAC5C,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,UAAE;AACA,gBAAM,SAAS,QAAQ;AACvB,iBAAO,MAAM,EAAE,MAAM,MAAM;AAAA,UAAC,CAAC;AAC7B,gBAAM,oBAAoB,OAAO,QAAQ,EAAE,MAAM,MAAM;AAAA,UAAC,CAAC;AACzD,+BAAqB,OAAO,MAAM;AAAA,QACpC;AAAA,MACF,GAAG;AAEH,aAAO;AAAA,IACT;AAEA,UAAM,cAAc,gBAAgB,UAAU,kBAAkB;AAEhE,WAAO,EAAE,KAAK,EAAE,QAAQ,QAAQ,UAAU,GAAG,GAAG;AAAA,EAClD;AACF;AAGA,MAAM,IAAI,QAAQ,OAAO,MAAM;AAC7B,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAE/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,iBAAiB,GAAG,GAAG;AAAA,EAChD;AAEA,QAAM,OAAO,QAAQ,QAAQ;AAC7B,MAAI,CAAC,MAAM,SAAS;AAClB,WAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,EACvD;AAGA,MAAI,oBAAuG,CAAC;AAC5G,MAAI;AACF,UAAM,EAAE,oBAAAC,qBAAoB,gBAAAC,gBAAe,IAAI,MAAM;AACrD,QAAID,oBAAmB,GAAG;AACxB,Y
AAM,QAAQ,MAAMC,gBAAe,gBAAgB,EAAE;AACrD,0BAAoB,MACjB,OAAO,CAAC,MAAM,EAAE,aAAa,mBAAmB,EAChD,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,UAAU,aAAa,EAAE,aAAa,WAAW,EAAE,UAAU,EAAE;AAAA,IAC9F;AAAA,EACF,QAAQ;AAAA,EAAC;AAET,SAAO,EAAE,KAAK;AAAA,IACZ,QAAQ;AAAA,IACR,QAAQ,KAAK;AAAA,IACb,QAAQ,KAAK;AAAA,IACb,OAAO,KAAK;AAAA,IACZ,YAAY,KAAK;AAAA,IACjB,OAAO,QAAQ;AAAA,IACf,MAAM,QAAQ;AAAA,IACd,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,IACzC,mBAAmB,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,EACxE,CAAC;AACH,CAAC;AAGD,MAAM,KAAK,eAAe,OAAO,MAAM;AACrC,QAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,QAAM,UAAU,MAAM,eAAe,QAAQ,EAAE;AAE/C,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,KAAK,EAAE,OAAO,iBAAiB,GAAG,GAAG;AAAA,EAChD;AAEA,QAAM,OAAO,QAAQ,QAAQ;AAC7B,MAAI,CAAC,MAAM,SAAS;AAClB,WAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,EACvD;AAEA,MAAI,KAAK,WAAW,WAAW;AAC7B,WAAO,EAAE,KAAK,EAAE,OAAO,mBAAmB,KAAK,MAAM,GAAG,GAAG,GAAG;AAAA,EAChE;AAEA,QAAM,kBAAkB,qBAAqB,IAAI,EAAE;AACnD,MAAI,iBAAiB;AACnB,oBAAgB,MAAM;AACtB,yBAAqB,OAAO,EAAE;AAAA,EAChC;AAEA,QAAM,gBAA4B;AAAA,IAChC,GAAG;AAAA,IACH,QAAQ;AAAA,IACR,OAAO;AAAA,EACT;AACA,QAAM,eAAe,OAAO,IAAI;AAAA,IAC9B,QAAQ,EAAE,GAAG,QAAQ,QAAQ,MAAM,cAAc;AAAA,EACnD,CAAC;AAED,MAAI,KAAK,YAAY;AACnB,UAAM,EAAE,aAAAF,aAAY,IAAI,MAAM;AAC9B,IAAAA,aAAY,KAAK,YAAY;AAAA,MAC3B,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,MAAM,EAAE,QAAQ,UAAU,OAAO,yBAAyB;AAAA,IAC5D,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,KAAK,EAAE,QAAQ,IAAI,QAAQ,UAAU,OAAO,yBAAyB,CAAC;AACjF,CAAC;AAED,IAAO,gBAAQ;;;ApC1Mf;AACA;;;AqCVA,SAAS,QAAAG,aAAY;AACrB,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,YAAAC,iBAAgB;AAEzB,IAAMC,aAAYF,WAAUD,KAAI;AAYhC,SAAS,yBAAiC;AACxC,QAAMI,MAAKF,UAAS;AAEpB,MAAIE,QAAO,UAAU;AACnB,WAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMT,KAAK;AAAA,EACL;AAEA,MAAIA,QAAO,SAAS;AAClB,WAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUT,KAAK;AAAA,EACL;AAGA,SAAO;AAAA;AAAA;AAAA;AAAA,EAIP,KAAK;AACP;AAKA,eAAsB,YAA4C;AAChE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAMD,WAAU,WAAW,EAAE,SAAS,IAAK,CAAC;AAC/D,UAAM,UAAU,OAAO,KAAK;AAE5B,WAAO;AAAA,MACL,WAAW;AAAA,MACX;AAAA
,IACF;AAAA,EACF,SAAS,OAAY;AACnB,WAAO;AAAA,MACL,WAAW;AAAA,MACX,OAAO;AAAA,MACP,qBAAqB,uBAAuB;AAAA,IAC9C;AAAA,EACF;AACF;AAMA,eAAsB,kBAAkB,UAGpC,CAAC,GAAqB;AACxB,QAAM,EAAE,QAAQ,OAAO,gBAAgB,KAAK,IAAI;AAEhD,QAAM,YAAY,MAAM,UAAU;AAElC,MAAI,CAAC,UAAU,WAAW;AACxB,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,4CAAuC;AACrD,cAAQ,MAAM,EAAE;AAChB,cAAQ,MAAM,4DAA4D;AAC1E,cAAQ,MAAM,EAAE;AAChB,UAAI,UAAU,qBAAqB;AACjC,gBAAQ,MAAM,UAAU,mBAAmB;AAAA,MAC7C;AACA,cAAQ,MAAM,EAAE;AAChB,cAAQ,MAAM,+CAA+C;AAC7D,cAAQ,MAAM,EAAE;AAAA,IAClB;AAEA,QAAI,eAAe;AACjB,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,OAAO;AAAA,EAEZ;AAEA,SAAO;AACT;;;ArCjGA,IAAI,iBAAoC;AACxC,IAAI,eAAoC;AAGxC,IAAM,mBAAmB;AACzB,IAAM,oBAAoB,CAAC,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,IAAI;AAcrF,SAAS,kBAAiC;AACxC,MAAI;AACF,UAAM,aAAaE,SAAQC,eAAc,YAAY,GAAG,CAAC;AACzD,UAAM,SAASC,UAAQ,YAAY,MAAM,KAAK;AAE9C,QAAIC,aAAW,MAAM,KAAKA,aAAWC,OAAK,QAAQ,cAAc,CAAC,GAAG;AAClE,aAAO;AAAA,IACT;AAEA,UAAM,YAAYF,UAAQ,YAAY,MAAM,MAAM,KAAK;AACvD,QAAIC,aAAW,SAAS,KAAKA,aAAWC,OAAK,WAAW,cAAc,CAAC,GAAG;AACxE,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGA,eAAe,uBAAuB,MAAgC;AACpE,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,oBAAoB,IAAI,eAAe;AAAA,MAClE,QAAQ,YAAY,QAAQ,GAAI;AAAA,IAClC,CAAC;AACD,QAAI,SAAS,IAAI;AACf,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,aAAO,KAAK,SAAS;AAAA,IACvB;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGA,SAAS,YAAY,MAAgC;AACnD,SAAO,IAAI,QAAQ,CAACF,cAAY;AAC9B,UAAM,SAAS,gBAAgB;AAE/B,WAAO,KAAK,SAAS,CAAC,QAA+B;AACnD,UAAI,IAAI,SAAS,cAAc;AAC7B,QAAAA,UAAQ,IAAI;AAAA,MACd,OAAO;AACL,QAAAA,UAAQ,KAAK;AAAA,MACf;AAAA,IACF,CAAC;AAED,WAAO,KAAK,aAAa,MAAM;AAC7B,aAAO,MAAM;AACb,MAAAA,UAAQ,KAAK;AAAA,IACf,CAAC;AAED,WAAO,OAAO,MAAM,SAAS;AAAA,EAC/B,CAAC;AACH;AAGA,eAAe,YAAY,eAA2E;AACpG,MAAI,MAAM,uBAAuB,aAAa,GAAG;AAC/C,WAAO,EAAE,MAAM,eAAe,gBAAgB,KAAK;AAAA,EACrD;AAEA,MAAI,CAAE,MAAM,YAAY,aAAa,GAAI;AACvC,WAAO,EAAE,MAAM,eAAe,gBAAgB,MAAM;AAAA,EACtD;AAEA,aAAW,QAAQ,mBAAmB;AACpC,QAAI,SAAS,cAAe;AAE5B,QAAI,MAAM,uBAAuB,IAAI,GAAG;AACtC,aAAO,EAAE,MAAM,gBAAgB,KAAK;AAAA,IACtC;AAEA,
QAAI,CAAE,MAAM,YAAY,IAAI,GAAI;AAC9B,aAAO,EAAE,MAAM,gBAAgB,MAAM;AAAA,IACvC;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,eAAe,gBAAgB,MAAM;AACtD;AAGA,SAAS,mBAAmB,QAAyB;AACnD,QAAM,cAAcE,OAAK,QAAQ,SAAS,UAAU;AACpD,SAAOD,aAAW,WAAW;AAC/B;AAGA,SAAS,eAAe,QAAyB;AAE/C,QAAM,SAASC,OAAK,QAAQ,OAAO,KAAK;AACxC,QAAM,WAAWA,OAAK,QAAQ,OAAO,OAAO;AAC5C,QAAM,aAAaA,OAAK,QAAQ,KAAK;AACrC,QAAM,eAAeA,OAAK,QAAQ,OAAO;AAEzC,SAAOD,aAAW,MAAM,KAAKA,aAAW,QAAQ,KAAKA,aAAW,UAAU,KAAKA,aAAW,YAAY;AACxG;AAGA,SAAS,wBAAwB,QAA+B;AAE9D,QAAME,iBAAgB;AAAA,IACpBD,OAAK,QAAQ,SAAS,cAAc,WAAW;AAAA,IAC/CA,OAAK,QAAQ,SAAS,cAAc,OAAO,WAAW;AAAA,EACxD;AAEA,aAAW,cAAcC,gBAAe;AACtC,QAAIF,aAAW,UAAU,GAAG;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,WAAW,SAAiB,MAAgB,KAAa,KAAuE;AACvI,SAAO,IAAI,QAAQ,CAACD,cAAY;AAC9B,UAAM,QAAQI,OAAM,SAAS,MAAM;AAAA,MACjC;AAAA,MACA,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,MAChC;AAAA,MACA,OAAO;AAAA,IACT,CAAC;AAED,QAAI,SAAS;AACb,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AAAE,gBAAU,KAAK,SAAS;AAAA,IAAG,CAAC;AACzE,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AAAE,gBAAU,KAAK,SAAS;AAAA,IAAG,CAAC;AAEzE,UAAM,GAAG,SAAS,CAAC,SAAS;AAC1B,MAAAJ,UAAQ,EAAE,SAAS,SAAS,GAAG,OAAO,CAAC;AAAA,IACzC,CAAC;AAED,UAAM,GAAG,SAAS,CAAC,QAAQ;AACzB,MAAAA,UAAQ,EAAE,SAAS,OAAO,QAAQ,IAAI,QAAQ,CAAC;AAAA,IACjD,CAAC;AAAA,EACH,CAAC;AACH;AAGA,eAAe,WACb,SACA,UAAkB,kBAClB,QAAiB,OACjB,WAC4E;AAC5E,QAAM,SAAS,gBAAgB;AAE/B,MAAI,CAAC,QAAQ;AACX,QAAI,CAAC,MAAO,SAAQ,IAAI,wCAAmC;AAC3D,WAAO,EAAE,SAAS,MAAM,MAAM,QAAQ;AAAA,EACxC;AAEA,QAAM,EAAE,MAAM,YAAY,eAAe,IAAI,MAAM,YAAY,OAAO;AAEtE,MAAI,gBAAgB;AAClB,QAAI,CAAC,MAAO,SAAQ,IAAI,uDAAkD,UAAU,EAAE;AACtF,WAAO,EAAE,SAAS,MAAM,MAAM,WAAW;AAAA,EAC3C;AAGA,QAAM,UAAUC,aAAWC,OAAK,QAAQ,gBAAgB,CAAC;AACzD,QAAM,SAAS,CAAC,WAAWD,aAAWC,OAAK,QAAQ,mBAAmB,CAAC;AAEvE,QAAM,aAAa,UAAU,SAAS,SAAS,QAAQ;AAIvD,QAAM,EAAE,cAAc,mBAAmB,GAAG,SAAS,IAAI,QAAQ;AAGjE,QAAM,SAAS,aAAa,oBAAoB,OAAO;AAIvD,QAAM,gBAAgB,EAAE,YAAY,OAAO;AAC3C,QAAM,oBAAoBA,OAAK,QAAQ,qBAAqB;AAC5D,MAAI;AACF,IAAAG,eAAc,mBAAmB,KAAK,UAAU,eAAe,MAAM,CAAC,CAAC;AACvE,QAAI,CAAC,MAAO,SAAQ,IAAI,yCAAkC,iBAAiB,EAAE;AAAA,EAC/E,SAAS,KAAK;
AACZ,QAAI,CAAC,MAAO,SAAQ,KAAK,4CAAuC,GAAG,EAAE;AAAA,EACvE;AAEA,QAAM,SAAkE;AAAA,IACtE,GAAG;AAAA,IACH,MAAM,OAAO,UAAU;AAAA;AAAA,EACzB;AAOA,QAAM,YAAY,eAAe,MAAM;AACvC,QAAM,uBAAuB,wBAAwB,MAAM;AAC3D,QAAM,WAAW,mBAAmB,MAAM;AAC1C,QAAM,eAAe,QAAQ,IAAI,aAAa;AAE9C,MAAI;AACJ,MAAI;AACJ,MAAI,MAAM;AAEV,MAAI,sBAAsB;AAGxB,cAAU;AACV,WAAO,CAAC,WAAW;AAGnB,UAAMP,SAAQ,oBAAoB;AAGlC,WAAO,OAAO,OAAO,UAAU;AAC/B,WAAO,WAAW;AAElB,QAAI,CAAC,MAAO,SAAQ,IAAI,sDAA+C;AAAA,EACzE,WAAW,aAAa,gBAAgB,CAAC,YAAY;AAInD,cAAU;AAEV,WAAO,eAAe,QAClB,CAAC,QAAQ,SAAS,MAAM,OAAO,UAAU,CAAC,IAC1C,CAAC,OAAO,OAAO;AAAA,EACrB,WAAW,WAAW;AAEpB,QAAI,gBAAgB,CAAC,UAAU;AAE7B,UAAI,CAAC,MAAO,SAAQ,IAAI,+CAAwC;AAEhE,YAAM,YAAY,eAAe,QAC7B,CAAC,QAAQ,OAAO,IAChB,CAAC,OAAO,OAAO;AAEnB,YAAM,cAAc,MAAM,WAAW,YAAY,WAAW,QAAQ,MAAM;AAE1E,UAAI,CAAC,YAAY,SAAS;AACxB,YAAI,CAAC,MAAO,SAAQ,MAAM,8BAAyB;AACnD,eAAO,EAAE,SAAS,MAAM,MAAM,WAAW;AAAA,MAC3C;AAEA,UAAI,CAAC,MAAO,SAAQ,IAAI,gCAA2B;AAEnD,gBAAU;AAEV,aAAO,eAAe,QAClB,CAAC,QAAQ,SAAS,MAAM,OAAO,UAAU,CAAC,IAC1C,CAAC,OAAO,OAAO;AAAA,IACrB,OAAO;AAEL,gBAAU;AAEV,aAAO,eAAe,QAClB,CAAC,QAAQ,OAAO,MAAM,OAAO,UAAU,CAAC,IACxC,CAAC,OAAO,KAAK;AAAA,IACnB;AAAA,EACF,OAAO;AAEL,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,8DAAyD;AACvE,cAAQ,MAAM,mEAAmE;AAAA,IACnF;AACA,WAAO,EAAE,SAAS,MAAM,MAAM,WAAW;AAAA,EAC3C;AAEA,QAAM,QAAQM,OAAM,SAAS,MAAM;AAAA,IACjC;AAAA,IACA,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,IAChC,KAAK;AAAA,IACL,UAAU;AAAA,IACV,OAAO;AAAA,EACT,CAAC;AAGD,QAAM,iBAAiB;AACvB,MAAI,UAAU;AACd,MAAI,SAAS;AACb,MAAI,WAA0B;AAE9B,QAAM,iBAAiB,IAAI,QAAiB,CAACJ,cAAY;AACvD,UAAM,UAAU,WAAW,MAAM;AAC/B,UAAI,CAAC,WAAW,CAAC,QAAQ;AACvB,QAAAA,UAAQ,KAAK;AAAA,MACf;AAAA,IACF,GAAG,cAAc;AAEjB,UAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB;AACzC,YAAM,SAAS,KAAK,SAAS;AAC7B,UAAI,CAAC,OAAO;AAEV,cAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAK,EAAE,KAAK,CAAC;AAC5D,mBAAW,QAAQ,OAAO;AACxB,kBAAQ,IAAI,aAAa,IAAI,EAAE;AAAA,QACjC;AAAA,MACF;AACA,UAAI,CAAC,YAAY,OAAO,SAAS,OAAO,KAAK,OAAO,SAAS,SAAS,KAAK,OAAO,SAAS,WAAW,IAAI;AACxG,kBAAU;AACV,qBAAa,OAAO;AACpB,QAAAA,UAAQ,IAAI;AAAA,MACd;AAAA,IACF,CAAC;AAED,UAAM,QAAQ,GAAG,Q
AAQ,CAAC,SAAiB;AACzC,YAAM,SAAS,KAAK,SAAS,EAAE,KAAK;AACpC,UAAI,CAAC,SAAS,QAAQ;AACpB,gBAAQ,MAAM,aAAa,OAAO,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,MACnD;AAAA,IACF,CAAC;AAED,UAAM,GAAG,SAAS,CAAC,QAAQ;AACzB,UAAI,CAAC,MAAO,SAAQ,MAAM,gCAA2B,IAAI,OAAO,EAAE;AAClE,mBAAa,OAAO;AACpB,MAAAA,UAAQ,KAAK;AAAA,IACf,CAAC;AAED,UAAM,GAAG,QAAQ,CAAC,SAAS;AACzB,eAAS;AACT,iBAAW;AACX,UAAI,CAAC,SAAS;AACZ,qBAAa,OAAO;AACpB,QAAAA,UAAQ,KAAK;AAAA,MACf;AACA,qBAAe;AAAA,IACjB,CAAC;AAAA,EACH,CAAC;AAED,iBAAe;AAGf,QAAM,WAAW,MAAM;AAEvB,MAAI,CAAC,UAAU;AACb,QAAI,UAAU,aAAa,GAAG;AAC5B,UAAI,CAAC,MAAO,SAAQ,MAAM,+CAA0C,QAAQ,GAAG;AAAA,IACjF,WAAW,CAAC,QAAQ;AAClB,UAAI,CAAC,MAAO,SAAQ,IAAI,yDAAoD;AAAA,IAC9E;AAAA,EAEF;AAEA,SAAO,EAAE,SAAS,OAAO,MAAM,YAAY,SAAS,SAAS;AAC/D;AAGO,SAAS,YAAkB;AAChC,MAAI,cAAc;AAChB,iBAAa,KAAK,SAAS;AAC3B,mBAAe;AAAA,EACjB;AACF;AAEA,eAAsB,UAAU,UAA+B,CAAC,GAAG;AACjE,QAAM,MAAM,IAAIM,MAAK;AAGrB,MAAI,IAAI,KAAK,KAAK;AAAA,IAChB,QAAQ;AAAA;AAAA,IACR,cAAc,CAAC,OAAO,QAAQ,OAAO,SAAS,UAAU,SAAS;AAAA,IACjE,cAAc,CAAC,gBAAgB,iBAAiB,kBAAkB;AAAA,IAClE,eAAe,CAAC,eAAe,aAAa;AAAA,IAC5C,QAAQ;AAAA;AAAA,EACV,CAAC,CAAC;AAGF,MAAI,CAAC,QAAQ,OAAO;AAClB,QAAI,IAAI,KAAK,OAAO,CAAC;AAAA,EACvB;AAGA,MAAI,MAAM,WAAW,MAAM;AAG3B,MAAI,MAAM,aAAa,QAAQ;AAC/B,MAAI,MAAM,WAAW,MAAM;AAC3B,MAAI,MAAM,aAAa,SAAS;AAChC,MAAI,MAAM,cAAc,SAAS;AACjC,MAAI,MAAM,UAAU,aAAK;AAGzB,MAAI,IAAI,iBAAiB,OAAO,MAAM;AACpC,WAAO,EAAE,KAAK,oBAAoB,CAAC;AAAA,EACrC,CAAC;AAGD,MAAI,IAAI,YAAY,CAAC,MAAM;AACzB,UAAM,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBb,WAAO,EAAE,KAAK,IAAI;AAAA,EACpB,CAAC;AAGD,MAAI,IAAI,KAAK,CAAC,MAAM;AAClB,WAAO,EAAE,KAAK;AAAA,MACZ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,MACb,MAAM;AAAA,MACN,WAAW;AAAA,QACT,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,WAAW;AAAA,MACb;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,SAAO;AACT;AAEA,eAAsB,YAAY,UAAyB,CAAC,GAAG;AAE7D,QAAM,SAAS,MAAM,kBAAkB,EAAE,OAAO,QAAQ,OAAO,eAAe,MAAM,CAAC;AACrF,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,6EAA6E;AAAA,EAC/F;AAGA,QAAM,SAAS,MAAM,WAAW,QAAQ,YAAY,QAAQ,gBAAgB;AAG5
E,qBAAmB;AAGnB,MAAI,QAAQ,kBAAkB;AAC5B,WAAO,2BAA2B,QAAQ;AAAA,EAC5C;AAGA,MAAI,CAACL,aAAW,OAAO,wBAAwB,GAAG;AAChD,IAAAM,WAAU,OAAO,0BAA0B,EAAE,WAAW,KAAK,CAAC;AAC9D,QAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,sCAA+B,OAAO,wBAAwB,EAAE;AAAA,EAClG;AAGA,MAAI,CAAC,OAAO,qBAAqB,KAAK;AACpC,UAAM,IAAI,MAAM,wGAAwG;AAAA,EAC1H;AAEA,MAAIC,WAAU,OAAO,qBAAqB;AAC1C,MAAI,CAACA,UAAS;AACZ,QAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,6CAAsC;AACtE,IAAAA,WAAU,MAAM,oBAAoB,OAAO,qBAAqB,GAAG;AACnE,QAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,sCAAiC;AAAA,EACnE;AACA,eAAa,EAAE,KAAK,OAAO,qBAAqB,KAAK,SAAAA,SAAQ,CAAC;AAC9D,MAAI,CAAC,QAAQ,MAAO,SAAQ,IAAI,oCAA6B,OAAO,qBAAqB,GAAG,EAAE;AAE9F,QAAM,OAAO,QAAQ,QAAQ,OAAO,OAAO;AAC3C,QAAM,OAAO,QAAQ,QAAQ,OAAO,OAAO,QAAQ;AAEnD,QAAM,YAAY,QAAQ,aAAa,OAAO,OAAO;AAErD,QAAM,MAAM,MAAM,UAAU,EAAE,OAAO,QAAQ,MAAM,CAAC;AAEpD,MAAI,CAAC,QAAQ,OAAO;AAClB,YAAQ,IAAI;AAAA,iCAA6B;AACzC,YAAQ,IAAI,+BAA0B,IAAI,IAAI,IAAI,EAAE;AACpD,QAAI,WAAW;AACb,cAAQ,IAAI,yBAAoB,SAAS,EAAE;AAAA,IAC7C;AACA,YAAQ,IAAI,gCAA2B,OAAO,wBAAwB,EAAE;AACxE,YAAQ,IAAI,4BAAuB,OAAO,YAAY,EAAE;AACxD,YAAQ,IAAI,kCAA6B,IAAI,IAAI,IAAI;AAAA,CAAiB;AAAA,EACxE;AAEA,mBAAiB,MAAM;AAAA,IACrB,OAAO,IAAI;AAAA,IACX;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAGD,MAAI;AACJ,MAAI;AACJ,MAAI,QAAQ,UAAU,OAAO;AAC3B,UAAM,SAAS,MAAM,WAAW,MAAM,QAAQ,WAAW,kBAAkB,QAAQ,OAAO,SAAS;AACnG,cAAU,OAAO;AACjB,iBAAa,OAAO;AAAA,EACtB;AAEA,SAAO,EAAE,KAAK,MAAM,MAAM,SAAS,WAAW;AAChD;AAEO,SAAS,aAAa;AAE3B,YAAU;AAGV,EAAK,aAAa,EAAE,KAAK,OAAOC,cAAa;AAC3C,eAAW,MAAMA,WAAU;AACzB,YAAW,aAAa,EAAE;AAAA,IAC5B;AAAA,EACF,CAAC,EAAE,MAAM,MAAM;AAAA,EAEf,CAAC;AAED,MAAI,gBAAgB;AAClB,mBAAe,MAAM;AACrB,qBAAiB;AAAA,EACnB;AACA,gBAAc;AAChB;AAEA,SAAS,sBAAsB;AAC7B,SAAO;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,OAAO;AAAA,MACP,SAAS;AAAA,MACT,aACE;AAAA,IACJ;AAAA,IACA,SAAS,CAAC,EAAE,KAAK,yBAAyB,aAAa,oBAAoB,CAAC;AAAA,IAC5E,OAAO;AAAA,MACL,KAAK;AAAA,QACH,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,oBAAoB,CAAC,EAAE;AAAA,YACpC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,WAAW;AAAA,QACT,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;
AAAA,UACb,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,oBAAoB,CAAC,EAAE;AAAA,YACpC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,QACf,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,eAAe;AAAA,YACnC,KAAK,EAAE,aAAa,mBAAmB;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,MACA,aAAa;AAAA,QACX,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,SAAS,IAAI,SAAS,QAAQ,EAAE,MAAM,WAAW,SAAS,GAAG,EAAE;AAAA,YACvE,EAAE,MAAM,UAAU,IAAI,SAAS,QAAQ,EAAE,MAAM,WAAW,SAAS,EAAE,EAAE;AAAA,UACzE;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,mBAAmB;AAAA,UACzC;AAAA,QACF;AAAA,QACA,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,YAAY;AAAA,oBACV,MAAM,EAAE,MAAM,SAAS;AAAA,oBACvB,kBAAkB,EAAE,MAAM,SAAS;AAAA,oBACnC,OAAO,EAAE,MAAM,SAAS;AAAA,oBACxB,eAAe,EAAE,MAAM,SAAS;AAAA,kBAClC;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAAA,MACA,kBAAkB;AAAA,QAChB,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,QACA,QAAQ;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,2BAA2B;AAAA,QACzB,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YACrE,EAAE,MAAM,SAAS,IAAI,SAAS,QAAQ,EAAE,MAAM,WAAW,SAAS,IAAI,EAAE;AAAA,UAC1E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,wBAAwB;AAAA,QACtB,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAA
K,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,oBAAoB;AAAA,QAClB,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aACE;AAAA,UACF,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,QAAQ;AAAA,kBACnB,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,kBAC3B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,qBAAqB,CAAC,EAAE;AAAA,YACrC;AAAA,YACA,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,yBAAyB;AAAA,QACvB,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,QAAQ;AAAA,kBACnB,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,kBAC3B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,iBAAiB;AAAA,YACrC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,qCAAqC;AAAA,QACnC,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YACrE,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,6BAA6B;AAAA,YACjD,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,oCAAoC;AAAA,QAClC,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YACrE,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,kBAC3B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,gBAAgB;AAAA,YACpC,KAAK,EAAE,aAAa,mBAAmB;AAAA,YACvC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,
0BAA0B;AAAA,QACxB,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,MAAM,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UACnF,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,oBAAoB;AAAA,YACxC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,QACf,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,QAAQ;AAAA,kBACnB,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,SAAS;AAAA,oBACzB,MAAM,EAAE,MAAM,SAAS;AAAA,oBACvB,kBAAkB,EAAE,MAAM,SAAS;AAAA,oBACnC,OAAO,EAAE,MAAM,SAAS;AAAA,oBACxB,eAAe,EAAE,MAAM,SAAS;AAAA,kBAClC;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK;AAAA,cACH,aAAa;AAAA,cACb,SAAS,EAAE,qBAAqB,CAAC,EAAE;AAAA,YACrC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MACA,mCAAmC;AAAA,QACjC,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UAC1F,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,oBAAoB;AAAA,YACxC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,QACA,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UAC1F,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,SAAS;AAAA,kBACpB,YAAY;AAAA,oBACV,SAAS,EAAE,MAAM,SAAS;AAAA,oBAC1B,KAAK,EAAE,MAAM,SAAS;AAAA,oBACtB,MAAM,EAAE,MAAM,SAAS;AAAA,kBACzB;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,mBAAmB;AAAA,YACvC,KAAK,EAAE,aAAa,oBAAoB;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAAA,MACA,gDAAgD;AAAA,QAC9C,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,qBAAqB;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,MACA,qDAAqD;AAAA,QACnD,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,Q
AAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC7E,EAAE,MAAM,QAAQ,IAAI,SAAS,QAAQ,EAAE,MAAM,UAAU,EAAE;AAAA,UAC3D;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,gBAAgB;AAAA,YACpC,KAAK,EAAE,aAAa,qBAAqB;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,MACA,qDAAqD;AAAA,QACnD,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,YAAY;AAAA,oBACV,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,WAAW,SAAS,EAAE;AAAA,kBACzD;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,kBAAkB;AAAA,YACtC,KAAK,EAAE,aAAa,0BAA0B;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,MACA,sDAAsD;AAAA,QACpD,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,aAAa;AAAA,YACX,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,kBACN,UAAU,CAAC,OAAO;AAAA,kBAClB,YAAY;AAAA,oBACV,OAAO,EAAE,MAAM,SAAS;AAAA,kBAC1B;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,aAAa;AAAA,YACjC,KAAK,EAAE,aAAa,kBAAkB;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAAA,MACA,uDAAuD;AAAA,QACrD,KAAK;AAAA,UACH,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY;AAAA,YACV,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,YAC5E,EAAE,MAAM,cAAc,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE;AAAA,UAC/E;AAAA,UACA,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,cAAc,SAAS,EAAE,qBAAqB,CAAC,EAAE,EAAE;AAAA,YACvE,KAAK,EAAE,aAAa,qBAAqB;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,MACA,4CAA4C;AAAA,QAC1C,MAAM;AAAA,UACJ,SAAS;AAAA,UACT,aAAa;AAAA,UACb,YAAY,CAAC,EAAE,MAAM,aAAa,IAAI,QAAQ,UAAU,MAAM,QAAQ,EAAE,MAAM,SAAS,EAAE,CAAC;AAAA,UAC1F,WAAW;AAAA,YACT,KAAK,EAAE,aAAa,mBAAmB;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,YAAY;AAAA,MA
CV,SAAS;AAAA,QACP,SAAS;AAAA,UACP,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,MAAM,EAAE,MAAM,SAAS;AAAA,YACvB,kBAAkB,EAAE,MAAM,SAAS;AAAA,YACnC,OAAO,EAAE,MAAM,SAAS;AAAA,YACxB,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,UAAU,WAAW,aAAa,OAAO,EAAE;AAAA,YAC5E,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,YACjD,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,UACnD;AAAA,QACF;AAAA,QACA,SAAS;AAAA,UACP,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,QAAQ,aAAa,UAAU,MAAM,EAAE;AAAA,YACtE,SAAS,EAAE,MAAM,SAAS;AAAA,YAC1B,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,UACnD;AAAA,QACF;AAAA,QACA,eAAe;AAAA,UACb,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,YAAY,EAAE,MAAM,SAAS;AAAA,YAC7B,UAAU,EAAE,MAAM,SAAS;AAAA,YAC3B,OAAO,EAAE,MAAM,SAAS;AAAA,YACxB,QAAQ,EAAE,MAAM,SAAS;AAAA,YACzB,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,WAAW,YAAY,YAAY,aAAa,OAAO,EAAE;AAAA,YAC1F,kBAAkB,EAAE,MAAM,UAAU;AAAA,UACtC;AAAA,QACF;AAAA,QACA,UAAU;AAAA,UACR,MAAM;AAAA,UACN,YAAY;AAAA,YACV,IAAI,EAAE,MAAM,SAAS;AAAA,YACrB,MAAM,EAAE,MAAM,SAAS;AAAA,YACvB,SAAS,EAAE,MAAM,SAAS;AAAA,YAC1B,KAAK,EAAE,MAAM,SAAS;AAAA,YACtB,KAAK,EAAE,MAAM,UAAU;AAAA,YACvB,QAAQ,EAAE,MAAM,UAAU,MAAM,CAAC,WAAW,WAAW,OAAO,EAAE;AAAA,YAChE,UAAU,EAAE,MAAM,UAAU;AAAA,YAC5B,OAAO,EAAE,MAAM,SAAS;AAAA,YACxB,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,YACjD,WAAW,EAAE,MAAM,UAAU,QAAQ,YAAY;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;","names":["authKey","readFile","resolve","extname","relative","existsSync","init_types","createHash","extname","basename","remoteServerUrl","authKey","readFileSync","relative","minimatch","MAX_FILE_SIZE","init_types","tool","z","existsSync","readFileSync","join","minimatch","exec","promisify","writeFile","mkdir","readFile","unlink","readdir","join","nanoid","execAsync","Hono","existsSync","mkdirSync","writeFileSync","resolve","dirname","join","spawn","fileURLToPath","z","existsSync","mkdirSync","writeFileSync","statSync","readdir","join","basename","extname","relative","nanoid","streamText","generateText","tool","stepCountIs",
"z","nanoid","z","exec","promisify","existsSync","mkdirSync","join","output","isRunning","terminals","readdir","execAsync","promisify","exec","MAX_OUTPUT_CHARS","z","execAsync","MAX_OUTPUT_CHARS","output","status","truncatedOutput","tool","z","readFile","resolve","existsSync","existsSync","mkdirSync","readFileSync","writeFileSync","join","MAX_OUTPUT_CHARS","z","tool","resolve","existsSync","readFile","tool","z","readFile","writeFile","mkdir","resolve","relative","isAbsolute","dirname","existsSync","readFile","writeFile","mkdir","existsSync","resolve","relative","dirname","exec","promisify","execAsync","resolve","relative","existsSync","readFile","dirname","mkdir","writeFile","extname","dirname","existsSync","resolve","dirname","exec","promisify","execAsync","readFile","existsSync","extname","resolve","cleanup","extname","dirname","extname","z","tool","isAbsolute","resolve","relative","existsSync","dirname","mkdir","writeFile","readFile","tool","z","tool","z","tool","z","resolve","relative","isAbsolute","extname","existsSync","readdir","stat","z","readdir","resolve","extname","tool","isAbsolute","existsSync","stat","relative","tool","z","nanoid","nanoid","resolve","tool","z","exec","promisify","readFile","stat","readdir","resolve","relative","isAbsolute","existsSync","tool","z","resolve","relative","isAbsolute","basename","readFile","readdir","existsSync","fileURLToPath","z","relative","resolve","readdir","readFile","tool","isAbsolute","existsSync","lines","fileURLToPath","basename","execAsync","promisify","exec","MAX_OUTPUT_CHARS","MAX_FILE_SIZE","tool","z","resolve","isAbsolute","existsSync","stat","readFile","relative","readdir","createSemanticSearchTool","tool","z","tool","z","tool","z","readFile","stat","join","basename","extname","isRemoteConfigured","storageQueries","generateText","platform","loadAllSkills","generateText","streamText","stepCountIs","generateText","getOrCreateProxy","FrameRecorder","updatedTask","destroyProxy","isRemoteConfigured","storageQueri
es","readFile","unlink","join","basename","tool","z","nanoid","resolve","exec","z","join","existsSync","mkdirSync","statSync","nanoid","extname","basename","writeFileSync","readdir","relative","isRemoteConfigured","storageQueries","Hono","zValidator","z","existsSync","mkdirSync","writeFileSync","join","nanoid","resolve","Hono","z","join","existsSync","mkdirSync","writeFileSync","stripDataUrlPrefix","resolve","zValidator","nanoid","Hono","zValidator","z","readFileSync","fileURLToPath","dirname","join","Hono","zValidator","z","Hono","z","zValidator","isRunning","sessions","Hono","zValidator","z","nanoid","Hono","z","zValidator","nanoid","sendWebhook","isRemoteConfigured","storageQueries","exec","promisify","platform","execAsync","os","dirname","fileURLToPath","resolve","existsSync","join","possiblePaths","spawn","writeFileSync","Hono","mkdirSync","authKey","sessions"]}
|