@within-7/minto 0.1.7 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.js +155 -37
- package/dist/Tool.js +38 -0
- package/dist/Tool.js.map +3 -3
- package/dist/commands/agents/AgentsCommand.js +73 -49
- package/dist/commands/agents/AgentsCommand.js.map +2 -2
- package/dist/commands/agents/constants.js +1 -1
- package/dist/commands/agents/constants.js.map +1 -1
- package/dist/commands/agents/index.js +1 -1
- package/dist/commands/bug.js +74 -7
- package/dist/commands/bug.js.map +3 -3
- package/dist/commands/clear.js +3 -0
- package/dist/commands/clear.js.map +2 -2
- package/dist/commands/compact.js +37 -0
- package/dist/commands/compact.js.map +2 -2
- package/dist/commands/context.js +85 -0
- package/dist/commands/context.js.map +7 -0
- package/dist/commands/ctx_viz.js +18 -10
- package/dist/commands/ctx_viz.js.map +2 -2
- package/dist/commands/doctor.js +158 -12
- package/dist/commands/doctor.js.map +2 -2
- package/dist/commands/export.js +157 -0
- package/dist/commands/export.js.map +7 -0
- package/dist/commands/mcp-interactive.js +28 -18
- package/dist/commands/mcp-interactive.js.map +2 -2
- package/dist/commands/model.js +9 -7
- package/dist/commands/model.js.map +2 -2
- package/dist/commands/permissions.js +87 -0
- package/dist/commands/permissions.js.map +7 -0
- package/dist/commands/plugin/AddMarketplaceForm.js +3 -2
- package/dist/commands/plugin/AddMarketplaceForm.js.map +2 -2
- package/dist/commands/plugin/ConfirmDialog.js +2 -1
- package/dist/commands/plugin/ConfirmDialog.js.map +2 -2
- package/dist/commands/plugin/ErrorView.js +2 -1
- package/dist/commands/plugin/ErrorView.js.map +2 -2
- package/dist/commands/plugin/InstalledPluginsByMarketplace.js +5 -4
- package/dist/commands/plugin/InstalledPluginsByMarketplace.js.map +2 -2
- package/dist/commands/plugin/InstalledPluginsManager.js +5 -4
- package/dist/commands/plugin/InstalledPluginsManager.js.map +2 -2
- package/dist/commands/plugin/MainMenu.js +2 -1
- package/dist/commands/plugin/MainMenu.js.map +2 -2
- package/dist/commands/plugin/MarketplaceManager.js +5 -4
- package/dist/commands/plugin/MarketplaceManager.js.map +2 -2
- package/dist/commands/plugin/MarketplaceSelector.js +4 -3
- package/dist/commands/plugin/MarketplaceSelector.js.map +2 -2
- package/dist/commands/plugin/PlaceholderScreen.js +3 -2
- package/dist/commands/plugin/PlaceholderScreen.js.map +2 -2
- package/dist/commands/plugin/PluginBrowser.js +6 -5
- package/dist/commands/plugin/PluginBrowser.js.map +2 -2
- package/dist/commands/plugin/PluginDetailsInstall.js +5 -4
- package/dist/commands/plugin/PluginDetailsInstall.js.map +2 -2
- package/dist/commands/plugin/PluginDetailsManage.js +4 -3
- package/dist/commands/plugin/PluginDetailsManage.js.map +2 -2
- package/dist/commands/plugin.js +16 -15
- package/dist/commands/plugin.js.map +2 -2
- package/dist/commands/quit.js +3 -1
- package/dist/commands/quit.js.map +2 -2
- package/dist/commands/sandbox.js +105 -0
- package/dist/commands/sandbox.js.map +7 -0
- package/dist/commands/setup.js +2 -1
- package/dist/commands/setup.js.map +2 -2
- package/dist/commands/status.js +59 -0
- package/dist/commands/status.js.map +7 -0
- package/dist/commands/tasks.js +108 -0
- package/dist/commands/tasks.js.map +7 -0
- package/dist/commands/todos.js +123 -0
- package/dist/commands/todos.js.map +7 -0
- package/dist/commands/undo.js +245 -0
- package/dist/commands/undo.js.map +7 -0
- package/dist/commands.js +22 -2
- package/dist/commands.js.map +2 -2
- package/dist/components/AgentThinkingBlock.js +10 -18
- package/dist/components/AgentThinkingBlock.js.map +2 -2
- package/dist/components/AsciiLogo.js +7 -8
- package/dist/components/AsciiLogo.js.map +2 -2
- package/dist/components/AskUserQuestionDialog/AskUserQuestionDialog.js +3 -2
- package/dist/components/AskUserQuestionDialog/AskUserQuestionDialog.js.map +2 -2
- package/dist/components/AskUserQuestionDialog/QuestionView.js +2 -1
- package/dist/components/AskUserQuestionDialog/QuestionView.js.map +2 -2
- package/dist/components/BackgroundTasksPanel.js +78 -29
- package/dist/components/BackgroundTasksPanel.js.map +2 -2
- package/dist/components/BashStreamingProgress.js +24 -0
- package/dist/components/BashStreamingProgress.js.map +7 -0
- package/dist/components/CollapsibleHint.js +15 -0
- package/dist/components/CollapsibleHint.js.map +7 -0
- package/dist/components/Config.js +3 -2
- package/dist/components/Config.js.map +2 -2
- package/dist/components/ConsoleOAuthFlow.js +2 -1
- package/dist/components/ConsoleOAuthFlow.js.map +2 -2
- package/dist/components/Cost.js +2 -1
- package/dist/components/Cost.js.map +2 -2
- package/dist/components/FileEditToolUpdatedMessage.js +1 -1
- package/dist/components/FileEditToolUpdatedMessage.js.map +2 -2
- package/dist/components/HeaderBar.js +13 -8
- package/dist/components/HeaderBar.js.map +2 -2
- package/dist/components/HistorySearchOverlay.js +4 -3
- package/dist/components/HistorySearchOverlay.js.map +2 -2
- package/dist/components/HotkeyHelpPanel.js +134 -0
- package/dist/components/HotkeyHelpPanel.js.map +7 -0
- package/dist/components/InvalidConfigDialog.js +2 -1
- package/dist/components/InvalidConfigDialog.js.map +2 -2
- package/dist/components/Logo.js +24 -68
- package/dist/components/Logo.js.map +2 -2
- package/dist/components/MCPServerApprovalDialog.js +2 -1
- package/dist/components/MCPServerApprovalDialog.js.map +2 -2
- package/dist/components/MCPServerDialogCopy.js +2 -1
- package/dist/components/MCPServerDialogCopy.js.map +2 -2
- package/dist/components/MCPServerMultiselectDialog.js +2 -1
- package/dist/components/MCPServerMultiselectDialog.js.map +2 -2
- package/dist/components/Message.js +23 -7
- package/dist/components/Message.js.map +3 -3
- package/dist/components/MessageSelector.js +4 -3
- package/dist/components/MessageSelector.js.map +2 -2
- package/dist/components/ModeIndicator.js +2 -1
- package/dist/components/ModeIndicator.js.map +2 -2
- package/dist/components/ModelConfig.js +20 -6
- package/dist/components/ModelConfig.js.map +2 -2
- package/dist/components/ModelListManager.js +7 -6
- package/dist/components/ModelListManager.js.map +2 -2
- package/dist/components/ModelSelector/ModelSelector.js +27 -14
- package/dist/components/ModelSelector/ModelSelector.js.map +2 -2
- package/dist/components/Onboarding.js +22 -16
- package/dist/components/Onboarding.js.map +2 -2
- package/dist/components/OperationSummary.js +130 -0
- package/dist/components/OperationSummary.js.map +7 -0
- package/dist/components/ProgressBar.js +74 -0
- package/dist/components/ProgressBar.js.map +7 -0
- package/dist/components/PromptInput.js +210 -87
- package/dist/components/PromptInput.js.map +2 -2
- package/dist/components/RequestStatusIndicator.js +194 -0
- package/dist/components/RequestStatusIndicator.js.map +7 -0
- package/dist/components/SensitiveFileWarning.js +31 -0
- package/dist/components/SensitiveFileWarning.js.map +7 -0
- package/dist/components/Spinner.js +141 -27
- package/dist/components/Spinner.js.map +2 -2
- package/dist/components/SpinnerSymbol.js +21 -27
- package/dist/components/SpinnerSymbol.js.map +2 -2
- package/dist/components/StreamingBashOutput.js +9 -8
- package/dist/components/StreamingBashOutput.js.map +2 -2
- package/dist/components/StructuredDiff.js +6 -8
- package/dist/components/StructuredDiff.js.map +2 -2
- package/dist/components/SubagentBlock.js +5 -3
- package/dist/components/SubagentBlock.js.map +2 -2
- package/dist/components/SubagentProgress.js +17 -15
- package/dist/components/SubagentProgress.js.map +2 -2
- package/dist/components/TaskCard.js +30 -24
- package/dist/components/TaskCard.js.map +2 -2
- package/dist/components/TextInput.js +9 -1
- package/dist/components/TextInput.js.map +2 -2
- package/dist/components/TodoChangeBlock.js +1 -1
- package/dist/components/TodoChangeBlock.js.map +2 -2
- package/dist/components/TodoPanel.js +140 -31
- package/dist/components/TodoPanel.js.map +3 -3
- package/dist/components/TokenCounter.js +74 -0
- package/dist/components/TokenCounter.js.map +7 -0
- package/dist/components/TokenWarning.js +2 -1
- package/dist/components/TokenWarning.js.map +2 -2
- package/dist/components/ToolUseLoader.js +2 -2
- package/dist/components/ToolUseLoader.js.map +2 -2
- package/dist/components/TreeConnector.js +26 -0
- package/dist/components/TreeConnector.js.map +7 -0
- package/dist/components/TrustDialog.js +2 -1
- package/dist/components/TrustDialog.js.map +2 -2
- package/dist/components/TurnCompletionIndicator.js +18 -0
- package/dist/components/TurnCompletionIndicator.js.map +7 -0
- package/dist/components/binary-feedback/BinaryFeedbackView.js +2 -1
- package/dist/components/binary-feedback/BinaryFeedbackView.js.map +2 -2
- package/dist/components/messages/AssistantTextMessage.js +20 -9
- package/dist/components/messages/AssistantTextMessage.js.map +2 -2
- package/dist/components/messages/AssistantThinkingMessage.js +18 -3
- package/dist/components/messages/AssistantThinkingMessage.js.map +2 -2
- package/dist/components/messages/AssistantToolUseMessage.js +17 -10
- package/dist/components/messages/AssistantToolUseMessage.js.map +2 -2
- package/dist/components/messages/GroupRenderer.js +54 -0
- package/dist/components/messages/GroupRenderer.js.map +7 -0
- package/dist/components/messages/NestedTasksPreview.js +24 -0
- package/dist/components/messages/NestedTasksPreview.js.map +7 -0
- package/dist/components/messages/ParallelTasksGroupView.js +93 -0
- package/dist/components/messages/ParallelTasksGroupView.js.map +7 -0
- package/dist/components/messages/TaskInModuleView.js +218 -0
- package/dist/components/messages/TaskInModuleView.js.map +7 -0
- package/dist/components/messages/TaskOutputContent.js +56 -0
- package/dist/components/messages/TaskOutputContent.js.map +7 -0
- package/dist/components/messages/UserPromptMessage.js +2 -2
- package/dist/components/messages/UserPromptMessage.js.map +2 -2
- package/dist/components/messages/UserToolResultMessage/UserToolSuccessMessage.js +2 -3
- package/dist/components/messages/UserToolResultMessage/UserToolSuccessMessage.js.map +2 -2
- package/dist/components/permissions/FallbackPermissionRequest.js +4 -4
- package/dist/components/permissions/FallbackPermissionRequest.js.map +2 -2
- package/dist/components/permissions/FilesystemPermissionRequest/FilesystemPermissionRequest.js +4 -4
- package/dist/components/permissions/FilesystemPermissionRequest/FilesystemPermissionRequest.js.map +2 -2
- package/dist/constants/colors.js +120 -54
- package/dist/constants/colors.js.map +2 -2
- package/dist/constants/formatRules.js +102 -0
- package/dist/constants/formatRules.js.map +7 -0
- package/dist/constants/prompts.js +12 -34
- package/dist/constants/prompts.js.map +2 -2
- package/dist/constants/symbols.js +64 -6
- package/dist/constants/symbols.js.map +2 -2
- package/dist/constants/timing.js +5 -0
- package/dist/constants/timing.js.map +2 -2
- package/dist/constants/toolInputExamples.js +84 -0
- package/dist/constants/toolInputExamples.js.map +7 -0
- package/dist/core/backupManager.js +321 -0
- package/dist/core/backupManager.js.map +7 -0
- package/dist/core/config/defaults.js +84 -0
- package/dist/core/config/defaults.js.map +7 -0
- package/dist/core/config/index.js +111 -0
- package/dist/core/config/index.js.map +7 -0
- package/dist/core/config/loader.js +221 -0
- package/dist/core/config/loader.js.map +7 -0
- package/dist/core/config/migrations.js +128 -0
- package/dist/core/config/migrations.js.map +7 -0
- package/dist/core/config/schema.js +178 -0
- package/dist/core/config/schema.js.map +7 -0
- package/dist/core/costTracker.js +129 -0
- package/dist/core/costTracker.js.map +7 -0
- package/dist/core/gitAutoCommit.js +287 -0
- package/dist/core/gitAutoCommit.js.map +7 -0
- package/dist/core/index.js +8 -0
- package/dist/core/index.js.map +7 -0
- package/dist/core/operationTracker.js +212 -0
- package/dist/core/operationTracker.js.map +7 -0
- package/dist/core/permissions/auditLog.js +204 -0
- package/dist/core/permissions/auditLog.js.map +7 -0
- package/dist/core/permissions/engine/index.js +3 -0
- package/dist/core/permissions/engine/index.js.map +7 -0
- package/dist/core/permissions/engine/permissionEngine.js +106 -0
- package/dist/core/permissions/engine/permissionEngine.js.map +7 -0
- package/dist/core/permissions/engine/types.js +1 -0
- package/dist/core/permissions/engine/types.js.map +7 -0
- package/dist/core/permissions/index.js +84 -0
- package/dist/core/permissions/index.js.map +7 -0
- package/dist/core/permissions/ruleEngine.js +259 -0
- package/dist/core/permissions/ruleEngine.js.map +7 -0
- package/dist/core/permissions/rules/allowedToolsRule.js +62 -0
- package/dist/core/permissions/rules/allowedToolsRule.js.map +7 -0
- package/dist/core/permissions/rules/autoEscalationRule.js +296 -0
- package/dist/core/permissions/rules/autoEscalationRule.js.map +7 -0
- package/dist/core/permissions/rules/index.js +46 -0
- package/dist/core/permissions/rules/index.js.map +7 -0
- package/dist/core/permissions/rules/planModeRule.js +55 -0
- package/dist/core/permissions/rules/planModeRule.js.map +7 -0
- package/dist/core/permissions/rules/projectBoundaryRule.js +173 -0
- package/dist/core/permissions/rules/projectBoundaryRule.js.map +7 -0
- package/dist/core/permissions/rules/safeModeRule.js +65 -0
- package/dist/core/permissions/rules/safeModeRule.js.map +7 -0
- package/dist/core/permissions/rules/sensitivePathsRule.js +345 -0
- package/dist/core/permissions/rules/sensitivePathsRule.js.map +7 -0
- package/dist/core/permissions/types.js +127 -0
- package/dist/core/permissions/types.js.map +7 -0
- package/dist/core/tokenStats.js +9 -0
- package/dist/core/tokenStats.js.map +7 -0
- package/dist/core/tokenStatsManager.js +331 -0
- package/dist/core/tokenStatsManager.js.map +7 -0
- package/dist/core/tools/executor.js +143 -0
- package/dist/core/tools/executor.js.map +7 -0
- package/dist/core/tools/index.js +15 -0
- package/dist/core/tools/index.js.map +7 -0
- package/dist/core/tools/registry.js +183 -0
- package/dist/core/tools/registry.js.map +7 -0
- package/dist/core/tools/types.js +1 -0
- package/dist/core/tools/types.js.map +7 -0
- package/dist/cost-tracker.js +23 -15
- package/dist/cost-tracker.js.map +2 -2
- package/dist/entrypoints/cli.js +158 -130
- package/dist/entrypoints/cli.js.map +2 -2
- package/dist/entrypoints/mcp.js +12 -4
- package/dist/entrypoints/mcp.js.map +2 -2
- package/dist/history.js +14 -3
- package/dist/history.js.map +2 -2
- package/dist/hooks/useAgentTokenStats.js +72 -0
- package/dist/hooks/useAgentTokenStats.js.map +7 -0
- package/dist/hooks/useAgentTranscripts.js +140 -0
- package/dist/hooks/useAgentTranscripts.js.map +7 -0
- package/dist/hooks/useAnimationSync.js +53 -0
- package/dist/hooks/useAnimationSync.js.map +7 -0
- package/dist/hooks/useArrowKeyHistory.js +4 -2
- package/dist/hooks/useArrowKeyHistory.js.map +2 -2
- package/dist/hooks/useCanUseTool.js +3 -1
- package/dist/hooks/useCanUseTool.js.map +2 -2
- package/dist/hooks/useExitOnCtrlCD.js +9 -5
- package/dist/hooks/useExitOnCtrlCD.js.map +2 -2
- package/dist/hooks/useHookStatus.js +40 -0
- package/dist/hooks/useHookStatus.js.map +7 -0
- package/dist/hooks/useLogMessages.js +29 -2
- package/dist/hooks/useLogMessages.js.map +2 -2
- package/dist/hooks/useMessageGroups.js +43 -0
- package/dist/hooks/useMessageGroups.js.map +7 -0
- package/dist/hooks/useTerminalSize.js +62 -6
- package/dist/hooks/useTerminalSize.js.map +2 -2
- package/dist/hooks/useUnifiedCompletion.js +69 -0
- package/dist/hooks/useUnifiedCompletion.js.map +2 -2
- package/dist/i18n/index.js +109 -0
- package/dist/i18n/index.js.map +7 -0
- package/dist/i18n/locales/en.js +348 -0
- package/dist/i18n/locales/en.js.map +7 -0
- package/dist/i18n/locales/index.js +7 -0
- package/dist/i18n/locales/index.js.map +7 -0
- package/dist/i18n/locales/zh-CN.js +348 -0
- package/dist/i18n/locales/zh-CN.js.map +7 -0
- package/dist/i18n/types.js +8 -0
- package/dist/i18n/types.js.map +7 -0
- package/dist/permissions.js +28 -1
- package/dist/permissions.js.map +2 -2
- package/dist/query.js +253 -21
- package/dist/query.js.map +3 -3
- package/dist/screens/REPL.js +523 -194
- package/dist/screens/REPL.js.map +3 -3
- package/dist/services/adapters/chatCompletions.js +3 -1
- package/dist/services/adapters/chatCompletions.js.map +2 -2
- package/dist/services/adapters/messageNormalizer.js +354 -0
- package/dist/services/adapters/messageNormalizer.js.map +7 -0
- package/dist/services/adapters/responsesAPI.js +6 -3
- package/dist/services/adapters/responsesAPI.js.map +2 -2
- package/dist/services/checkpointManager.js +386 -0
- package/dist/services/checkpointManager.js.map +7 -0
- package/dist/services/claude.js +192 -14
- package/dist/services/claude.js.map +3 -3
- package/dist/services/compressionService.js +50 -1
- package/dist/services/compressionService.js.map +2 -2
- package/dist/services/contextMonitor.js +162 -0
- package/dist/services/contextMonitor.js.map +7 -0
- package/dist/services/customCommands.js +60 -41
- package/dist/services/customCommands.js.map +2 -2
- package/dist/services/hookExecutor.js +173 -1
- package/dist/services/hookExecutor.js.map +2 -2
- package/dist/services/intelligentCompactor.js +281 -0
- package/dist/services/intelligentCompactor.js.map +7 -0
- package/dist/services/lspConfig.js +109 -0
- package/dist/services/lspConfig.js.map +7 -0
- package/dist/services/mcpClient.js +338 -43
- package/dist/services/mcpClient.js.map +2 -2
- package/dist/services/modelOrchestrator.js +310 -0
- package/dist/services/modelOrchestrator.js.map +7 -0
- package/dist/services/openai.js +8 -1
- package/dist/services/openai.js.map +2 -2
- package/dist/services/outputStyles.js +138 -0
- package/dist/services/outputStyles.js.map +7 -0
- package/dist/services/plugins/index.js +5 -0
- package/dist/services/plugins/index.js.map +7 -0
- package/dist/services/plugins/lspServers.js +188 -0
- package/dist/services/plugins/lspServers.js.map +7 -0
- package/dist/services/plugins/pluginRuntime.js +229 -0
- package/dist/services/plugins/pluginRuntime.js.map +7 -0
- package/dist/services/plugins/pluginValidation.js +219 -0
- package/dist/services/plugins/pluginValidation.js.map +7 -0
- package/dist/services/plugins/skillMarketplace.js +556 -0
- package/dist/services/plugins/skillMarketplace.js.map +7 -0
- package/dist/services/responseStateManager.js +37 -3
- package/dist/services/responseStateManager.js.map +2 -2
- package/dist/services/sandbox/filesystemBoundary.js +341 -0
- package/dist/services/sandbox/filesystemBoundary.js.map +7 -0
- package/dist/services/sandbox/index.js +14 -0
- package/dist/services/sandbox/index.js.map +7 -0
- package/dist/services/sandbox/networkProxy.js +293 -0
- package/dist/services/sandbox/networkProxy.js.map +7 -0
- package/dist/services/sandbox/sandboxController.js +574 -0
- package/dist/services/sandbox/sandboxController.js.map +7 -0
- package/dist/services/sandbox/types.js +50 -0
- package/dist/services/sandbox/types.js.map +7 -0
- package/dist/services/sessionMemory.js +266 -0
- package/dist/services/sessionMemory.js.map +7 -0
- package/dist/services/taskRouter.js +324 -0
- package/dist/services/taskRouter.js.map +7 -0
- package/dist/tools/ArchitectTool/ArchitectTool.js +7 -1
- package/dist/tools/ArchitectTool/ArchitectTool.js.map +2 -2
- package/dist/tools/AskExpertModelTool/AskExpertModelTool.js +6 -2
- package/dist/tools/AskExpertModelTool/AskExpertModelTool.js.map +2 -2
- package/dist/tools/AskUserQuestionTool/AskUserQuestionTool.js +2 -1
- package/dist/tools/AskUserQuestionTool/AskUserQuestionTool.js.map +2 -2
- package/dist/tools/BaseTool.js +72 -0
- package/dist/tools/BaseTool.js.map +7 -0
- package/dist/tools/BashOutputTool/BashOutputToolResultMessage.js +3 -0
- package/dist/tools/BashOutputTool/BashOutputToolResultMessage.js.map +2 -2
- package/dist/tools/BashTool/BashTool.js +79 -3
- package/dist/tools/BashTool/BashTool.js.map +2 -2
- package/dist/tools/BashTool/BashToolResultMessage.js +3 -0
- package/dist/tools/BashTool/BashToolResultMessage.js.map +2 -2
- package/dist/tools/BashTool/OutputLine.js +54 -0
- package/dist/tools/BashTool/OutputLine.js.map +2 -2
- package/dist/tools/BashTool/prompt.js +336 -3
- package/dist/tools/BashTool/prompt.js.map +2 -2
- package/dist/tools/FileEditTool/FileEditTool.js +29 -4
- package/dist/tools/FileEditTool/FileEditTool.js.map +2 -2
- package/dist/tools/FileEditTool/prompt.js +6 -3
- package/dist/tools/FileEditTool/prompt.js.map +2 -2
- package/dist/tools/FileWriteTool/FileWriteTool.js +5 -5
- package/dist/tools/FileWriteTool/FileWriteTool.js.map +2 -2
- package/dist/tools/FileWriteTool/prompt.js +4 -2
- package/dist/tools/FileWriteTool/prompt.js.map +2 -2
- package/dist/tools/GlobTool/GlobTool.js +4 -2
- package/dist/tools/GlobTool/GlobTool.js.map +2 -2
- package/dist/tools/GrepTool/GrepTool.js +36 -7
- package/dist/tools/GrepTool/GrepTool.js.map +2 -2
- package/dist/tools/KillShellTool/KillShellToolResultMessage.js +3 -0
- package/dist/tools/KillShellTool/KillShellToolResultMessage.js.map +2 -2
- package/dist/tools/ListMcpResourcesTool/ListMcpResourcesTool.js +109 -0
- package/dist/tools/ListMcpResourcesTool/ListMcpResourcesTool.js.map +7 -0
- package/dist/tools/ListMcpResourcesTool/prompt.js +19 -0
- package/dist/tools/ListMcpResourcesTool/prompt.js.map +7 -0
- package/dist/tools/LspTool/LspTool.js +664 -0
- package/dist/tools/LspTool/LspTool.js.map +7 -0
- package/dist/tools/LspTool/prompt.js +27 -0
- package/dist/tools/LspTool/prompt.js.map +7 -0
- package/dist/tools/MCPTool/MCPTool.js +9 -1
- package/dist/tools/MCPTool/MCPTool.js.map +2 -2
- package/dist/tools/MemoryReadTool/MemoryReadTool.js +19 -6
- package/dist/tools/MemoryReadTool/MemoryReadTool.js.map +2 -2
- package/dist/tools/MemoryWriteTool/MemoryWriteTool.js +6 -6
- package/dist/tools/MemoryWriteTool/MemoryWriteTool.js.map +2 -2
- package/dist/tools/MultiEditTool/MultiEditTool.js +19 -2
- package/dist/tools/MultiEditTool/MultiEditTool.js.map +2 -2
- package/dist/tools/MultiEditTool/prompt.js +5 -3
- package/dist/tools/MultiEditTool/prompt.js.map +2 -2
- package/dist/tools/NotebookEditTool/NotebookEditTool.js +7 -2
- package/dist/tools/NotebookEditTool/NotebookEditTool.js.map +2 -2
- package/dist/tools/NotebookReadTool/NotebookReadTool.js.map +2 -2
- package/dist/tools/PlanModeTool/EnterPlanModeTool.js +75 -0
- package/dist/tools/PlanModeTool/EnterPlanModeTool.js.map +7 -0
- package/dist/tools/PlanModeTool/ExitPlanModeTool.js +109 -0
- package/dist/tools/PlanModeTool/ExitPlanModeTool.js.map +7 -0
- package/dist/tools/PlanModeTool/prompt.js +94 -0
- package/dist/tools/PlanModeTool/prompt.js.map +7 -0
- package/dist/tools/ReadMcpResourceTool/ReadMcpResourceTool.js +130 -0
- package/dist/tools/ReadMcpResourceTool/ReadMcpResourceTool.js.map +7 -0
- package/dist/tools/ReadMcpResourceTool/prompt.js +17 -0
- package/dist/tools/ReadMcpResourceTool/prompt.js.map +7 -0
- package/dist/tools/SkillTool/SkillTool.js +10 -4
- package/dist/tools/SkillTool/SkillTool.js.map +2 -2
- package/dist/tools/SkillTool/prompt.js +1 -1
- package/dist/tools/SkillTool/prompt.js.map +1 -1
- package/dist/tools/SlashCommandTool/SlashCommandTool.js +260 -0
- package/dist/tools/SlashCommandTool/SlashCommandTool.js.map +7 -0
- package/dist/tools/SlashCommandTool/prompt.js +35 -0
- package/dist/tools/SlashCommandTool/prompt.js.map +7 -0
- package/dist/tools/TaskOutputTool/TaskOutputTool.js +190 -0
- package/dist/tools/TaskOutputTool/TaskOutputTool.js.map +7 -0
- package/dist/tools/TaskOutputTool/prompt.js +15 -0
- package/dist/tools/TaskOutputTool/prompt.js.map +7 -0
- package/dist/tools/TaskTool/TaskTool.js +310 -104
- package/dist/tools/TaskTool/TaskTool.js.map +2 -2
- package/dist/tools/TaskTool/prompt.js.map +2 -2
- package/dist/tools/TodoWriteTool/TodoWriteTool.js +42 -77
- package/dist/tools/TodoWriteTool/TodoWriteTool.js.map +2 -2
- package/dist/tools/URLFetcherTool/URLFetcherTool.js +4 -1
- package/dist/tools/URLFetcherTool/URLFetcherTool.js.map +2 -2
- package/dist/tools/URLFetcherTool/cache.js +55 -8
- package/dist/tools/URLFetcherTool/cache.js.map +2 -2
- package/dist/tools.js +31 -2
- package/dist/tools.js.map +2 -2
- package/dist/types/hooks.js +4 -0
- package/dist/types/hooks.js.map +2 -2
- package/dist/types/marketplace.js.map +2 -2
- package/dist/types/messageGroup.js +36 -0
- package/dist/types/messageGroup.js.map +7 -0
- package/dist/types/plugin.js.map +2 -2
- package/dist/types/thinking.js +1 -0
- package/dist/types/thinking.js.map +7 -0
- package/dist/utils/BackgroundShellManager.js +136 -39
- package/dist/utils/BackgroundShellManager.js.map +2 -2
- package/dist/utils/CircuitBreaker.js +242 -0
- package/dist/utils/CircuitBreaker.js.map +7 -0
- package/dist/utils/MessageBatchBuffer.js +102 -0
- package/dist/utils/MessageBatchBuffer.js.map +7 -0
- package/dist/utils/PersistentShell.js +151 -1
- package/dist/utils/PersistentShell.js.map +2 -2
- package/dist/utils/agentLoader.js +1 -23
- package/dist/utils/agentLoader.js.map +2 -2
- package/dist/utils/agentTranscripts.js +641 -0
- package/dist/utils/agentTranscripts.js.map +7 -0
- package/dist/utils/animationManager.js +213 -0
- package/dist/utils/animationManager.js.map +7 -0
- package/dist/utils/animationSync.js +110 -0
- package/dist/utils/animationSync.js.map +7 -0
- package/dist/utils/ask.js +2 -0
- package/dist/utils/ask.js.map +2 -2
- package/dist/utils/asyncFile.js +215 -0
- package/dist/utils/asyncFile.js.map +7 -0
- package/dist/utils/backgroundAgentManager.js +231 -0
- package/dist/utils/backgroundAgentManager.js.map +7 -0
- package/dist/utils/config.js +108 -10
- package/dist/utils/config.js.map +2 -2
- package/dist/utils/conversationRecovery.js +19 -0
- package/dist/utils/conversationRecovery.js.map +2 -2
- package/dist/utils/credentials/CredentialStore.js +1 -0
- package/dist/utils/credentials/CredentialStore.js.map +7 -0
- package/dist/utils/credentials/EncryptedFileStore.js +157 -0
- package/dist/utils/credentials/EncryptedFileStore.js.map +7 -0
- package/dist/utils/credentials/index.js +37 -0
- package/dist/utils/credentials/index.js.map +7 -0
- package/dist/utils/credentials/migration.js +82 -0
- package/dist/utils/credentials/migration.js.map +7 -0
- package/dist/utils/exit.js +73 -0
- package/dist/utils/exit.js.map +7 -0
- package/dist/utils/format.js +73 -5
- package/dist/utils/format.js.map +2 -2
- package/dist/utils/generators.js +76 -6
- package/dist/utils/generators.js.map +2 -2
- package/dist/utils/globalErrorHandler.js +149 -0
- package/dist/utils/globalErrorHandler.js.map +7 -0
- package/dist/utils/groupHandlers/index.js +8 -0
- package/dist/utils/groupHandlers/index.js.map +7 -0
- package/dist/utils/groupHandlers/parallelTasksHandler.js +140 -0
- package/dist/utils/groupHandlers/parallelTasksHandler.js.map +7 -0
- package/dist/utils/groupHandlers/taskHandler.js +104 -0
- package/dist/utils/groupHandlers/taskHandler.js.map +7 -0
- package/dist/utils/groupHandlers/types.js +1 -0
- package/dist/utils/groupHandlers/types.js.map +7 -0
- package/dist/utils/logRotation.js +224 -0
- package/dist/utils/logRotation.js.map +7 -0
- package/dist/utils/markdown.js +13 -1
- package/dist/utils/markdown.js.map +2 -2
- package/dist/utils/marketplaceManager.js +3 -5
- package/dist/utils/marketplaceManager.js.map +2 -2
- package/dist/utils/memSafety.js +264 -0
- package/dist/utils/memSafety.js.map +7 -0
- package/dist/utils/messageGroupManager.js +274 -0
- package/dist/utils/messageGroupManager.js.map +7 -0
- package/dist/utils/messages.js +13 -4
- package/dist/utils/messages.js.map +2 -2
- package/dist/utils/model.js +119 -15
- package/dist/utils/model.js.map +3 -3
- package/dist/utils/permissions/filesystem.js +162 -6
- package/dist/utils/permissions/filesystem.js.map +2 -2
- package/dist/utils/plan/planMode.js +143 -0
- package/dist/utils/plan/planMode.js.map +7 -0
- package/dist/utils/pluginLoader.js +17 -21
- package/dist/utils/pluginLoader.js.map +2 -2
- package/dist/utils/ripgrep.js +55 -2
- package/dist/utils/ripgrep.js.map +2 -2
- package/dist/utils/safePath.js +132 -0
- package/dist/utils/safePath.js.map +7 -0
- package/dist/utils/sanitizeInput.js +32 -0
- package/dist/utils/sanitizeInput.js.map +7 -0
- package/dist/utils/secureKeyStorage.js +312 -0
- package/dist/utils/secureKeyStorage.js.map +7 -0
- package/dist/utils/sensitiveFiles.js +125 -0
- package/dist/utils/sensitiveFiles.js.map +7 -0
- package/dist/utils/session/sessionPlugins.js +67 -0
- package/dist/utils/session/sessionPlugins.js.map +7 -0
- package/dist/utils/taskDisplayUtils.js +257 -0
- package/dist/utils/taskDisplayUtils.js.map +7 -0
- package/dist/utils/teamConfig.js +2 -1
- package/dist/utils/teamConfig.js.map +2 -2
- package/dist/utils/theme.js +6 -6
- package/dist/utils/theme.js.map +1 -1
- package/dist/utils/todoStorage.js +92 -2
- package/dist/utils/todoStorage.js.map +2 -2
- package/dist/utils/toolRiskClassification.js +207 -0
- package/dist/utils/toolRiskClassification.js.map +7 -0
- package/dist/utils/toolTimeout.js +136 -0
- package/dist/utils/toolTimeout.js.map +7 -0
- package/dist/utils/tooling/safeRender.js +116 -0
- package/dist/utils/tooling/safeRender.js.map +7 -0
- package/dist/utils/userFriendlyError.js +346 -0
- package/dist/utils/userFriendlyError.js.map +7 -0
- package/dist/utils/vendor/ripgrep/arm64-darwin/rg +0 -0
- package/dist/version.js +2 -2
- package/dist/version.js.map +1 -1
- package/package.json +17 -5
- package/scripts/postinstall.js +128 -38
- package/dist/commands/agents.js +0 -2086
- package/dist/commands/agents.js.map +0 -7
- package/dist/commands/build.js +0 -74
- package/dist/commands/build.js.map +0 -7
- package/dist/commands/compression.js +0 -57
- package/dist/commands/compression.js.map +0 -7
- package/dist/commands/listen.js +0 -37
- package/dist/commands/listen.js.map +0 -7
- package/dist/commands/login.js +0 -37
- package/dist/commands/login.js.map +0 -7
- package/dist/commands/logout.js +0 -33
- package/dist/commands/logout.js.map +0 -7
- package/dist/commands/mcp.js +0 -40
- package/dist/commands/mcp.js.map +0 -7
- package/dist/commands/mcp_refresh.js +0 -40
- package/dist/commands/mcp_refresh.js.map +0 -7
- package/dist/commands/modelstatus.js +0 -21
- package/dist/commands/modelstatus.js.map +0 -7
- package/dist/commands/onboarding.js +0 -36
- package/dist/commands/onboarding.js.map +0 -7
- package/dist/commands/plugin-interactive.js +0 -446
- package/dist/commands/plugin-interactive.js.map +0 -7
- package/dist/commands/pr_comments.js +0 -61
- package/dist/commands/pr_comments.js.map +0 -7
- package/dist/commands/release-notes.js +0 -30
- package/dist/commands/release-notes.js.map +0 -7
- package/dist/commands/review.js +0 -51
- package/dist/commands/review.js.map +0 -7
- package/dist/components/Bug.js +0 -147
- package/dist/components/Bug.js.map +0 -7
- package/dist/components/ModelSelector.js +0 -2062
- package/dist/components/ModelSelector.js.map +0 -7
- package/dist/components/ModelStatusDisplay.js +0 -87
- package/dist/components/ModelStatusDisplay.js.map +0 -7
- package/dist/entrypoints/cli-wrapper.js +0 -61
- package/dist/entrypoints/cli-wrapper.js.map +0 -7
- package/dist/hooks/useCancelRequest.js +0 -28
- package/dist/hooks/useCancelRequest.js.map +0 -7
- package/dist/screens/Doctor.js +0 -22
- package/dist/screens/Doctor.js.map +0 -7
package/dist/services/claude.js.map

@@ -1,7 +1,7 @@
  {
    "version": 3,
    "sources": ["../../src/services/claude.ts"],
-   "sourcesContent": ["import '@anthropic-ai/sdk/shims/node'\nimport Anthropic, { APIConnectionError, APIError } from '@anthropic-ai/sdk'\nimport { AnthropicBedrock } from '@anthropic-ai/bedrock-sdk'\nimport { AnthropicVertex } from '@anthropic-ai/vertex-sdk'\nimport chalk from 'chalk'\n… (remainder of the previous claude.ts source embedded in sourcesContent, truncated)
...options,\n modelProfile,\n toolUseContext,\n })\n}\n\nasync function queryAnthropicNative(\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[],\n maxThinkingTokens: number,\n tools: Tool[],\n signal: AbortSignal,\n options?: {\n safeMode: boolean\n model: string\n prependCLISysprompt: boolean\n modelProfile?: ModelProfile | null\n toolUseContext?: ToolUseContext\n },\n): Promise<AssistantMessage> {\n const config = getGlobalConfig()\n const modelManager = getModelManager()\n const toolUseContext = options?.toolUseContext\n\n const modelProfile = options?.modelProfile || modelManager.getModel('main')\n let anthropic: Anthropic | AnthropicBedrock | AnthropicVertex\n let model: string\n let provider: string\n\n // \uD83D\uDD0D Debug: \u8BB0\u5F55\u6A21\u578B\u914D\u7F6E\u8BE6\u60C5\n debugLogger.api('MODEL_CONFIG_ANTHROPIC', {\n modelProfileFound: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n modelProfileModelName: modelProfile?.modelName,\n modelProfileProvider: modelProfile?.provider,\n modelProfileBaseURL: modelProfile?.baseURL,\n modelProfileApiKeyExists: !!modelProfile?.apiKey,\n optionsModel: options?.model,\n requestId: getCurrentRequest()?.id,\n })\n\n if (modelProfile) {\n // \u4F7F\u7528ModelProfile\u7684\u5B8C\u6574\u914D\u7F6E\n model = modelProfile.modelName\n provider = modelProfile.provider || config.primaryProvider || 'anthropic'\n\n // \u57FA\u4E8EModelProfile\u521B\u5EFA\u4E13\u7528\u7684API\u5BA2\u6237\u7AEF\n if (\n modelProfile.provider === 'anthropic' ||\n modelProfile.provider === 'bigdream' ||\n modelProfile.provider === 'opendev'\n ) {\n const clientConfig: any = {\n apiKey: modelProfile.apiKey,\n dangerouslyAllowBrowser: true,\n maxRetries: 0,\n timeout: parseInt(process.env.API_TIMEOUT_MS || String(60 * 1000), 10),\n defaultHeaders: {\n 'x-app': 'cli',\n 'User-Agent': USER_AGENT,\n },\n }\n\n // \u4F7F\u7528ModelProfile\u7684baseURL\u800C\u4E0D\u662F\u5168\u5C40\u914D\u7F6E\n if (modelProfile.baseURL) {\n clientConfig.baseURL = modelProfile.baseURL\n }\n\n anthropic = new Anthropic(clientConfig)\n } else {\n // \u5176\u4ED6\u63D0\u4F9B\u5546\u7684\u5904\u7406\u903B\u8F91\n anthropic = getAnthropicClient(model)\n }\n } else {\n // \uD83D\uDEA8 \u964D\u7EA7\uFF1A\u6CA1\u6709\u6709\u6548\u7684ModelProfile\u65F6\uFF0C\u5E94\u8BE5\u629B\u51FA\u9519\u8BEF\n const errorDetails = {\n modelProfileExists: !!modelProfile,\n modelProfileModelName: modelProfile?.modelName,\n requestedModel: options?.model,\n requestId: getCurrentRequest()?.id,\n }\n debugLogger.error('ANTHROPIC_FALLBACK_ERROR', errorDetails)\n throw new Error(\n `No valid ModelProfile available for Anthropic provider. Please configure model through /model command. Debug: ${JSON.stringify(errorDetails)}`,\n )\n }\n\n // Prepend system prompt block for easy API identification\n if (options?.prependCLISysprompt) {\n // Log stats about first block for analyzing prefix matching config\n const [firstSyspromptBlock] = splitSysPromptPrefix(systemPrompt)\n\n systemPrompt = [getCLISyspromptPrefix(), ...systemPrompt]\n }\n\n const system: TextBlockParam[] = splitSysPromptPrefix(systemPrompt).map(\n _ => ({\n text: _,\n type: 'text',\n }),\n )\n\n const toolSchemas = await Promise.all(\n tools.map(\n async tool =>\n ({\n name: tool.name,\n description:\n typeof tool.description === 'function'\n ? await tool.description()\n : tool.description,\n input_schema:\n 'inputJSONSchema' in tool && tool.inputJSONSchema\n ? 
tool.inputJSONSchema\n : zodToJsonSchema(tool.inputSchema),\n }) as unknown as Anthropic.Beta.Messages.BetaTool,\n ),\n )\n\n const anthropicMessages = addCacheBreakpoints(messages)\n\n // apply cache control\n const { systemBlocks: processedSystem, messageParams: processedMessages } =\n applyCacheControlWithLimits(system, anthropicMessages)\n const startIncludingRetries = Date.now()\n\n // \u8BB0\u5F55\u7CFB\u7EDF\u63D0\u793A\u6784\u5EFA\u8FC7\u7A0B\n logSystemPromptConstruction({\n basePrompt: systemPrompt.join('\\n'),\n mintoContext: generateMintoContext() || '',\n reminders: [], // \u8FD9\u91CC\u53EF\u4EE5\u4ECE generateSystemReminders \u83B7\u53D6\n finalPrompt: systemPrompt.join('\\n'),\n })\n\n let start = Date.now()\n let attemptNumber = 0\n let response\n\n try {\n response = await withRetry(\n async attempt => {\n attemptNumber = attempt\n start = Date.now()\n\n const params: Anthropic.Beta.Messages.MessageCreateParams = {\n model,\n max_tokens: getMaxTokensFromProfile(modelProfile),\n messages: processedMessages,\n system: processedSystem,\n tools: toolSchemas.length > 0 ? toolSchemas : undefined,\n tool_choice: toolSchemas.length > 0 ? { type: 'auto' } : undefined,\n }\n\n if (maxThinkingTokens > 0) {\n ;(params as any).extra_headers = {\n 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',\n }\n ;(params as any).thinking = { max_tokens: maxThinkingTokens }\n }\n\n // \uD83D\uDD25 REAL-TIME API CALL DEBUG - \u4F7F\u7528\u5168\u5C40\u65E5\u5FD7\u7CFB\u7EDF (Anthropic Streaming)\n debugLogger.api('ANTHROPIC_API_CALL_START_STREAMING', {\n endpoint: modelProfile?.baseURL || 'DEFAULT_ANTHROPIC',\n model,\n provider,\n apiKeyConfigured: !!modelProfile?.apiKey,\n apiKeyPrefix: modelProfile?.apiKey\n ? modelProfile.apiKey.substring(0, 8)\n : null,\n maxTokens: params.max_tokens,\n temperature: MAIN_QUERY_TEMPERATURE,\n params: params,\n messageCount: params.messages?.length || 0,\n streamMode: true,\n toolsCount: toolSchemas.length,\n thinkingTokens: maxThinkingTokens,\n timestamp: new Date().toISOString(),\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n })\n\n if (config.stream) {\n const stream = await anthropic.beta.messages.create(\n {\n ...params,\n stream: true,\n },\n {\n signal: signal, // \u2190 CRITICAL: Connect the AbortSignal to API call\n },\n )\n\n let finalResponse: any | null = null\n let messageStartEvent: any = null\n const contentBlocks: any[] = []\n const inputJSONBuffers = new Map<number, string>()\n let usage: any = null\n let stopReason: string | null = null\n let stopSequence: string | null = null\n\n for await (const event of stream) {\n if (signal.aborted) {\n debugLogger.flow('STREAM_ABORTED', {\n eventType: event.type,\n timestamp: Date.now(),\n })\n throw new Error('Request was cancelled')\n }\n\n switch (event.type) {\n case 'message_start':\n messageStartEvent = event\n finalResponse = {\n ...event.message,\n content: [], // Will be populated from content blocks\n }\n break\n\n case 'content_block_start':\n contentBlocks[event.index] = { ...event.content_block }\n // Initialize JSON buffer for tool_use blocks\n if (event.content_block.type === 'tool_use') {\n inputJSONBuffers.set(event.index, '')\n }\n break\n\n case 'content_block_delta':\n const blockIndex = event.index\n\n // Ensure content block exists\n if (!contentBlocks[blockIndex]) {\n contentBlocks[blockIndex] = {\n type:\n event.delta.type === 'text_delta' ? 'text' : 'tool_use',\n text: event.delta.type === 'text_delta' ? 
'' : undefined,\n }\n if (event.delta.type === 'input_json_delta') {\n inputJSONBuffers.set(blockIndex, '')\n }\n }\n\n if (event.delta.type === 'text_delta') {\n contentBlocks[blockIndex].text += event.delta.text\n } else if (event.delta.type === 'input_json_delta') {\n const currentBuffer = inputJSONBuffers.get(blockIndex) || ''\n inputJSONBuffers.set(\n blockIndex,\n currentBuffer + event.delta.partial_json,\n )\n }\n break\n\n case 'message_delta':\n if (event.delta.stop_reason)\n stopReason = event.delta.stop_reason\n if (event.delta.stop_sequence)\n stopSequence = event.delta.stop_sequence\n if (event.usage) usage = { ...usage, ...event.usage }\n break\n\n case 'content_block_stop':\n const stopIndex = event.index\n const block = contentBlocks[stopIndex]\n\n if (\n block?.type === 'tool_use' &&\n inputJSONBuffers.has(stopIndex)\n ) {\n const jsonStr = inputJSONBuffers.get(stopIndex)\n if (jsonStr) {\n try {\n block.input = JSON.parse(jsonStr)\n } catch (error) {\n debugLogger.error('JSON_PARSE_ERROR', {\n blockIndex: stopIndex,\n jsonStr,\n error:\n error instanceof Error\n ? error.message\n : String(error),\n })\n block.input = {}\n }\n inputJSONBuffers.delete(stopIndex)\n }\n }\n break\n\n case 'message_stop':\n // Clear any remaining buffers\n inputJSONBuffers.clear()\n break\n }\n\n if (event.type === 'message_stop') {\n break\n }\n }\n\n if (!finalResponse || !messageStartEvent) {\n throw new Error('Stream ended without proper message structure')\n }\n\n // Construct the final response\n finalResponse = {\n ...messageStartEvent.message,\n content: contentBlocks.filter(Boolean),\n stop_reason: stopReason,\n stop_sequence: stopSequence,\n usage: {\n ...messageStartEvent.message.usage,\n ...usage,\n },\n }\n\n return finalResponse\n } else {\n // \uD83D\uDD25 REAL-TIME API CALL DEBUG - \u4F7F\u7528\u5168\u5C40\u65E5\u5FD7\u7CFB\u7EDF (Anthropic Non-Streaming)\n debugLogger.api('ANTHROPIC_API_CALL_START_NON_STREAMING', {\n endpoint: modelProfile?.baseURL || 'DEFAULT_ANTHROPIC',\n model,\n provider,\n apiKeyConfigured: !!modelProfile?.apiKey,\n apiKeyPrefix: modelProfile?.apiKey\n ? 
modelProfile.apiKey.substring(0, 8)\n : null,\n maxTokens: params.max_tokens,\n temperature: MAIN_QUERY_TEMPERATURE,\n messageCount: params.messages?.length || 0,\n streamMode: false,\n toolsCount: toolSchemas.length,\n thinkingTokens: maxThinkingTokens,\n timestamp: new Date().toISOString(),\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n })\n\n return await anthropic.beta.messages.create(params, {\n signal: signal, // \u2190 CRITICAL: Connect the AbortSignal to API call\n })\n }\n },\n { signal },\n )\n\n debugLogger.api('ANTHROPIC_API_CALL_SUCCESS', {\n content: response.content,\n })\n\n const ttftMs = start - Date.now()\n const durationMs = Date.now() - startIncludingRetries\n\n const content = response.content.map((block: ContentBlock) => {\n if (block.type === 'text') {\n return {\n type: 'text' as const,\n text: block.text,\n }\n } else if (block.type === 'tool_use') {\n return {\n type: 'tool_use' as const,\n id: block.id,\n name: block.name,\n input: block.input,\n }\n }\n return block\n })\n\n const assistantMessage: AssistantMessage = {\n message: {\n id: response.id,\n content,\n model: response.model,\n role: 'assistant',\n stop_reason: response.stop_reason,\n stop_sequence: response.stop_sequence,\n type: 'message',\n usage: response.usage,\n },\n type: 'assistant',\n uuid: nanoid() as UUID,\n durationMs,\n costUSD: 0, // Will be calculated below\n }\n\n // \u8BB0\u5F55\u5B8C\u6574\u7684 LLM \u4EA4\u4E92\u8C03\u8BD5\u4FE1\u606F (Anthropic path)\n // \u6CE8\u610F\uFF1AAnthropic API\u5C06system prompt\u548Cmessages\u5206\u5F00\uFF0C\u8FD9\u91CC\u91CD\u6784\u4E3A\u5B8C\u6574\u7684API\u8C03\u7528\u89C6\u56FE\n const systemMessages = system.map(block => ({\n role: 'system',\n content: block.text,\n }))\n\n logLLMInteraction({\n systemPrompt: systemPrompt.join('\\n'),\n messages: [...systemMessages, ...anthropicMessages],\n response: response,\n usage: response.usage\n ? {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n }\n : undefined,\n timing: {\n start: start,\n end: Date.now(),\n },\n apiFormat: 'anthropic',\n })\n\n // Calculate cost using native Anthropic usage data\n const inputTokens = response.usage.input_tokens\n const outputTokens = response.usage.output_tokens\n const cacheCreationInputTokens =\n response.usage.cache_creation_input_tokens ?? 0\n const cacheReadInputTokens = response.usage.cache_read_input_tokens ?? 
0\n\n const costUSD =\n (inputTokens / 1_000_000) * getModelInputTokenCostUSD(model) +\n (outputTokens / 1_000_000) * getModelOutputTokenCostUSD(model) +\n (cacheCreationInputTokens / 1_000_000) *\n getModelInputTokenCostUSD(model) +\n (cacheReadInputTokens / 1_000_000) *\n (getModelInputTokenCostUSD(model) * 0.1) // Cache reads are 10% of input cost\n\n assistantMessage.costUSD = costUSD\n addToTotalCost(costUSD, durationMs)\n\n return assistantMessage\n } catch (error) {\n return getAssistantMessageFromError(error)\n }\n}\n\nfunction getAssistantMessageFromError(error: unknown): AssistantMessage {\n if (error instanceof Error && error.message.includes('prompt is too long')) {\n return createAssistantAPIErrorMessage(PROMPT_TOO_LONG_ERROR_MESSAGE)\n }\n if (\n error instanceof Error &&\n error.message.includes('Your credit balance is too low')\n ) {\n return createAssistantAPIErrorMessage(CREDIT_BALANCE_TOO_LOW_ERROR_MESSAGE)\n }\n if (\n error instanceof Error &&\n error.message.toLowerCase().includes('x-api-key')\n ) {\n return createAssistantAPIErrorMessage(INVALID_API_KEY_ERROR_MESSAGE)\n }\n if (error instanceof Error) {\n if (process.env.NODE_ENV === 'development') {\n debugLogger.error('ANTHROPIC_API_ERROR', {\n message: error.message,\n stack: error.stack,\n })\n }\n return createAssistantAPIErrorMessage(\n `${API_ERROR_MESSAGE_PREFIX}: ${error.message}`,\n )\n }\n return createAssistantAPIErrorMessage(API_ERROR_MESSAGE_PREFIX)\n}\n\nfunction addCacheBreakpoints(\n messages: (UserMessage | AssistantMessage)[],\n): MessageParam[] {\n return messages.map((msg, index) => {\n return msg.type === 'user'\n ? userMessageToMessageParam(msg, index > messages.length - 3)\n : assistantMessageToMessageParam(msg, index > messages.length - 3)\n })\n}\n\nasync function queryOpenAI(\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[],\n maxThinkingTokens: number,\n tools: Tool[],\n signal: AbortSignal,\n options?: {\n safeMode: boolean\n model: string\n prependCLISysprompt: boolean\n modelProfile?: ModelProfile | null\n toolUseContext?: ToolUseContext\n },\n): Promise<AssistantMessage> {\n const config = getGlobalConfig()\n const modelManager = getModelManager()\n const toolUseContext = options?.toolUseContext\n\n const modelProfile = options?.modelProfile || modelManager.getModel('main')\n let model: string\n\n // \uD83D\uDD0D Debug: \u8BB0\u5F55\u6A21\u578B\u914D\u7F6E\u8BE6\u60C5\n const currentRequest = getCurrentRequest()\n debugLogger.api('MODEL_CONFIG_OPENAI', {\n modelProfileFound: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n modelProfileModelName: modelProfile?.modelName,\n modelProfileProvider: modelProfile?.provider,\n modelProfileBaseURL: modelProfile?.baseURL,\n modelProfileApiKeyExists: !!modelProfile?.apiKey,\n optionsModel: options?.model,\n requestId: getCurrentRequest()?.id,\n })\n\n if (modelProfile) {\n model = modelProfile.modelName\n } else {\n model = options?.model || modelProfile?.modelName || ''\n }\n // Prepend system prompt block for easy API identification\n if (options?.prependCLISysprompt) {\n const [firstSyspromptBlock] = splitSysPromptPrefix(systemPrompt)\n\n systemPrompt = [getCLISyspromptPrefix() + systemPrompt] // some openai-like providers need the entire system prompt as a single block\n }\n\n const system: TextBlockParam[] = splitSysPromptPrefix(systemPrompt).map(\n _ => ({\n ...(PROMPT_CACHING_ENABLED\n ? 
{ cache_control: { type: 'ephemeral' } }\n : {}),\n text: _,\n type: 'text',\n }),\n )\n\n const toolSchemas = await Promise.all(\n tools.map(\n async _ =>\n ({\n type: 'function',\n function: {\n name: _.name,\n description: await _.prompt({\n safeMode: options?.safeMode,\n }),\n // Use tool's JSON schema directly if provided, otherwise convert Zod schema\n parameters:\n 'inputJSONSchema' in _ && _.inputJSONSchema\n ? _.inputJSONSchema\n : zodToJsonSchema(_.inputSchema),\n },\n }) as OpenAI.ChatCompletionTool,\n ),\n )\n\n const openaiSystem = system.map(\n s =>\n ({\n role: 'system',\n content: s.text,\n }) as OpenAI.ChatCompletionMessageParam,\n )\n\n const openaiMessages = convertAnthropicMessagesToOpenAIMessages(messages)\n const startIncludingRetries = Date.now()\n\n // \u8BB0\u5F55\u7CFB\u7EDF\u63D0\u793A\u6784\u5EFA\u8FC7\u7A0B (OpenAI path)\n logSystemPromptConstruction({\n basePrompt: systemPrompt.join('\\n'),\n mintoContext: generateMintoContext() || '',\n reminders: [], // \u8FD9\u91CC\u53EF\u4EE5\u4ECE generateSystemReminders \u83B7\u53D6\n finalPrompt: systemPrompt.join('\\n'),\n })\n\n let start = Date.now()\n let attemptNumber = 0\n let response\n\n try {\n response = await withRetry(\n async attempt => {\n attemptNumber = attempt\n start = Date.now()\n // \uD83D\uDD25 GPT-5 Enhanced Parameter Construction\n const maxTokens = getMaxTokensFromProfile(modelProfile)\n const isGPT5 = isGPT5Model(model)\n\n const opts: OpenAI.ChatCompletionCreateParams = {\n model,\n\n ...(isGPT5\n ? { max_completion_tokens: maxTokens }\n : { max_tokens: maxTokens }),\n messages: [...openaiSystem, ...openaiMessages],\n\n temperature: isGPT5 ? 1 : MAIN_QUERY_TEMPERATURE,\n }\n if (config.stream) {\n ;(opts as OpenAI.ChatCompletionCreateParams).stream = true\n opts.stream_options = {\n include_usage: true,\n }\n }\n\n if (toolSchemas.length > 0) {\n opts.tools = toolSchemas\n opts.tool_choice = 'auto'\n }\n const reasoningEffort = await getReasoningEffort(modelProfile, messages)\n if (reasoningEffort) {\n opts.reasoning_effort = reasoningEffort\n }\n\n if (modelProfile && modelProfile.modelName) {\n debugLogger.api('USING_MODEL_PROFILE_PATH', {\n modelProfileName: modelProfile.modelName,\n modelName: modelProfile.modelName,\n provider: modelProfile.provider,\n baseURL: modelProfile.baseURL,\n apiKeyExists: !!modelProfile.apiKey,\n requestId: getCurrentRequest()?.id,\n })\n\n // Enable new adapter system with environment variable\n const USE_NEW_ADAPTER_SYSTEM =\n process.env.USE_NEW_ADAPTERS !== 'false'\n\n if (USE_NEW_ADAPTER_SYSTEM) {\n // New adapter system\n const adapter = ModelAdapterFactory.createAdapter(modelProfile)\n\n // Build unified request parameters\n const unifiedParams: UnifiedRequestParams = {\n messages: openaiMessages,\n systemPrompt: openaiSystem.map(s => s.content as string),\n tools: tools,\n maxTokens: getMaxTokensFromProfile(modelProfile),\n stream: config.stream,\n reasoningEffort: reasoningEffort as any,\n temperature: isGPT5Model(model) ? 
1 : MAIN_QUERY_TEMPERATURE,\n previousResponseId:\n toolUseContext?.responseState?.previousResponseId,\n verbosity: 'high', // High verbosity for coding tasks\n }\n\n // Create request using adapter\n const request = adapter.createRequest(unifiedParams)\n\n // Determine which API to use\n if (ModelAdapterFactory.shouldUseResponsesAPI(modelProfile)) {\n // Use Responses API for GPT-5 and similar models\n const { callGPT5ResponsesAPI } = await import('./openai')\n const response = await callGPT5ResponsesAPI(\n modelProfile,\n request,\n signal,\n )\n const unifiedResponse = adapter.parseResponse(response)\n\n // Convert unified response back to Anthropic format\n const apiMessage = {\n role: 'assistant' as const,\n content: unifiedResponse.content,\n tool_calls: unifiedResponse.toolCalls,\n usage: {\n prompt_tokens: unifiedResponse.usage.promptTokens,\n completion_tokens: unifiedResponse.usage.completionTokens,\n },\n }\n const assistantMsg: AssistantMessage = {\n type: 'assistant',\n message: apiMessage as any,\n costUSD: 0, // Will be calculated later\n durationMs: Date.now() - start,\n uuid: `${Date.now()}-${Math.random().toString(36).substr(2, 9)}` as any,\n responseId: unifiedResponse.responseId, // For state management\n }\n return assistantMsg\n } else {\n // Use existing Chat Completions flow\n const s = await getCompletionWithProfile(\n modelProfile,\n request,\n 0,\n 10,\n signal,\n )\n let finalResponse\n if (config.stream) {\n finalResponse = await handleMessageStream(\n s as ChatCompletionStream,\n signal,\n )\n } else {\n finalResponse = s\n }\n const r = convertOpenAIResponseToAnthropic(finalResponse, tools)\n return r\n }\n } else {\n // Legacy system (preserved for fallback)\n const completionFunction = isGPT5Model(modelProfile.modelName)\n ? getGPT5CompletionWithProfile\n : getCompletionWithProfile\n const s = await completionFunction(\n modelProfile,\n opts,\n 0,\n 10,\n signal,\n )\n let finalResponse\n if (opts.stream) {\n finalResponse = await handleMessageStream(\n s as ChatCompletionStream,\n signal,\n )\n } else {\n finalResponse = s\n }\n const r = convertOpenAIResponseToAnthropic(finalResponse, tools)\n return r\n }\n } else {\n // \uD83D\uDEA8 \u8B66\u544A\uFF1AModelProfile\u4E0D\u53EF\u7528\uFF0C\u4F7F\u7528\u65E7\u903B\u8F91\u8DEF\u5F84\n debugLogger.api('USING_LEGACY_PATH', {\n modelProfileExists: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelNameExists: !!modelProfile?.modelName,\n fallbackModel: 'main',\n actualModel: model,\n requestId: getCurrentRequest()?.id,\n })\n\n // \uD83D\uDEA8 FALLBACK: \u6CA1\u6709\u6709\u6548\u7684ModelProfile\u65F6\uFF0C\u5E94\u8BE5\u629B\u51FA\u9519\u8BEF\u800C\u4E0D\u662F\u4F7F\u7528\u9057\u7559\u7CFB\u7EDF\n const errorDetails = {\n modelProfileExists: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelNameExists: !!modelProfile?.modelName,\n requestedModel: model,\n requestId: getCurrentRequest()?.id,\n }\n debugLogger.error('NO_VALID_MODEL_PROFILE', errorDetails)\n throw new Error(\n `No valid ModelProfile available for model: ${model}. Please configure model through /model command. Debug: ${JSON.stringify(errorDetails)}`,\n )\n }\n },\n { signal },\n )\n } catch (error) {\n logError(error)\n return getAssistantMessageFromError(error)\n }\n const durationMs = Date.now() - start\n const durationMsIncludingRetries = Date.now() - startIncludingRetries\n\n const inputTokens = response.usage?.prompt_tokens ?? 0\n const outputTokens = response.usage?.completion_tokens ?? 
0\n const cacheReadInputTokens =\n response.usage?.prompt_token_details?.cached_tokens ?? 0\n const cacheCreationInputTokens =\n response.usage?.prompt_token_details?.cached_tokens ?? 0\n const costUSD =\n (inputTokens / 1_000_000) * SONNET_COST_PER_MILLION_INPUT_TOKENS +\n (outputTokens / 1_000_000) * SONNET_COST_PER_MILLION_OUTPUT_TOKENS +\n (cacheReadInputTokens / 1_000_000) *\n SONNET_COST_PER_MILLION_PROMPT_CACHE_READ_TOKENS +\n (cacheCreationInputTokens / 1_000_000) *\n SONNET_COST_PER_MILLION_PROMPT_CACHE_WRITE_TOKENS\n\n addToTotalCost(costUSD, durationMsIncludingRetries)\n\n // \u8BB0\u5F55\u5B8C\u6574\u7684 LLM \u4EA4\u4E92\u8C03\u8BD5\u4FE1\u606F (OpenAI path)\n logLLMInteraction({\n systemPrompt: systemPrompt.join('\\n'),\n messages: [...openaiSystem, ...openaiMessages],\n response: response,\n usage: {\n inputTokens: inputTokens,\n outputTokens: outputTokens,\n },\n timing: {\n start: start,\n end: Date.now(),\n },\n apiFormat: 'openai',\n })\n\n return {\n message: {\n ...response,\n content: normalizeContentFromAPI(response.content),\n usage: {\n input_tokens: inputTokens,\n output_tokens: outputTokens,\n cache_read_input_tokens: cacheReadInputTokens,\n cache_creation_input_tokens: 0,\n },\n },\n costUSD,\n durationMs,\n type: 'assistant',\n uuid: randomUUID(),\n }\n}\n\nfunction getMaxTokensFromProfile(modelProfile: any): number {\n // Use ModelProfile maxTokens or reasonable default\n return modelProfile?.maxTokens || 8000\n}\n\nfunction getModelInputTokenCostUSD(model: string): number {\n // Find the model in the models object\n for (const providerModels of Object.values(models)) {\n const modelInfo = providerModels.find((m: any) => m.model === model)\n if (modelInfo) {\n return modelInfo.input_cost_per_token || 0\n }\n }\n // Default fallback cost for unknown models\n return 0.000003 // Default to Claude 3 Haiku cost\n}\n\nfunction getModelOutputTokenCostUSD(model: string): number {\n // Find the model in the models object\n for (const providerModels of Object.values(models)) {\n const modelInfo = providerModels.find((m: any) => m.model === model)\n if (modelInfo) {\n return modelInfo.output_cost_per_token || 0\n }\n }\n // Default fallback cost for unknown models\n return 0.000015 // Default to Claude 3 Haiku cost\n}\n\n// New unified query functions for model pointer system\nexport async function queryModel(\n modelPointer: import('@utils/config').ModelPointerType,\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[] = [],\n signal?: AbortSignal,\n): Promise<AssistantMessage> {\n // Use queryLLM with the pointer directly\n return queryLLM(\n messages,\n systemPrompt,\n 0, // maxThinkingTokens\n [], // tools\n signal || new AbortController().signal,\n {\n safeMode: false,\n model: modelPointer,\n prependCLISysprompt: true,\n },\n )\n}\n\n// Note: Use queryModel(pointer, ...) directly instead of these convenience functions\n\n// Simplified query function using quick model pointer\nexport async function queryQuick({\n systemPrompt = [],\n userPrompt,\n assistantPrompt,\n enablePromptCaching = false,\n signal,\n}: {\n systemPrompt?: string[]\n userPrompt: string\n assistantPrompt?: string\n enablePromptCaching?: boolean\n signal?: AbortSignal\n}): Promise<AssistantMessage> {\n const messages = [\n {\n message: { role: 'user', content: userPrompt },\n type: 'user',\n uuid: randomUUID(),\n },\n ] as (UserMessage | AssistantMessage)[]\n\n return queryModel('quick', messages, systemPrompt, signal)\n}\n"],
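The embedded TypeScript source above ends here. Its `applyCacheControlWithLimits` helper budgets Claude's four-block prompt-cache limit, spending slots on long system blocks first and stripping `cache_control` from everything else. Below is a self-contained sketch of that budgeting step; the `TextBlock` type and `budgetSystemCache` name are simplified stand-ins, not the Anthropic SDK's actual types.

```ts
// Sketch of the 4-block cache budgeting performed for system blocks in
// applyCacheControlWithLimits above. Types are simplified stand-ins.
interface TextBlock {
  type: 'text'
  text: string
  cache_control?: { type: 'ephemeral' }
}

const MAX_CACHE_BLOCKS = 4

function budgetSystemCache(systemBlocks: TextBlock[]): TextBlock[] {
  let used = 0
  return systemBlocks.map(block => {
    // Only long blocks (>1000 chars) are worth a cache slot.
    if (used < MAX_CACHE_BLOCKS && block.text.length > 1000) {
      used++
      return { ...block, cache_control: { type: 'ephemeral' as const } }
    }
    // Short blocks lose any existing cache_control marker.
    const { cache_control: _drop, ...rest } = block
    return rest
  })
}

// Only the two long blocks receive a marker:
const marked = budgetSystemCache([
  { type: 'text', text: 'short preamble' },
  { type: 'text', text: 'x'.repeat(2000) },
  { type: 'text', text: 'y'.repeat(5000) },
])
console.log(marked.map(b => Boolean(b.cache_control))) // [ false, true, true ]
```

In the source, the same budget counter then carries over to message content blocks, which qualify only when they exceed 2000 characters or are the trailing block of the last message and longer than 500 characters.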
5 | -
"mappings": "AAAA,OAAO;AACP,OAAO,aAAa,oBAAoB,gBAAgB;AACxD,SAAS,wBAAwB;AACjC,SAAS,uBAAuB;AAChC,OAAO,WAAW;AAClB,SAAS,kBAAwB;AACjC,OAAO;AAEP,SAAS,sBAAsB;AAC/B,OAAO,YAAY;AAGnB;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,sBAAsB;AAC/B,SAAS,UAAU,kBAAkB;AACrC,SAAS,sBAAsB;AAC/B,SAAS,kBAAkB;AAC3B;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,eAAe;AACxB;AAAA,EACE,SAAS;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,uBAAuB;AAChC,SAAS,uBAAuB;AAChC,SAAS,2BAA2B;AAEpC,SAAS,sBAAsB,yBAAyB;AAMxD,SAAS,aAAa,kBAAkB;AACxC,SAAS,6BAA6B;AACtC,SAAS,+BAA+B;AAIxC,SAAS,cAAc;AACvB;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,0BAA0B;AACnC,SAAS,+BAA+B;AAGxC,SAAS,YAAY,WAA4B;AAC/C,SAAO,UAAU,WAAW,OAAO;AACrC;AAGA,MAAM,oBAAoB;AAAA,EACxB,OAAe;AAAA,EACP,mBAA2B;AAAA,EAC3B,mBAA4B;AAAA,EAC5B,cAAoC;AAAA,EAEpC,cAAc;AAAA,EAAC;AAAA,EAEvB,OAAc,cAAmC;AAC/C,QAAI,CAAC,oBAAoB,UAAU;AACjC,0BAAoB,WAAW,IAAI,oBAAoB;AAAA,IACzD;AACA,WAAO,oBAAoB;AAAA,EAC7B;AAAA,EAEA,MAAa,aAA4B;AACvC,QAAI,KAAK,iBAAkB;AAE3B,QAAI,KAAK,aAAa;AACpB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,cAAc,KAAK,gBAAgB;AACxC,UAAM,KAAK;AAAA,EACb;AAAA,EAEA,MAAc,kBAAiC;AAC7C,QAAI;AACF,YAAM,cAAc,MAAM,eAAe;AACzC,WAAK,mBAAmB,eAAe;AACvC,WAAK,mBAAmB;AAGxB,UAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,oBAAY,KAAK,wBAAwB;AAAA,UACvC,YAAY,KAAK,iBAAiB;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,IACF,SAAS,OAAO;AACd,kBAAY,KAAK,6BAA6B,EAAE,OAAO,OAAO,KAAK,EAAE,CAAC;AACtE,WAAK,mBAAmB;AACxB,WAAK,mBAAmB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEO,kBAA0B;AAC/B,QAAI,CAAC,KAAK,kBAAkB;AAE1B,WAAK,WAAW,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAChC,aAAO;AAAA,IACT;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAa,eAA8B;AACzC,SAAK,mBAAmB;AACxB,SAAK,cAAc;AACnB,UAAM,KAAK,WAAW;AAAA,EACxB;AACF;AAGA,MAAM,sBAAsB,oBAAoB,YAAY;AAG5D,oBAAoB,WAAW,EAAE,MAAM,MAAM;AAAC,CAAC;AAExC,MAAM,uBAAuB,MAAc;AAChD,SAAO,oBAAoB,gBAAgB;AAC7C;AAEO,MAAM,sBAAsB,YAA2B;AAC5D,QAAM,oBAAoB,aAAa;AACzC;AAEO,MAAM,2BAA2B;AACjC,MAAM,gCAAgC;AACtC,MAAM,uCAAuC;AAC7C,MAAM,gCACX;AACK,MAAM,qBAAqB;AAClC,MAAM,yBAAyB,CAAC,QAAQ,IAAI;AAG5C,MAAM,uCAAuC;AAC7C,MAAM,wCAAwC;AAC9C,MAAM,oDAAoD;AAC1D,MAAM,mDAAmD;AAElD,MAAM,yBAAyB;AAEtC,SAAS,cAAc;AACrB,SAAO;AAAA,IACL,SAAS,GAAG,kBAAkB,CAAC,IAAI,UAAU;AAAA,EAC/C;AACF;AAEA,MAAM,cAAc,QAAQ,IAAI,cAAc,cAAc,MAAM;AAClE,MAAM,gBAAgB;AAOtB,SAAS,cACP,SACA,kBACQ;AACR,MAAI,kBAAkB;AACpB,UAAM,UAAU,SAAS,kBAAkB,EAAE;AAC7C,QAAI,CAAC,MAAM,OAAO,GAAG;AACnB,aAAO,UAAU;AAAA,IACnB;AAAA,EACF;AACA,SAAO,KAAK,IAAI,gBAAgB,KAAK,IAAI,GAAG,UAAU,CAAC,GAAG,IAAK;AACjE;AAEA,SAAS,YAAY,OAA0B;AAE7C,MAAI,MAAM,SAAS,SAAS,2BAA2B,GAAG;AACxD,WAAO,QAAQ,IAAI,cAAc;AAAA,EACnC;AAGA,QAAM,oBAAoB,MAAM,UAAU,gBAAgB;AAG1D,MAAI,sBAAsB,OAAQ,QAAO;AACzC,MAAI,sBAAsB,QAAS,QAAO;AAE1C,MAAI,iBAAiB,oBAAoB;AACvC,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,MAAM,OAAQ,QAAO;AAG1B,MAAI,MAAM,WAAW,IAAK,QAAO;AAGjC,MAAI,MAAM,WAAW,IAAK,QAAO;AAGjC,MAAI,MAAM,WAAW,IAAK,QAAO;AAGjC,MAAI,MAAM,UAAU,MAAM,UAAU,IAAK,QAAO;AAEhD,SAAO;AACT;AAEA,eAAe,UACb,WACA,UAAwB,CAAC,GACb;AACZ,QAAM,aAAa,QAAQ,cAAc;AACzC,MAAI;AAEJ,WAAS,UAAU,GAAG,WAAW,aAAa,GAAG,WAAW;AAC1D,QAAI;AACF,aAAO,MAAM,UAAU,OAAO;AAAA,IAChC,SAAS,OAAO;AACd,kBAAY;AAEZ,UACE,UAAU,cACV,EAAE,iBAAiB,aACnB,CAAC,YAAY,KAAK,GAClB;AACA,cAAM;AAAA,MACR;AAEA,UAAI,QAAQ,QAAQ,SAAS;AAC3B,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAGA,YAAM,aAAa,MAAM,UAAU,aAAa,KAAK;AACrD,YAAM,UAAU,cAAc,SAAS,UAAU;AAEjD,cAAQ;AAAA,QACN,aAAQ,MAAM,IAAI,OAAO,MAAM,IAAI,KAAK,MAAM,OAAO,sBAAmB,KAAK,MAAM,UAAU,GAAI,CAAC,2BAAsB,OAAO,IAAI,UAAU,GAAG,CAAC;AAAA,MACnJ;AAEA,UAAI;AACF,cAAM,eAAe,SAAS,QAAQ,MAAM;AAAA,MAC9C,SAAS,YAAY;AAEnB,YAAI,WAAW,YAAY,uBAAuB;AAChD,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM;AACR;AAKA,eAAsB,qBACpB,SACA,QACgB;AAChB,MAAI;AAEF,U
AAM,YAAY,UACd,GAAG,QAAQ,QAAQ,QAAQ,EAAE,CAAC,eAC9B;AAEJ,UAAM,WAAW,MAAM,MAAM,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB;AAAA,QACrB,cAAc;AAAA,MAChB;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAEhB,UAAI,SAAS,WAAW,KAAK;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,UAAU,KAAK;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,OAAO;AACL,cAAM,IAAI;AAAA,UACR,uCAAuC,SAAS,MAAM;AAAA,QACxD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,WAAO,KAAK,QAAQ,CAAC;AAAA,EACvB,SAAS,OAAO;AAEd,QACG,iBAAiB,SAAS,MAAM,QAAQ,SAAS,SAAS,KAC1D,iBAAiB,SAAS,MAAM,QAAQ,SAAS,WAAW,GAC7D;AACA,YAAM;AAAA,IACR;AAGA,aAAS,KAAK;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAsB,aACpB,QACA,SACA,UACkB;AAClB,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,aAAa,aAAa;AACxC,QAAI;AACF,YAAM,UAAkC;AAAA,QACtC,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAEA,UAAI,CAAC,SAAS;AACZ,oBAAY,KAAK,cAAc;AAAA,UAC7B,OAAO;AAAA,QACT,CAAC;AACD,eAAO;AAAA,MACT;AAEA,YAAM,YAAY,GAAG,QAAQ,QAAQ,QAAQ,EAAE,CAAC;AAEhD,YAAM,WAAW,MAAM,MAAM,WAAW;AAAA,QACtC,QAAQ;AAAA,QACR;AAAA,MACF,CAAC;AAED,aAAO,SAAS;AAAA,IAClB,SAAS,OAAO;AACd,kBAAY,KAAK,cAAc;AAAA,QAC7B,OAAO;AAAA,QACP,SAAS,OAAO,KAAK;AAAA,MACvB,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,eAAoB;AAAA,IACxB;AAAA,IACA,yBAAyB;AAAA,IACzB,YAAY;AAAA,IACZ,gBAAgB;AAAA,MACd,cAAc;AAAA,IAChB;AAAA,EACF;AAGA,MACE,YACC,aAAa,eACZ,aAAa,cACb,aAAa,YACf;AACA,iBAAa,UAAU;AAAA,EACzB;AAEA,QAAM,YAAY,IAAI,UAAU,YAAY;AAE5C,MAAI;AACF,UAAM;AAAA,MACJ,YAAY;AACV,cAAM,QAAQ;AACd,cAAM,WAA2B,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AACnE,cAAM,UAAU,SAAS,OAAO;AAAA,UAC9B;AAAA,UACA,YAAY;AAAA;AAAA,UACZ;AAAA,UACA,aAAa;AAAA,UACb,UAAU,YAAY;AAAA,QACxB,CAAC;AACD,eAAO;AAAA,MACT;AAAA,MACA,EAAE,YAAY,EAAE;AAAA;AAAA,IAClB;AACA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,aAAS,KAAK;AAEd,QACE,iBAAiB,SACjB,MAAM,QAAQ;AAAA,MACZ;AAAA,IACF,GACA;AACA,aAAO;AAAA,IACT;AACA,UAAM;AAAA,EACR;AACF;AAEA,SAAS,yCACP,UAIE;AACF,QAAM,iBAGA,CAAC;AAEP,QAAM,cAAqE,CAAC;AAE5E,aAAW,WAAW,UAAU;AAC9B,QAAI,gBAAgB,CAAC;AACrB,QAAI,OAAO,QAAQ,QAAQ,YAAY,UAAU;AAC/C,sBAAgB;AAAA,QACd;AAAA,UACE,MAAM;AAAA,UACN,MAAM,QAAQ,QAAQ;AAAA,QACxB;AAAA,MACF;AAAA,IACF,WAAW,CAAC,MAAM,QAAQ,QAAQ,QAAQ,OAAO,GAAG;AAClD,sBAAgB,CAAC,QAAQ,QAAQ,OAAO;AAAA,IAC1C,OAAO;AACL,sBAAgB,QAAQ,QAAQ;AAAA,IAClC;AAEA,eAAW,SAAS,eAAe;AACjC,UAAI,MAAM,SAAS,QAAQ;AACzB,uBAAe,KAAK;AAAA,UAClB,MAAM,QAAQ,QAAQ;AAAA,UACtB,SAAS,MAAM;AAAA,QACjB,CAAC;AAAA,MACH,WAAW,MAAM,SAAS,YAAY;AACpC,uBAAe,KAAK;AAAA,UAClB,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY;AAAA,YACV;AAAA,cACE,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,MAAM;AAAA,gBACZ,WAAW,KAAK,UAAU,MAAM,KAAK;AAAA,cACvC;AAAA,cACA,IAAI,MAAM;AAAA,YACZ;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH,WAAW,MAAM,SAAS,eAAe;AAEvC,YAAI,cAAc,MAAM;AACxB,YAAI,OAAO,gBAAgB,UAAU;AAEnC,wBAAc,KAAK,UAAU,WAAW;AAAA,QAC1C;AAEA,oBAAY,MAAM,WAAW,IAAI;AAAA,UAC/B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,cAAc,MAAM;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,gBAGA,CAAC;AAEP,aAAW,WAAW,gBAAgB;AACpC,kBAAc,KAAK,OAAO;AAE1B,QAAI,gBAAgB,WAAW,QAAQ,YAAY;AACjD,iBAAW,YAAY,QAAQ,YAAY;AACzC,YAAI,YAAY,SAAS,EAAE,GAAG;AAC5B,wBAAc,KAAK,YAAY,SAAS,EAAE,CAAC;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eACP,UACA,MAC8B;AAC9B,QAAM,SAAS,CAAC,KAAU,UAAmD;AAC3E,UAAM,EAAE,GAAG,IAAI;AACf,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK,GAAG;AAChD,UAAI,IAAI,GAAG,MAAM,UAAa,IAAI,GAAG,MAAM,MAAM;AAC/C,YAAI,GAAG,IAAI;AAEX,YAAI,MAAM,QAAQ,IAAI,GAAG,CAAC,GAAG;AAC3B,qBAAW,OAAO,IAAI,GAAG,GAAG;AAC1B,mBAAO,IAAI;AAAA,UACb;AAAA,QACF;AAAA,MACF,WAAW,OAAO,IAAI,GAAG,M
AAM,YAAY,OAAO,UAAU,UAAU;AACpE,YAAI,GAAG,KAAK;AAAA,MACd,WAAW,OAAO,IAAI,GAAG,MAAM,YAAY,OAAO,UAAU,UAAU;AACpE,YAAI,GAAG,IAAI;AAAA,MACb,WAAW,MAAM,QAAQ,IAAI,GAAG,CAAC,KAAK,MAAM,QAAQ,KAAK,GAAG;AAC1D,cAAM,WAAW,IAAI,GAAG;AACxB,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAM,EAAE,OAAO,GAAG,UAAU,IAAI,MAAM,CAAC;AACvC,cAAI,QAAQ,SAAS,SAAS,GAAG;AAC/B,kBAAM,IAAI;AAAA,cACR,mFAAmF,QAAQ,WAAW,KAAK;AAAA,YAC7G;AAAA,UACF;AACA,mBAAS,KAAK,IAAI,OAAO,SAAS,KAAK,GAAG,SAAS;AAAA,QACrD;AAAA,MACF,WAAW,OAAO,IAAI,GAAG,MAAM,YAAY,OAAO,UAAU,UAAU;AACpE,YAAI,GAAG,IAAI,OAAO,IAAI,GAAG,GAAG,KAAK;AAAA,MACnC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,MAAI,CAAC,QAAQ;AAEX,WAAO;AAAA,EACT;AACA,SAAO,OAAO,UAAU,OAAO,KAAK;AACtC;AACA,eAAe,oBACb,QACA,QACgC;AAChC,QAAM,kBAAkB,KAAK,IAAI;AACjC,MAAI;AACJ,MAAI,aAAa;AACjB,MAAI,aAAa;AAEjB,cAAY,IAAI,uBAAuB;AAAA,IACrC,iBAAiB,OAAO,eAAe;AAAA,EACzC,CAAC;AAED,MAAI,UAAU,CAAC;AAEf,MAAI,IAAI,OAAO,SAAS,QAAQ;AAChC,MAAI;AACF,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,SAAS;AACnB,oBAAY,KAAK,yBAAyB;AAAA,UACxC;AAAA,UACA,WAAW,KAAK,IAAI;AAAA,QACtB,CAAC;AACD,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAEA;AAEA,UAAI;AACF,YAAI,CAAC,IAAI;AACP,eAAK,MAAM;AACX,sBAAY,IAAI,6BAA6B;AAAA,YAC3C;AAAA,YACA,aAAa,OAAO,UAAU;AAAA,UAChC,CAAC;AAAA,QACH;AACA,YAAI,CAAC,OAAO;AACV,kBAAQ,MAAM;AACd,sBAAY,IAAI,gCAAgC;AAAA,YAC9C;AAAA,YACA,aAAa,OAAO,UAAU;AAAA,UAChC,CAAC;AAAA,QACH;AACA,YAAI,CAAC,SAAS;AACZ,oBAAU,MAAM;AAAA,QAClB;AACA,YAAI,CAAC,QAAQ;AACX,mBAAS,MAAM;AAAA,QACjB;AACA,YAAI,CAAC,OAAO;AACV,kBAAQ,MAAM;AAAA,QAChB;AAEA,kBAAU,eAAe,SAAS,KAAK;AAEvC,YAAI,OAAO,UAAU,CAAC,GAAG,OAAO,SAAS;AACvC,cAAI,CAAC,QAAQ;AACX,qBAAS,KAAK,IAAI,IAAI;AACtB,wBAAY,IAAI,6BAA6B;AAAA,cAC3C,QAAQ,OAAO,MAAM;AAAA,cACrB,aAAa,OAAO,UAAU;AAAA,YAChC,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,SAAS,YAAY;AACnB;AACA,oBAAY,MAAM,6BAA6B;AAAA,UAC7C,aAAa,OAAO,UAAU;AAAA,UAC9B,cACE,sBAAsB,QAClB,WAAW,UACX,OAAO,UAAU;AAAA,UACvB,WACE,sBAAsB,QAClB,WAAW,YAAY,OACvB,OAAO;AAAA,QACf,CAAC;AAAA,MAEH;AAAA,IACF;AAEA,gBAAY,IAAI,0BAA0B;AAAA,MACxC,aAAa,OAAO,UAAU;AAAA,MAC9B,YAAY,OAAO,UAAU;AAAA,MAC7B,eAAe,OAAO,KAAK,IAAI,IAAI,eAAe;AAAA,MAClD,QAAQ,OAAO,UAAU,CAAC;AAAA,MAC1B,gBAAgB,MAAM;AAAA,IACxB,CAAC;AAAA,EACH,SAAS,aAAa;AACpB,gBAAY,MAAM,6BAA6B;AAAA,MAC7C,aAAa,OAAO,UAAU;AAAA,MAC9B,YAAY,OAAO,UAAU;AAAA,MAC7B,cACE,uBAAuB,QACnB,YAAY,UACZ,OAAO,WAAW;AAAA,MACxB,WACE,uBAAuB,QACnB,YAAY,YAAY,OACxB,OAAO;AAAA,IACf,CAAC;AACD,UAAM;AAAA,EACR;AACA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACP;AAAA,QACE,OAAO;AAAA,QACP;AAAA,QACA,eAAe;AAAA,QACf,UAAU;AAAA,MACZ;AAAA,IACF;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,iCACP,UACA,QACA;AACA,MAAI,gBAAgC,CAAC;AACrC,QAAM,UAAU,SAAS,UAAU,CAAC,GAAG;AACvC,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS,CAAC;AAAA,MACV,aAAa,SAAS,UAAU,CAAC,GAAG;AAAA,MACpC,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,IAClB;AAAA,EACF;AAEA,MAAI,SAAS,YAAY;AACvB,eAAW,YAAY,QAAQ,YAAY;AACzC,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,MAAM;AACvB,UAAI,WAAW,CAAC;AAChB,UAAI;AACF,mBAAW,MAAM,YAAY,KAAK,MAAM,KAAK,SAAS,IAAI,CAAC;AAAA,MAC7D,SAAS,GAAG;AAAA,MAEZ;AAEA,oBAAc,KAAK;AAAA,QACjB,MAAM;AAAA,QACN,OAAO;AAAA,QACP,MAAM;AAAA,QACN,IAAI,SAAS,IAAI,SAAS,IAAI,SAAS,KAAK,OAAO;AAAA,MACrD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAK,QAAgB,WAAW;AAC9B,kBAAc,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,UAAW,QAAgB;AAAA,MAC3B,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAGA,MAAK,QAAgB,mBAAmB;AACtC,kBAAc,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,UAAW,QAAgB;AAAA,MAC3B,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ,SAAS;AACnB,kBAAc,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,MAAM,SAAS;AAAA,MACf,WAAW,CAAC;AAAA,IACd,CAAC;AAAA,EACH;AAEA,QAAM,eAAe;AAAA,IACnB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa,SAAS,UAAU,CAAC,GAAG;AAAA,IACpC,MAAM;A
AAA,IACN,OAAO,SAAS;AAAA,EAClB;AAEA,SAAO;AACT;AAEA,IAAI,kBACF;AAKK,SAAS,mBACd,OACgD;AAChD,QAAM,SAAS,gBAAgB;AAC/B,QAAM,WAAW,OAAO;AAGxB,MAAI,mBAAmB,UAAU;AAE/B,sBAAkB;AAAA,EACpB;AAEA,MAAI,iBAAiB;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,wBAAwB,KAAK;AAE5C,QAAM,iBAA4C;AAAA,IAChD,SAAS;AAAA,IACT,cAAc;AAAA,EAChB;AACA,MAAI,QAAQ,IAAI,sBAAsB;AACpC,mBAAe,eAAe,IAC5B,UAAU,QAAQ,IAAI,oBAAoB;AAAA,EAC9C;AAEA,QAAM,OAAO;AAAA,IACX;AAAA,IACA,YAAY;AAAA;AAAA,IACZ,SAAS,SAAS,QAAQ,IAAI,kBAAkB,OAAO,KAAK,GAAI,GAAG,EAAE;AAAA,EACvE;AACA,MAAI,aAAa;AACf,UAAM,SAAS,IAAI,iBAAiB,IAAI;AACxC,sBAAkB;AAClB,WAAO;AAAA,EACT;AACA,MAAI,YAAY;AACd,UAAM,aAAa;AAAA,MACjB,GAAG;AAAA,MACH,QAAQ,UAAU,QAAQ,IAAI,mBAAmB;AAAA,IACnD;AACA,UAAM,SAAS,IAAI,gBAAgB,UAAU;AAC7C,sBAAkB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,eAAe,gBAAgB;AACrC,QAAM,eAAe,aAAa,SAAS,MAAM;AAEjD,MAAI;AACJ,MAAI;AAEJ,MAAI,cAAc;AAChB,aAAS,aAAa,UAAU;AAChC,cAAU,aAAa;AAAA,EACzB,OAAO;AAEL,aAAS,mBAAmB;AAC5B,cAAU;AAAA,EACZ;AAEA,MAAI,QAAQ,IAAI,cAAc,SAAS,CAAC,UAAU,aAAa,aAAa;AAC1E,YAAQ;AAAA,MACN,MAAM;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAIA,QAAM,eAAe;AAAA,IACnB;AAAA,IACA,yBAAyB;AAAA,IACzB,GAAG;AAAA,IACH,GAAI,WAAW,EAAE,QAAQ;AAAA;AAAA,EAC3B;AAEA,oBAAkB,IAAI,UAAU,YAAY;AAC5C,SAAO;AACT;AAKO,SAAS,uBAA6B;AAC3C,oBAAkB;AACpB;AAoCA,SAAS,4BACP,cACA,eACmE;AACnE,MAAI,CAAC,wBAAwB;AAC3B,WAAO,EAAE,cAAc,cAAc;AAAA,EACvC;AAEA,QAAM,iBAAiB;AACvB,MAAI,kBAAkB;AAGtB,QAAM,wBAAwB,aAAa,IAAI,CAAC,OAAO,WAAW;AAChE,QAAI,kBAAkB,kBAAkB,MAAM,KAAK,SAAS,KAAM;AAChE;AACA,aAAO;AAAA,QACL,GAAG;AAAA,QACH,eAAe,EAAE,MAAM,YAAqB;AAAA,MAC9C;AAAA,IACF;AACA,UAAM,EAAE,eAAe,GAAG,kBAAkB,IAAI;AAChD,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,yBAAyB,cAAc,IAAI,CAAC,SAAS,iBAAiB;AAC1E,QAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAClC,YAAM,mBAAmB,QAAQ,QAAQ;AAAA,QACvC,CAAC,cAAc,eAAe;AAE5B,gBAAM,cACJ,kBAAkB,kBAClB,aAAa,SAAS,UACtB,OAAO,aAAa,SAAS;AAAA,WAE5B,aAAa,KAAK,SAAS;AAAA,UAEzB,iBAAiB,cAAc,SAAS,KACvC,eAAe,QAAQ,QAAQ,SAAS,KACxC,aAAa,KAAK,SAAS;AAEjC,cAAI,aAAa;AACf;AACA,mBAAO;AAAA,cACL,GAAG;AAAA,cACH,eAAe,EAAE,MAAM,YAAqB;AAAA,YAC9C;AAAA,UACF;AAGA,gBAAM,EAAE,eAAe,GAAG,kBAAkB,IAAI;AAChD,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS;AAAA,MACX;AAAA,IACF;AAEA,WAAO;AAAA,EACT,CAAC;AAED,SAAO;AAAA,IACL,cAAc;AAAA,IACd,eAAe;AAAA,EACjB;AACF;AAEO,SAAS,0BACd,SACA,WAAW,OACG;AACd,MAAI,UAAU;AACZ,QAAI,OAAO,QAAQ,QAAQ,YAAY,UAAU;AAC/C,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,QAAQ,QAAQ;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ,QAAQ,IAAI,QAAM,EAAE,GAAG,EAAE,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS,QAAQ,QAAQ;AAAA,EAC3B;AACF;AAEO,SAAS,+BACd,SACA,WAAW,OACG;AACd,MAAI,UAAU;AACZ,QAAI,OAAO,QAAQ,QAAQ,YAAY,UAAU;AAC/C,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,QAAQ,QAAQ;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ,QAAQ,IAAI,QAAM,EAAE,GAAG,EAAE,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS,QAAQ,QAAQ;AAAA,EAC3B;AACF;AAEA,SAAS,qBAAqB,cAAkC;AAG9D,QAAM,yBAAyB,aAAa,CAAC,KAAK;AAClD,QAAM,mBAAmB,aAAa,MAAM,CAAC;AAC7C,SAAO,CAAC,wBAAwB,iBAAiB,KAAK,IAAI,CAAC,EAAE,OAAO,OAAO;AAC7E;AAEA,eAAsB,SACpB,UACA,cACA,mBACA,OACA,QACA,SAM2B;AAC3B,QAAM,eAAe,gBAAgB;AACrC,QAAM,kBAAkB,aAAa,qBAAqB,QAAQ,KAAK;AAEvE,MAAI,CAAC,gBAAgB,WAAW,CAAC,gBAAgB,SAAS;AACxD,UAAM,IAAI;AAAA,MACR,gBAAgB,SAAS,4BAA4B,QAAQ,KAAK;AAAA,IACpE;AAAA,EACF;AAEA,QAAM,eAAe,gBAAgB;AACrC,QAAM,gBAAgB,aAAa;AAGnC,QAAM,iBAAiB,QAAQ;AAC/B,MAAI,kBAAkB,CAAC,eAAe,eAAe;AACnD,UAAM,iBAAiB;AAAA,MACrB,eAAe;AAAA,MACf,eAAe;AAAA,IACjB;AACA,UAAM,qBACJ,qBAAq
B,sBAAsB,cAAc;AAE3D,mBAAe,gBAAgB;AAAA,MAC7B;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,cAAY,IAAI,kBAAkB;AAAA,IAChC,YAAY,QAAQ;AAAA,IACpB,mBAAmB;AAAA,IACnB,UAAU,aAAa;AAAA,IACvB,WAAW,CAAC,QAAQ,QAAQ,aAAa,OAAO,EAAE,SAAS,QAAQ,KAAK;AAAA,IACxE,kBAAkB,CAAC,CAAC,gBAAgB;AAAA,IACpC,gBAAgB,gBAAgB,eAAe;AAAA,IAC/C,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,kBAAkB;AACzC,cAAY,IAAI,qBAAqB;AAAA,IACnC,cAAc,SAAS;AAAA,IACvB,oBAAoB,aAAa,KAAK,GAAG,EAAE;AAAA,IAC3C,WAAW,MAAM;AAAA,IACjB,OAAO;AAAA,IACP,oBAAoB,QAAQ;AAAA,IAC5B,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,YAAU,UAAU;AAEpB,MAAI;AACF,UAAM,SAAS,MAAM;AAAA,MAAQ;AAAA,MAAU,MACrC;AAAA,QACE;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,EAAE,GAAG,SAAS,OAAO,eAAe,cAAc,eAAe;AAAA;AAAA,MACnE;AAAA,IACF;AAEA,gBAAY,IAAI,uBAAuB;AAAA,MACrC,SAAS,OAAO;AAAA,MAChB,YAAY,OAAO;AAAA,MACnB,gBAAgB,OAAO,QAAQ,SAAS,UAAU;AAAA,MAClD,WAAW,kBAAkB,GAAG;AAAA,IAClC,CAAC;AAGD,QAAI,gBAAgB,eAAe,kBAAkB,OAAO,YAAY;AACtE,2BAAqB;AAAA,QACnB,eAAe,cAAc;AAAA,QAC7B,OAAO;AAAA,MACT;AAEA,kBAAY,IAAI,0BAA0B;AAAA,QACxC,gBAAgB,eAAe,cAAc;AAAA,QAC7C,YAAY,OAAO;AAAA,QACnB,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd;AAAA,MACE;AAAA,MACA;AAAA,QACE,cAAc,SAAS;AAAA,QACvB,oBAAoB,aAAa,KAAK,GAAG,EAAE;AAAA,QAC3C,OAAO,QAAQ;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,OAAO;AAAA,MACT;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEA,UAAM;AAAA,EACR;AACF;AAEO,SAAS,8BACd,cACA,SACA,SACA,uBAAuB,OACwB;AAE/C,QAAM,iBAAiB,CAAC,GAAG,YAAY;AACvC,MAAI,YAAY;AAGhB,QAAM,eAAe,gBAAgB;AACrC,QAAM,eAAe,aAAa,SAAS,MAAM;AACjD,MAAI,gBAAgB,YAAY,aAAa,SAAS,GAAG;AAEvD,UAAM,qBAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,mBAAe,KAAK,GAAG,kBAAkB;AAAA,EAC3C;AAGA,QAAM,aAAa,OAAO,QAAQ,OAAO,EAAE,SAAS;AAEpD,MAAI,YAAY;AAEd,QAAI,CAAC,sBAAsB;AACzB,YAAM,eAAe,qBAAqB;AAC1C,UAAI,cAAc;AAEhB,uBAAe,KAAK,2CAAkB;AACtC,uBAAe,KAAK,YAAY;AAChC,uBAAe,KAAK,SAAS;AAAA,MAC/B;AAAA,IACF;AAGA,UAAM,mBAAmB,wBAAwB,YAAY,OAAO;AACpE,QAAI,iBAAiB,SAAS,GAAG;AAC/B,kBAAY,iBAAiB,IAAI,OAAK,EAAE,OAAO,EAAE,KAAK,IAAI,IAAI;AAAA,IAChE;AAGA,mBAAe;AAAA,MACb;AAAA;AAAA;AAAA,IACF;AAGA,UAAM,kBAAkB,OAAO;AAAA,MAC7B,OAAO,QAAQ,OAAO,EAAE;AAAA,QACtB,CAAC,CAAC,GAAG,MAAM,QAAQ,iBAAiB,QAAQ;AAAA,MAC9C;AAAA,IACF;AAEA,mBAAe;AAAA,MACb,GAAG,OAAO,QAAQ,eAAe,EAAE;AAAA,QACjC,CAAC,CAAC,KAAK,KAAK,MAAM,kBAAkB,GAAG,KAAK,KAAK;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,cAAc,gBAAgB,UAAU;AACnD;AAEA,eAAe,0BACb,UACA,cACA,mBACA,OACA,QACA,SAO2B;AAC3B,QAAM,SAAS,gBAAgB;AAC/B,QAAM,eAAe,gBAAgB;AACrC,QAAM,iBAAiB,QAAQ;AAE/B,QAAM,eAAe,QAAQ,gBAAgB,aAAa,SAAS,MAAM;AACzE,MAAI;AAEJ,MAAI,cAAc;AAChB,eAAW,aAAa,YAAY,OAAO,mBAAmB;AAAA,EAChE,OAAO;AACL,eAAW,OAAO,mBAAmB;AAAA,EACvC;AAGA,MACE,aAAa,eACb,aAAa,cACb,aAAa,WACb;AACA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,EAAE,GAAG,SAAS,cAAc,eAAe;AAAA,IAC7C;AAAA,EACF;AAGA,SAAO,YAAY,UAAU,cAAc,mBAAmB,OAAO,QAAQ;AAAA,IAC3E,GAAG;AAAA,IACH;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,eAAe,qBACb,UACA,cACA,mBACA,OACA,QACA,SAO2B;AAC3B,QAAM,SAAS,gBAAgB;AAC/B,QAAM,eAAe,gBAAgB;AACrC,QAAM,iBAAiB,SAAS;AAEhC,QAAM,eAAe,SAAS,gBAAgB,aAAa,SAAS,MAAM;AAC1E,MAAI;AACJ,MAAI;AACJ,MAAI;AAGJ,cAAY,IAAI,0BAA0B;AAAA,IACxC,mBAAmB,CAAC,CAAC;AAAA,IACrB,gBAAgB,cAAc;AAAA,IAC9B,kBAAkB,cAAc;AAAA,IAChC,uBAAuB,cAAc;AAAA,IACrC,sBAAsB,cAAc;AAAA,IACpC,qBAAqB,cAAc;AAAA,IACnC,0BAA0B,CAAC,CAAC,cAAc;AAAA,IAC1C,cAAc,SAAS;AAAA,IACvB,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,MAAI,cAAc;AAEhB,YAAQ,aAAa;AACrB,eAAW,aAAa,YAAY,OAAO,mBAAmB;AAG9D,QACE,aAAa,aAAa,eAC1B,aAAa,aAAa,cAC1B,aAAa,aAAa,WAC1B;AACA,YAAM,eAAoB;AAAA,QACxB,QAAQ,aAAa;AAAA,QACrB,yBAAyB;AAAA,QACzB,YAAY;AAAA,QACZ,SAAS,SAAS,QAAQ,IAAI,kBAAkB,OAAO,KAA
K,GAAI,GAAG,EAAE;AAAA,QACrE,gBAAgB;AAAA,UACd,SAAS;AAAA,UACT,cAAc;AAAA,QAChB;AAAA,MACF;AAGA,UAAI,aAAa,SAAS;AACxB,qBAAa,UAAU,aAAa;AAAA,MACtC;AAEA,kBAAY,IAAI,UAAU,YAAY;AAAA,IACxC,OAAO;AAEL,kBAAY,mBAAmB,KAAK;AAAA,IACtC;AAAA,EACF,OAAO;AAEL,UAAM,eAAe;AAAA,MACnB,oBAAoB,CAAC,CAAC;AAAA,MACtB,uBAAuB,cAAc;AAAA,MACrC,gBAAgB,SAAS;AAAA,MACzB,WAAW,kBAAkB,GAAG;AAAA,IAClC;AACA,gBAAY,MAAM,4BAA4B,YAAY;AAC1D,UAAM,IAAI;AAAA,MACR,iHAAiH,KAAK,UAAU,YAAY,CAAC;AAAA,IAC/I;AAAA,EACF;AAGA,MAAI,SAAS,qBAAqB;AAEhC,UAAM,CAAC,mBAAmB,IAAI,qBAAqB,YAAY;AAE/D,mBAAe,CAAC,sBAAsB,GAAG,GAAG,YAAY;AAAA,EAC1D;AAEA,QAAM,SAA2B,qBAAqB,YAAY,EAAE;AAAA,IAClE,QAAM;AAAA,MACJ,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,MAAM;AAAA,MACJ,OAAM,UACH;AAAA,QACC,MAAM,KAAK;AAAA,QACX,aACE,OAAO,KAAK,gBAAgB,aACxB,MAAM,KAAK,YAAY,IACvB,KAAK;AAAA,QACX,cACE,qBAAqB,QAAQ,KAAK,kBAC9B,KAAK,kBACL,gBAAgB,KAAK,WAAW;AAAA,MACxC;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,oBAAoB,oBAAoB,QAAQ;AAGtD,QAAM,EAAE,cAAc,iBAAiB,eAAe,kBAAkB,IACtE,4BAA4B,QAAQ,iBAAiB;AACvD,QAAM,wBAAwB,KAAK,IAAI;AAGvC,8BAA4B;AAAA,IAC1B,YAAY,aAAa,KAAK,IAAI;AAAA,IAClC,cAAc,qBAAqB,KAAK;AAAA,IACxC,WAAW,CAAC;AAAA;AAAA,IACZ,aAAa,aAAa,KAAK,IAAI;AAAA,EACrC,CAAC;AAED,MAAI,QAAQ,KAAK,IAAI;AACrB,MAAI,gBAAgB;AACpB,MAAI;AAEJ,MAAI;AACF,eAAW,MAAM;AAAA,MACf,OAAM,YAAW;AACf,wBAAgB;AAChB,gBAAQ,KAAK,IAAI;AAEjB,cAAM,SAAsD;AAAA,UAC1D;AAAA,UACA,YAAY,wBAAwB,YAAY;AAAA,UAChD,UAAU;AAAA,UACV,QAAQ;AAAA,UACR,OAAO,YAAY,SAAS,IAAI,cAAc;AAAA,UAC9C,aAAa,YAAY,SAAS,IAAI,EAAE,MAAM,OAAO,IAAI;AAAA,QAC3D;AAEA,YAAI,oBAAoB,GAAG;AACzB;AAAC,UAAC,OAAe,gBAAgB;AAAA,YAC/B,kBAAkB;AAAA,UACpB;AACC,UAAC,OAAe,WAAW,EAAE,YAAY,kBAAkB;AAAA,QAC9D;AAGA,oBAAY,IAAI,sCAAsC;AAAA,UACpD,UAAU,cAAc,WAAW;AAAA,UACnC;AAAA,UACA;AAAA,UACA,kBAAkB,CAAC,CAAC,cAAc;AAAA,UAClC,cAAc,cAAc,SACxB,aAAa,OAAO,UAAU,GAAG,CAAC,IAClC;AAAA,UACJ,WAAW,OAAO;AAAA,UAClB,aAAa;AAAA,UACb;AAAA,UACA,cAAc,OAAO,UAAU,UAAU;AAAA,UACzC,YAAY;AAAA,UACZ,YAAY,YAAY;AAAA,UACxB,gBAAgB;AAAA,UAChB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,gBAAgB,cAAc;AAAA,UAC9B,kBAAkB,cAAc;AAAA,QAClC,CAAC;AAED,YAAI,OAAO,QAAQ;AACjB,gBAAM,SAAS,MAAM,UAAU,KAAK,SAAS;AAAA,YAC3C;AAAA,cACE,GAAG;AAAA,cACH,QAAQ;AAAA,YACV;AAAA,YACA;AAAA,cACE;AAAA;AAAA,YACF;AAAA,UACF;AAEA,cAAI,gBAA4B;AAChC,cAAI,oBAAyB;AAC7B,gBAAM,gBAAuB,CAAC;AAC9B,gBAAM,mBAAmB,oBAAI,IAAoB;AACjD,cAAI,QAAa;AACjB,cAAI,aAA4B;AAChC,cAAI,eAA8B;AAElC,2BAAiB,SAAS,QAAQ;AAChC,gBAAI,OAAO,SAAS;AAClB,0BAAY,KAAK,kBAAkB;AAAA,gBACjC,WAAW,MAAM;AAAA,gBACjB,WAAW,KAAK,IAAI;AAAA,cACtB,CAAC;AACD,oBAAM,IAAI,MAAM,uBAAuB;AAAA,YACzC;AAEA,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AACH,oCAAoB;AACpB,gCAAgB;AAAA,kBACd,GAAG,MAAM;AAAA,kBACT,SAAS,CAAC;AAAA;AAAA,gBACZ;AACA;AAAA,cAEF,KAAK;AACH,8BAAc,MAAM,KAAK,IAAI,EAAE,GAAG,MAAM,cAAc;AAEtD,oBAAI,MAAM,cAAc,SAAS,YAAY;AAC3C,mCAAiB,IAAI,MAAM,OAAO,EAAE;AAAA,gBACtC;AACA;AAAA,cAEF,KAAK;AACH,sBAAM,aAAa,MAAM;AAGzB,oBAAI,CAAC,cAAc,UAAU,GAAG;AAC9B,gCAAc,UAAU,IAAI;AAAA,oBAC1B,MACE,MAAM,MAAM,SAAS,eAAe,SAAS;AAAA,oBAC/C,MAAM,MAAM,MAAM,SAAS,eAAe,KAAK;AAAA,kBACjD;AACA,sBAAI,MAAM,MAAM,SAAS,oBAAoB;AAC3C,qCAAiB,IAAI,YAAY,EAAE;AAAA,kBACrC;AAAA,gBACF;AAEA,oBAAI,MAAM,MAAM,SAAS,cAAc;AACrC,gCAAc,UAAU,EAAE,QAAQ,MAAM,MAAM;AAAA,gBAChD,WAAW,MAAM,MAAM,SAAS,oBAAoB;AAClD,wBAAM,gBAAgB,iBAAiB,IAAI,UAAU,KAAK;AAC1D,mCAAiB;AAAA,oBACf;AAAA,oBACA,gBAAgB,MAAM,MAAM;AAAA,kBAC9B;AAAA,gBACF;AACA;AAAA,cAEF,KAAK;AACH,oBAAI,MAAM,MAAM;AACd,+BAAa,MAAM,MAAM;AAC3B,oBAAI,MAAM,MAAM;AACd,iCAAe,MAAM,MAAM;AAC7B,oBAAI,MAAM,MAAO,SAAQ,EAAE,GAAG,OAAO,GAAG,MAAM,MAAM;AACpD;AAAA,cAEF,KAAK;AACH,sBAAM,YAAY,MAAM;AACxB,sBAAM,QAAQ,cAAc,SAAS;AAErC,oBACE,OAAO,SAAS,cAChB,iBAAiB,IAAI,SAAS,GAC9B;AACA,wBAAM,UAAU,iBAAiB,IAAI,SAAS;A
AC9C,sBAAI,SAAS;AACX,wBAAI;AACF,4BAAM,QAAQ,KAAK,MAAM,OAAO;AAAA,oBAClC,SAAS,OAAO;AACd,kCAAY,MAAM,oBAAoB;AAAA,wBACpC,YAAY;AAAA,wBACZ;AAAA,wBACA,OACE,iBAAiB,QACb,MAAM,UACN,OAAO,KAAK;AAAA,sBACpB,CAAC;AACD,4BAAM,QAAQ,CAAC;AAAA,oBACjB;AACA,qCAAiB,OAAO,SAAS;AAAA,kBACnC;AAAA,gBACF;AACA;AAAA,cAEF,KAAK;AAEH,iCAAiB,MAAM;AACvB;AAAA,YACJ;AAEA,gBAAI,MAAM,SAAS,gBAAgB;AACjC;AAAA,YACF;AAAA,UACF;AAEA,cAAI,CAAC,iBAAiB,CAAC,mBAAmB;AACxC,kBAAM,IAAI,MAAM,+CAA+C;AAAA,UACjE;AAGA,0BAAgB;AAAA,YACd,GAAG,kBAAkB;AAAA,YACrB,SAAS,cAAc,OAAO,OAAO;AAAA,YACrC,aAAa;AAAA,YACb,eAAe;AAAA,YACf,OAAO;AAAA,cACL,GAAG,kBAAkB,QAAQ;AAAA,cAC7B,GAAG;AAAA,YACL;AAAA,UACF;AAEA,iBAAO;AAAA,QACT,OAAO;AAEL,sBAAY,IAAI,0CAA0C;AAAA,YACxD,UAAU,cAAc,WAAW;AAAA,YACnC;AAAA,YACA;AAAA,YACA,kBAAkB,CAAC,CAAC,cAAc;AAAA,YAClC,cAAc,cAAc,SACxB,aAAa,OAAO,UAAU,GAAG,CAAC,IAClC;AAAA,YACJ,WAAW,OAAO;AAAA,YAClB,aAAa;AAAA,YACb,cAAc,OAAO,UAAU,UAAU;AAAA,YACzC,YAAY;AAAA,YACZ,YAAY,YAAY;AAAA,YACxB,gBAAgB;AAAA,YAChB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,YAClC,gBAAgB,cAAc;AAAA,YAC9B,kBAAkB,cAAc;AAAA,UAClC,CAAC;AAED,iBAAO,MAAM,UAAU,KAAK,SAAS,OAAO,QAAQ;AAAA,YAClD;AAAA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA,EAAE,OAAO;AAAA,IACX;AAEA,gBAAY,IAAI,8BAA8B;AAAA,MAC5C,SAAS,SAAS;AAAA,IACpB,CAAC;AAED,UAAM,SAAS,QAAQ,KAAK,IAAI;AAChC,UAAM,aAAa,KAAK,IAAI,IAAI;AAEhC,UAAM,UAAU,SAAS,QAAQ,IAAI,CAAC,UAAwB;AAC5D,UAAI,MAAM,SAAS,QAAQ;AACzB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM,MAAM;AAAA,QACd;AAAA,MACF,WAAW,MAAM,SAAS,YAAY;AACpC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,IAAI,MAAM;AAAA,UACV,MAAM,MAAM;AAAA,UACZ,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AACA,aAAO;AAAA,IACT,CAAC;AAED,UAAM,mBAAqC;AAAA,MACzC,SAAS;AAAA,QACP,IAAI,SAAS;AAAA,QACb;AAAA,QACA,OAAO,SAAS;AAAA,QAChB,MAAM;AAAA,QACN,aAAa,SAAS;AAAA,QACtB,eAAe,SAAS;AAAA,QACxB,MAAM;AAAA,QACN,OAAO,SAAS;AAAA,MAClB;AAAA,MACA,MAAM;AAAA,MACN,MAAM,OAAO;AAAA,MACb;AAAA,MACA,SAAS;AAAA;AAAA,IACX;AAIA,UAAM,iBAAiB,OAAO,IAAI,YAAU;AAAA,MAC1C,MAAM;AAAA,MACN,SAAS,MAAM;AAAA,IACjB,EAAE;AAEF,sBAAkB;AAAA,MAChB,cAAc,aAAa,KAAK,IAAI;AAAA,MACpC,UAAU,CAAC,GAAG,gBAAgB,GAAG,iBAAiB;AAAA,MAClD;AAAA,MACA,OAAO,SAAS,QACZ;AAAA,QACE,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B,IACA;AAAA,MACJ,QAAQ;AAAA,QACN;AAAA,QACA,KAAK,KAAK,IAAI;AAAA,MAChB;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAGD,UAAM,cAAc,SAAS,MAAM;AACnC,UAAM,eAAe,SAAS,MAAM;AACpC,UAAM,2BACJ,SAAS,MAAM,+BAA+B;AAChD,UAAM,uBAAuB,SAAS,MAAM,2BAA2B;AAEvE,UAAM,UACH,cAAc,MAAa,0BAA0B,KAAK,IAC1D,eAAe,MAAa,2BAA2B,KAAK,IAC5D,2BAA2B,MAC1B,0BAA0B,KAAK,IAChC,uBAAuB,OACrB,0BAA0B,KAAK,IAAI;AAExC,qBAAiB,UAAU;AAC3B,mBAAe,SAAS,UAAU;AAElC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,WAAO,6BAA6B,KAAK;AAAA,EAC3C;AACF;AAEA,SAAS,6BAA6B,OAAkC;AACtE,MAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,oBAAoB,GAAG;AAC1E,WAAO,+BAA+B,6BAA6B;AAAA,EACrE;AACA,MACE,iBAAiB,SACjB,MAAM,QAAQ,SAAS,gCAAgC,GACvD;AACA,WAAO,+BAA+B,oCAAoC;AAAA,EAC5E;AACA,MACE,iBAAiB,SACjB,MAAM,QAAQ,YAAY,EAAE,SAAS,WAAW,GAChD;AACA,WAAO,+BAA+B,6BAA6B;AAAA,EACrE;AACA,MAAI,iBAAiB,OAAO;AAC1B,QAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,kBAAY,MAAM,uBAAuB;AAAA,QACvC,SAAS,MAAM;AAAA,QACf,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH;AACA,WAAO;AAAA,MACL,GAAG,wBAAwB,KAAK,MAAM,OAAO;AAAA,IAC/C;AAAA,EACF;AACA,SAAO,+BAA+B,wBAAwB;AAChE;AAEA,SAAS,oBACP,UACgB;AAChB,SAAO,SAAS,IAAI,CAAC,KAAK,UAAU;AAClC,WAAO,IAAI,SAAS,SAChB,0BAA0B,KAAK,QAAQ,SAAS,SAAS,CAAC,IAC1D,+BAA+B,KAAK,QAAQ,SAAS,SAAS,CAAC;AAAA,EACrE,CAAC;AACH;AAEA,eAAe,YACb,UACA,cACA,mBACA,OACA,QACA,SAO2B;AAC3B,QAAM,SAAS,gBAAgB;AAC/B,QAAM,eAAe,gBAAgB;AACrC,QAAM,iBAAiB,SAAS;AAEhC,QAAM,eAAe,SAAS,gBAAgB,aAAa,SAAS,MAAM;AAC1E,MAAI;AAGJ,QAAM,iBAAiB,kBAAkB;AACzC,cAAY,IAAI,uBAAuB;AAAA,IACrC,mBAAmB,CAAC,CAAC;AAAA,IACrB,gBAAgB,cAAc;AAAA,IAC9B,kBAAkB,cAAc;AAAA,I
AChC,uBAAuB,cAAc;AAAA,IACrC,sBAAsB,cAAc;AAAA,IACpC,qBAAqB,cAAc;AAAA,IACnC,0BAA0B,CAAC,CAAC,cAAc;AAAA,IAC1C,cAAc,SAAS;AAAA,IACvB,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,MAAI,cAAc;AAChB,YAAQ,aAAa;AAAA,EACvB,OAAO;AACL,YAAQ,SAAS,SAAS,cAAc,aAAa;AAAA,EACvD;AAEA,MAAI,SAAS,qBAAqB;AAChC,UAAM,CAAC,mBAAmB,IAAI,qBAAqB,YAAY;AAE/D,mBAAe,CAAC,sBAAsB,IAAI,YAAY;AAAA,EACxD;AAEA,QAAM,SAA2B,qBAAqB,YAAY,EAAE;AAAA,IAClE,QAAM;AAAA,MACJ,GAAI,yBACA,EAAE,eAAe,EAAE,MAAM,YAAY,EAAE,IACvC,CAAC;AAAA,MACL,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,MAAM;AAAA,MACJ,OAAM,OACH;AAAA,QACC,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,EAAE;AAAA,UACR,aAAa,MAAM,EAAE,OAAO;AAAA,YAC1B,UAAU,SAAS;AAAA,UACrB,CAAC;AAAA;AAAA,UAED,YACE,qBAAqB,KAAK,EAAE,kBACxB,EAAE,kBACF,gBAAgB,EAAE,WAAW;AAAA,QACrC;AAAA,MACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,eAAe,OAAO;AAAA,IAC1B,QACG;AAAA,MACC,MAAM;AAAA,MACN,SAAS,EAAE;AAAA,IACb;AAAA,EACJ;AAEA,QAAM,iBAAiB,yCAAyC,QAAQ;AACxE,QAAM,wBAAwB,KAAK,IAAI;AAGvC,8BAA4B;AAAA,IAC1B,YAAY,aAAa,KAAK,IAAI;AAAA,IAClC,cAAc,qBAAqB,KAAK;AAAA,IACxC,WAAW,CAAC;AAAA;AAAA,IACZ,aAAa,aAAa,KAAK,IAAI;AAAA,EACrC,CAAC;AAED,MAAI,QAAQ,KAAK,IAAI;AACrB,MAAI,gBAAgB;AACpB,MAAI;AAEJ,MAAI;AACF,eAAW,MAAM;AAAA,MACf,OAAM,YAAW;AACf,wBAAgB;AAChB,gBAAQ,KAAK,IAAI;AAEjB,cAAM,YAAY,wBAAwB,YAAY;AACtD,cAAM,SAAS,YAAY,KAAK;AAEhC,cAAM,OAA0C;AAAA,UAC9C;AAAA,UAEA,GAAI,SACA,EAAE,uBAAuB,UAAU,IACnC,EAAE,YAAY,UAAU;AAAA,UAC5B,UAAU,CAAC,GAAG,cAAc,GAAG,cAAc;AAAA,UAE7C,aAAa,SAAS,IAAI;AAAA,QAC5B;AACA,YAAI,OAAO,QAAQ;AACjB;AAAC,UAAC,KAA2C,SAAS;AACtD,eAAK,iBAAiB;AAAA,YACpB,eAAe;AAAA,UACjB;AAAA,QACF;AAEA,YAAI,YAAY,SAAS,GAAG;AAC1B,eAAK,QAAQ;AACb,eAAK,cAAc;AAAA,QACrB;AACA,cAAM,kBAAkB,MAAM,mBAAmB,cAAc,QAAQ;AACvE,YAAI,iBAAiB;AACnB,eAAK,mBAAmB;AAAA,QAC1B;AAEA,YAAI,gBAAgB,aAAa,WAAW;AAC1C,sBAAY,IAAI,4BAA4B;AAAA,YAC1C,kBAAkB,aAAa;AAAA,YAC/B,WAAW,aAAa;AAAA,YACxB,UAAU,aAAa;AAAA,YACvB,SAAS,aAAa;AAAA,YACtB,cAAc,CAAC,CAAC,aAAa;AAAA,YAC7B,WAAW,kBAAkB,GAAG;AAAA,UAClC,CAAC;AAGD,gBAAM,yBACJ,QAAQ,IAAI,qBAAqB;AAEnC,cAAI,wBAAwB;AAE1B,kBAAM,UAAU,oBAAoB,cAAc,YAAY;AAG9D,kBAAM,gBAAsC;AAAA,cAC1C,UAAU;AAAA,cACV,cAAc,aAAa,IAAI,OAAK,EAAE,OAAiB;AAAA,cACvD;AAAA,cACA,WAAW,wBAAwB,YAAY;AAAA,cAC/C,QAAQ,OAAO;AAAA,cACf;AAAA,cACA,aAAa,YAAY,KAAK,IAAI,IAAI;AAAA,cACtC,oBACE,gBAAgB,eAAe;AAAA,cACjC,WAAW;AAAA;AAAA,YACb;AAGA,kBAAM,UAAU,QAAQ,cAAc,aAAa;AAGnD,gBAAI,oBAAoB,sBAAsB,YAAY,GAAG;AAE3D,oBAAM,EAAE,qBAAqB,IAAI,MAAM,OAAO,UAAU;AACxD,oBAAMA,YAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AACA,oBAAM,kBAAkB,QAAQ,cAAcA,SAAQ;AAGtD,oBAAM,aAAa;AAAA,gBACjB,MAAM;AAAA,gBACN,SAAS,gBAAgB;AAAA,gBACzB,YAAY,gBAAgB;AAAA,gBAC5B,OAAO;AAAA,kBACL,eAAe,gBAAgB,MAAM;AAAA,kBACrC,mBAAmB,gBAAgB,MAAM;AAAA,gBAC3C;AAAA,cACF;AACA,oBAAM,eAAiC;AAAA,gBACrC,MAAM;AAAA,gBACN,SAAS;AAAA,gBACT,SAAS;AAAA;AAAA,gBACT,YAAY,KAAK,IAAI,IAAI;AAAA,gBACzB,MAAM,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAAA,gBAC9D,YAAY,gBAAgB;AAAA;AAAA,cAC9B;AACA,qBAAO;AAAA,YACT,OAAO;AAEL,oBAAM,IAAI,MAAM;AAAA,gBACd;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AACA,kBAAI;AACJ,kBAAI,OAAO,QAAQ;AACjB,gCAAgB,MAAM;AAAA,kBACpB;AAAA,kBACA;AAAA,gBACF;AAAA,cACF,OAAO;AACL,gCAAgB;AAAA,cAClB;AACA,oBAAM,IAAI,iCAAiC,eAAe,KAAK;AAC/D,qBAAO;AAAA,YACT;AAAA,UACF,OAAO;AAEL,kBAAM,qBAAqB,YAAY,aAAa,SAAS,IACzD,+BACA;AACJ,kBAAM,IAAI,MAAM;AAAA,cACd;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,YACF;AACA,gBAAI;AACJ,gBAAI,KAAK,QAAQ;AACf,8BAAgB,MAAM;AAAA,gBACpB;AAAA,gBACA;AAAA,cACF;AAAA,YACF,OAAO;AACL,8BAAgB;AAAA,YAClB;AACA,kBAAM,IAAI,iCAAiC,eAAe,KAAK;AAC/D,mBAAO;AAAA,UACT;AAAA,QACF,OAAO;AAEL,sBAAY,IAAI,qBAAqB;AAAA,YACnC,oBAAoB,CAAC,CAAC;AAAA,YACtB,gBAA
gB,cAAc;AAAA,YAC9B,iBAAiB,CAAC,CAAC,cAAc;AAAA,YACjC,eAAe;AAAA,YACf,aAAa;AAAA,YACb,WAAW,kBAAkB,GAAG;AAAA,UAClC,CAAC;AAGD,gBAAM,eAAe;AAAA,YACnB,oBAAoB,CAAC,CAAC;AAAA,YACtB,gBAAgB,cAAc;AAAA,YAC9B,iBAAiB,CAAC,CAAC,cAAc;AAAA,YACjC,gBAAgB;AAAA,YAChB,WAAW,kBAAkB,GAAG;AAAA,UAClC;AACA,sBAAY,MAAM,0BAA0B,YAAY;AACxD,gBAAM,IAAI;AAAA,YACR,8CAA8C,KAAK,2DAA2D,KAAK,UAAU,YAAY,CAAC;AAAA,UAC5I;AAAA,QACF;AAAA,MACF;AAAA,MACA,EAAE,OAAO;AAAA,IACX;AAAA,EACF,SAAS,OAAO;AACd,aAAS,KAAK;AACd,WAAO,6BAA6B,KAAK;AAAA,EAC3C;AACA,QAAM,aAAa,KAAK,IAAI,IAAI;AAChC,QAAM,6BAA6B,KAAK,IAAI,IAAI;AAEhD,QAAM,cAAc,SAAS,OAAO,iBAAiB;AACrD,QAAM,eAAe,SAAS,OAAO,qBAAqB;AAC1D,QAAM,uBACJ,SAAS,OAAO,sBAAsB,iBAAiB;AACzD,QAAM,2BACJ,SAAS,OAAO,sBAAsB,iBAAiB;AACzD,QAAM,UACH,cAAc,MAAa,uCAC3B,eAAe,MAAa,wCAC5B,uBAAuB,MACtB,mDACD,2BAA2B,MAC1B;AAEJ,iBAAe,SAAS,0BAA0B;AAGlD,oBAAkB;AAAA,IAChB,cAAc,aAAa,KAAK,IAAI;AAAA,IACpC,UAAU,CAAC,GAAG,cAAc,GAAG,cAAc;AAAA,IAC7C;AAAA,IACA,OAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN;AAAA,MACA,KAAK,KAAK,IAAI;AAAA,IAChB;AAAA,IACA,WAAW;AAAA,EACb,CAAC;AAED,SAAO;AAAA,IACL,SAAS;AAAA,MACP,GAAG;AAAA,MACH,SAAS,wBAAwB,SAAS,OAAO;AAAA,MACjD,OAAO;AAAA,QACL,cAAc;AAAA,QACd,eAAe;AAAA,QACf,yBAAyB;AAAA,QACzB,6BAA6B;AAAA,MAC/B;AAAA,IACF;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,MAAM,WAAW;AAAA,EACnB;AACF;AAEA,SAAS,wBAAwB,cAA2B;AAE1D,SAAO,cAAc,aAAa;AACpC;AAEA,SAAS,0BAA0B,OAAuB;AAExD,aAAW,kBAAkB,OAAO,OAAO,MAAM,GAAG;AAClD,UAAM,YAAY,eAAe,KAAK,CAAC,MAAW,EAAE,UAAU,KAAK;AACnE,QAAI,WAAW;AACb,aAAO,UAAU,wBAAwB;AAAA,IAC3C;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,2BAA2B,OAAuB;AAEzD,aAAW,kBAAkB,OAAO,OAAO,MAAM,GAAG;AAClD,UAAM,YAAY,eAAe,KAAK,CAAC,MAAW,EAAE,UAAU,KAAK;AACnE,QAAI,WAAW;AACb,aAAO,UAAU,yBAAyB;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO;AACT;AAGA,eAAsB,WACpB,cACA,UACA,eAAyB,CAAC,GAC1B,QAC2B;AAE3B,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IACA,CAAC;AAAA;AAAA,IACD,UAAU,IAAI,gBAAgB,EAAE;AAAA,IAChC;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,qBAAqB;AAAA,IACvB;AAAA,EACF;AACF;AAKA,eAAsB,WAAW;AAAA,EAC/B,eAAe,CAAC;AAAA,EAChB;AAAA,EACA;AAAA,EACA,sBAAsB;AAAA,EACtB;AACF,GAM8B;AAC5B,QAAM,WAAW;AAAA,IACf;AAAA,MACE,SAAS,EAAE,MAAM,QAAQ,SAAS,WAAW;AAAA,MAC7C,MAAM;AAAA,MACN,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AAEA,SAAO,WAAW,SAAS,UAAU,cAAc,MAAM;AAC3D;",
6   -   "names": ["response"]
4   +
"sourcesContent": ["import '@anthropic-ai/sdk/shims/node'\nimport Anthropic, { APIConnectionError, APIError } from '@anthropic-ai/sdk'\nimport { AnthropicBedrock } from '@anthropic-ai/bedrock-sdk'\nimport { AnthropicVertex } from '@anthropic-ai/vertex-sdk'\nimport chalk from 'chalk'\nimport { randomUUID, UUID } from 'crypto'\nimport 'dotenv/config'\n\nimport { addToTotalCost } from '@costTracker'\nimport { recordTokenUsage } from '@core/tokenStatsManager'\nimport type { TokenTrackingContext } from '@core/tokenStats'\nimport models from '@constants/models'\nimport type { AssistantMessage, UserMessage } from '@query'\nimport { Tool, getToolDescriptionAsync } from '@tool'\nimport {\n getAnthropicApiKey,\n getOrCreateUserID,\n getGlobalConfig,\n ModelProfile,\n} from '@utils/config'\nimport { getProjectDocs } from '@context'\nimport { logError, SESSION_ID } from '@utils/log'\nimport { abortableDelay } from '@utils/async'\nimport { USER_AGENT } from '@utils/http'\nimport {\n createAssistantAPIErrorMessage,\n normalizeContentFromAPI,\n} from '@utils/messages'\nimport { withVCR } from './vcr'\nimport {\n debug as debugLogger,\n markPhase,\n getCurrentRequest,\n logLLMInteraction,\n logSystemPromptConstruction,\n logErrorWithDiagnosis,\n} from '@utils/debugLogger'\nimport { getModelManager } from '@utils/model'\nimport { zodToJsonSchema } from 'zod-to-json-schema'\nimport { ModelAdapterFactory } from './modelAdapterFactory'\nimport { UnifiedRequestParams } from '@minto-types/modelCapabilities'\nimport { responseStateManager, getConversationId } from './responseStateManager'\nimport type { ToolUseContext } from '@tool'\nimport type {\n MessageParam,\n TextBlockParam,\n} from '@anthropic-ai/sdk/resources/index.mjs'\nimport { USE_BEDROCK, USE_VERTEX } from '@utils/model'\nimport { getCLISyspromptPrefix } from '@constants/prompts'\nimport { getVertexRegionForModel } from '@utils/model'\nimport OpenAI from 'openai'\nimport type { ChatCompletionStream } from 'openai/lib/ChatCompletionStream'\nimport { ContentBlock } from '@anthropic-ai/sdk/resources/messages/messages'\nimport { nanoid } from 'nanoid'\nimport {\n getCompletionWithProfile,\n getGPT5CompletionWithProfile,\n} from './openai'\nimport { getReasoningEffort } from '@utils/thinking'\nimport { generateSystemReminders } from './systemReminder'\nimport { addRetryEventToTranscript } from '@utils/agentTranscripts'\nimport { setStreamingState, getStreamingState } from '@components/Spinner'\nimport type { ThinkingMetadata } from '@minto-types/thinking'\n\n// Helper function to check if a model is GPT-5\nfunction isGPT5Model(modelName: string): boolean {\n return modelName.startsWith('gpt-5')\n}\n\n// MintoContext\u7BA1\u7406\u5668 - \u7528\u4E8E\u9879\u76EE\u6587\u6863\u7684\u540C\u6B65\u7F13\u5B58\u548C\u8BBF\u95EE\nclass MintoContextManager {\n private static instance: MintoContextManager\n private projectDocsCache: string = ''\n private cacheInitialized: boolean = false\n private initPromise: Promise<void> | null = null\n\n private constructor() {}\n\n public static getInstance(): MintoContextManager {\n if (!MintoContextManager.instance) {\n MintoContextManager.instance = new MintoContextManager()\n }\n return MintoContextManager.instance\n }\n\n public async initialize(): Promise<void> {\n if (this.cacheInitialized) return\n\n if (this.initPromise) {\n return this.initPromise\n }\n\n this.initPromise = this.loadProjectDocs()\n await this.initPromise\n }\n\n private async loadProjectDocs(): Promise<void> {\n try {\n const projectDocs = await 
getProjectDocs()\n this.projectDocsCache = projectDocs || ''\n this.cacheInitialized = true\n\n // \u5728\u8C03\u8BD5\u6A21\u5F0F\u4E0B\u8BB0\u5F55\u52A0\u8F7D\u7ED3\u679C\n if (process.env.NODE_ENV === 'development') {\n debugLogger.info('MINTO_CONTEXT_LOADED', {\n characters: this.projectDocsCache.length,\n })\n }\n } catch (error) {\n debugLogger.warn('MINTO_CONTEXT_LOAD_FAILED', { error: String(error) })\n this.projectDocsCache = ''\n this.cacheInitialized = true\n }\n }\n\n public getMintoContext(): string {\n if (!this.cacheInitialized) {\n // \u5982\u679C\u672A\u521D\u59CB\u5316\uFF0C\u5F02\u6B65\u521D\u59CB\u5316\u4F46\u7ACB\u5373\u8FD4\u56DE\u7A7A\u5B57\u7B26\u4E32\n this.initialize().catch(error => {\n debugLogger.warn('MINTO_CONTEXT_INIT_FAILED', { error: String(error) })\n })\n return ''\n }\n return this.projectDocsCache\n }\n\n public async refreshCache(): Promise<void> {\n this.cacheInitialized = false\n this.initPromise = null\n await this.initialize()\n }\n}\n\n// \u5BFC\u51FA\u51FD\u6570\u4FDD\u6301\u5411\u540E\u517C\u5BB9\nconst mintoContextManager = MintoContextManager.getInstance()\n\n// \u5728\u6A21\u5757\u52A0\u8F7D\u65F6\u5F02\u6B65\u521D\u59CB\u5316\nmintoContextManager.initialize().catch(error => {\n debugLogger.warn('MINTO_CONTEXT_MODULE_INIT_FAILED', { error: String(error) })\n})\n\nexport const generateMintoContext = (): string => {\n return mintoContextManager.getMintoContext()\n}\n\nexport const refreshMintoContext = async (): Promise<void> => {\n await mintoContextManager.refreshCache()\n}\n\nexport const API_ERROR_MESSAGE_PREFIX = 'API Error'\nexport const PROMPT_TOO_LONG_ERROR_MESSAGE = 'Prompt is too long'\nexport const CREDIT_BALANCE_TOO_LOW_ERROR_MESSAGE = 'Credit balance is too low'\nexport const INVALID_API_KEY_ERROR_MESSAGE =\n 'Invalid API key \u00B7 Please run /login'\nexport const NO_CONTENT_MESSAGE = '(no content)'\nconst PROMPT_CACHING_ENABLED = !process.env.DISABLE_PROMPT_CACHING\n\n// @see https://docs.anthropic.com/en/docs/about-claude/models#model-comparison-table\nconst SONNET_COST_PER_MILLION_INPUT_TOKENS = 3\nconst SONNET_COST_PER_MILLION_OUTPUT_TOKENS = 15\nconst SONNET_COST_PER_MILLION_PROMPT_CACHE_WRITE_TOKENS = 3.75\nconst SONNET_COST_PER_MILLION_PROMPT_CACHE_READ_TOKENS = 0.3\n\nexport const MAIN_QUERY_TEMPERATURE = 1 // to get more variation for binary feedback\n\nfunction getMetadata() {\n return {\n user_id: `${getOrCreateUserID()}_${SESSION_ID}`,\n }\n}\n\nconst MAX_RETRIES = process.env.USER_TYPE === 'SWE_BENCH' ? 100 : 10\nconst BASE_DELAY_MS = 500\n\ninterface RetryOptions {\n maxRetries?: number\n signal?: AbortSignal\n /** Callback for retry events. If provided, suppresses console.log output. 
*/\n onRetry?: (info: {\n attempt: number\n maxRetries: number\n error: Error\n delayMs: number\n }) => void\n}\n\nfunction getRetryDelay(\n attempt: number,\n retryAfterHeader?: string | null,\n): number {\n if (retryAfterHeader) {\n const seconds = parseInt(retryAfterHeader, 10)\n if (!isNaN(seconds)) {\n return seconds * 1000\n }\n }\n return Math.min(BASE_DELAY_MS * Math.pow(2, attempt - 1), 32000) // Max 32s delay\n}\n\nfunction shouldRetry(error: APIError): boolean {\n // Check for overloaded errors first and only retry for SWE_BENCH\n if (error.message?.includes('\"type\":\"overloaded_error\"')) {\n return process.env.USER_TYPE === 'SWE_BENCH'\n }\n\n // Note this is not a standard header.\n const shouldRetryHeader = error.headers?.['x-should-retry']\n\n // If the server explicitly says whether or not to retry, obey.\n if (shouldRetryHeader === 'true') return true\n if (shouldRetryHeader === 'false') return false\n\n if (error instanceof APIConnectionError) {\n return true\n }\n\n if (!error.status) return false\n\n // Retry on request timeouts.\n if (error.status === 408) return true\n\n // Retry on lock timeouts.\n if (error.status === 409) return true\n\n // Retry on rate limits.\n if (error.status === 429) return true\n\n // Retry internal errors.\n if (error.status && error.status >= 500) return true\n\n return false\n}\n\nasync function withRetry<T>(\n operation: (attempt: number) => Promise<T>,\n options: RetryOptions = {},\n): Promise<T> {\n const maxRetries = options.maxRetries ?? MAX_RETRIES\n let lastError: unknown\n\n for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {\n try {\n return await operation(attempt)\n } catch (error) {\n lastError = error\n // Only retry if the error indicates we should\n if (\n attempt > maxRetries ||\n !(error instanceof APIError) ||\n !shouldRetry(error)\n ) {\n throw error\n }\n\n if (options.signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n\n // Get retry-after header if available\n const retryAfter = error.headers?.['retry-after'] ?? null\n const delayMs = getRetryDelay(attempt, retryAfter)\n\n // \u2705 P1 FIX: Update streaming state to show retry in UI\n setStreamingState({\n phase: 'retrying',\n retryCount: attempt,\n maxRetries: maxRetries,\n errorName: error.name,\n })\n\n debugLogger.flow('API_RETRY_ATTEMPT', {\n attempt,\n maxRetries,\n errorName: error.name,\n errorMessage: error.message,\n delayMs,\n })\n\n // Use callback if provided, otherwise fallback to console.log\n if (options.onRetry) {\n options.onRetry({\n attempt,\n maxRetries,\n error,\n delayMs,\n })\n } else {\n console.log(\n ` \u23BF ${chalk.red(`API ${error.name} (${error.message}) \u00B7 Retrying in ${Math.round(delayMs / 1000)} seconds\u2026 (attempt ${attempt}/${maxRetries})`)}`,\n )\n }\n\n try {\n await abortableDelay(delayMs, options.signal)\n } catch (delayError) {\n // If aborted during delay, throw the error to stop retrying\n if (delayError.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw delayError\n }\n }\n }\n\n throw lastError\n}\n\n/**\n * Fetch available models from Anthropic API\n */\nexport async function fetchAnthropicModels(\n baseURL: string,\n apiKey: string,\n): Promise<any[]> {\n try {\n // Use provided baseURL or default to official Anthropic API\n const modelsURL = baseURL\n ? 
`${baseURL.replace(/\\/+$/, '')}/v1/models`\n : 'https://api.anthropic.com/v1/models'\n\n const response = await fetch(modelsURL, {\n method: 'GET',\n headers: {\n 'x-api-key': apiKey,\n 'anthropic-version': '2023-06-01',\n 'User-Agent': USER_AGENT,\n },\n })\n\n if (!response.ok) {\n // Provide user-friendly error messages based on status code\n if (response.status === 401) {\n throw new Error(\n 'Invalid API key. Please check your Anthropic API key and try again.',\n )\n } else if (response.status === 403) {\n throw new Error(\n 'API key does not have permission to access models. Please check your API key permissions.',\n )\n } else if (response.status === 429) {\n throw new Error(\n 'Too many requests. Please wait a moment and try again.',\n )\n } else if (response.status >= 500) {\n throw new Error(\n 'Anthropic service is temporarily unavailable. Please try again later.',\n )\n } else {\n throw new Error(\n `Unable to connect to Anthropic API (${response.status}). Please check your internet connection and API key.`,\n )\n }\n }\n\n const data = await response.json()\n return data.data || []\n } catch (error) {\n // If it's already our custom error, pass it through\n if (\n (error instanceof Error && error.message.includes('API key')) ||\n (error instanceof Error && error.message.includes('Anthropic'))\n ) {\n throw error\n }\n\n // For network errors or other issues\n logError(error)\n throw new Error(\n 'Unable to connect to Anthropic API. Please check your internet connection and try again.',\n )\n }\n}\n\nexport async function verifyApiKey(\n apiKey: string,\n baseURL?: string,\n provider?: string,\n): Promise<boolean> {\n if (!apiKey) {\n return false\n }\n\n // For non-Anthropic providers, use OpenAI-compatible verification\n if (provider && provider !== 'anthropic') {\n try {\n const headers: Record<string, string> = {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n }\n\n if (!baseURL) {\n debugLogger.warn('API_VERIFY', {\n error: 'No baseURL provided for non-Anthropic provider',\n })\n return false\n }\n\n const modelsURL = `${baseURL.replace(/\\/+$/, '')}/models`\n\n const response = await fetch(modelsURL, {\n method: 'GET',\n headers,\n })\n\n return response.ok\n } catch (error) {\n debugLogger.warn('API_VERIFY', {\n error: 'non-Anthropic provider verification failed',\n details: String(error),\n })\n return false\n }\n }\n\n // For Anthropic and Anthropic-compatible APIs\n const clientConfig: any = {\n apiKey,\n dangerouslyAllowBrowser: true,\n maxRetries: 3,\n defaultHeaders: {\n 'User-Agent': USER_AGENT,\n },\n }\n\n // Only add baseURL for true Anthropic-compatible APIs\n if (\n baseURL &&\n (provider === 'anthropic' ||\n provider === 'bigdream' ||\n provider === 'opendev')\n ) {\n clientConfig.baseURL = baseURL\n }\n\n const anthropic = new Anthropic(clientConfig)\n\n try {\n await withRetry(\n async () => {\n const model = 'claude-sonnet-4-20250514'\n const messages: MessageParam[] = [{ role: 'user', content: 'test' }]\n await anthropic.messages.create({\n model,\n max_tokens: 1000, // Simple test token limit for API verification\n messages,\n temperature: 0,\n metadata: getMetadata(),\n })\n return true\n },\n { maxRetries: 2 }, // Use fewer retries for API key verification\n )\n return true\n } catch (error) {\n logError(error)\n // Check for authentication error\n if (\n error instanceof Error &&\n error.message.includes(\n '{\"type\":\"error\",\"error\":{\"type\":\"authentication_error\",\"message\":\"invalid x-api-key\"}}',\n )\n ) {\n 
return false\n }\n throw error\n }\n}\n\nfunction convertAnthropicMessagesToOpenAIMessages(\n messages: (UserMessage | AssistantMessage)[],\n): (\n | OpenAI.ChatCompletionMessageParam\n | OpenAI.ChatCompletionToolMessageParam\n)[] {\n const openaiMessages: (\n | OpenAI.ChatCompletionMessageParam\n | OpenAI.ChatCompletionToolMessageParam\n )[] = []\n\n const toolResults: Record<string, OpenAI.ChatCompletionToolMessageParam> = {}\n\n for (const message of messages) {\n let contentBlocks = []\n if (typeof message.message.content === 'string') {\n contentBlocks = [\n {\n type: 'text',\n text: message.message.content,\n },\n ]\n } else if (!Array.isArray(message.message.content)) {\n contentBlocks = [message.message.content]\n } else {\n contentBlocks = message.message.content\n }\n\n for (const block of contentBlocks) {\n if (block.type === 'text') {\n openaiMessages.push({\n role: message.message.role,\n content: block.text,\n })\n } else if (block.type === 'tool_use') {\n openaiMessages.push({\n role: 'assistant',\n content: undefined,\n tool_calls: [\n {\n type: 'function',\n function: {\n name: block.name,\n arguments: JSON.stringify(block.input),\n },\n id: block.id,\n },\n ],\n })\n } else if (block.type === 'tool_result') {\n // Ensure content is always a string for role:tool messages\n let toolContent = block.content\n if (typeof toolContent !== 'string') {\n // Convert content to string if it's not already\n toolContent = JSON.stringify(toolContent)\n }\n\n toolResults[block.tool_use_id] = {\n role: 'tool',\n content: toolContent,\n tool_call_id: block.tool_use_id,\n }\n }\n }\n }\n\n const finalMessages: (\n | OpenAI.ChatCompletionMessageParam\n | OpenAI.ChatCompletionToolMessageParam\n )[] = []\n\n for (const message of openaiMessages) {\n finalMessages.push(message)\n\n if ('tool_calls' in message && message.tool_calls) {\n for (const toolCall of message.tool_calls) {\n if (toolResults[toolCall.id]) {\n finalMessages.push(toolResults[toolCall.id])\n }\n }\n }\n }\n\n return finalMessages\n}\n\nfunction messageReducer(\n previous: OpenAI.ChatCompletionMessage,\n item: OpenAI.ChatCompletionChunk,\n): OpenAI.ChatCompletionMessage {\n const reduce = (acc: any, delta: OpenAI.ChatCompletionChunk.Choice.Delta) => {\n acc = { ...acc }\n for (const [key, value] of Object.entries(delta)) {\n if (acc[key] === undefined || acc[key] === null) {\n acc[key] = value\n // OpenAI.Chat.Completions.ChatCompletionMessageToolCall does not have a key, .index\n if (Array.isArray(acc[key])) {\n for (const arr of acc[key]) {\n delete arr.index\n }\n }\n } else if (typeof acc[key] === 'string' && typeof value === 'string') {\n acc[key] += value\n } else if (typeof acc[key] === 'number' && typeof value === 'number') {\n acc[key] = value\n } else if (Array.isArray(acc[key]) && Array.isArray(value)) {\n const accArray = acc[key]\n for (let i = 0; i < value.length; i++) {\n const { index, ...chunkTool } = value[i]\n if (index - accArray.length > 1) {\n throw new Error(\n `Error: An array has an empty value when tool_calls are constructed. 
tool_calls: ${accArray}; tool: ${value}`,\n )\n }\n accArray[index] = reduce(accArray[index], chunkTool)\n }\n } else if (typeof acc[key] === 'object' && typeof value === 'object') {\n acc[key] = reduce(acc[key], value)\n }\n }\n return acc\n }\n\n const choice = item.choices?.[0]\n if (!choice) {\n // chunk contains information about usage and token counts\n return previous\n }\n return reduce(previous, choice.delta) as OpenAI.ChatCompletionMessage\n}\nasync function handleMessageStream(\n stream: ChatCompletionStream,\n signal?: AbortSignal,\n): Promise<OpenAI.ChatCompletion> {\n const streamStartTime = Date.now()\n let ttftMs: number | undefined\n let chunkCount = 0\n let errorCount = 0\n\n debugLogger.api('OPENAI_STREAM_START', {\n streamStartTime: String(streamStartTime),\n })\n\n let message = {} as OpenAI.ChatCompletionMessage\n\n let id, model, created, object, usage\n try {\n for await (const chunk of stream) {\n if (signal?.aborted) {\n debugLogger.flow('OPENAI_STREAM_ABORTED', {\n chunkCount,\n timestamp: Date.now(),\n })\n throw new Error('Request was cancelled')\n }\n\n chunkCount++\n\n try {\n if (!id) {\n id = chunk.id\n debugLogger.api('OPENAI_STREAM_ID_RECEIVED', {\n id,\n chunkNumber: String(chunkCount),\n })\n }\n if (!model) {\n model = chunk.model\n debugLogger.api('OPENAI_STREAM_MODEL_RECEIVED', {\n model,\n chunkNumber: String(chunkCount),\n })\n }\n if (!created) {\n created = chunk.created\n }\n if (!object) {\n object = chunk.object\n }\n if (!usage && chunk.usage) {\n usage = chunk.usage\n // Update streaming state with token usage (OpenAI provides usage in final chunk)\n setStreamingState({\n inputTokens: chunk.usage.prompt_tokens,\n outputTokens: chunk.usage.completion_tokens,\n })\n }\n\n message = messageReducer(message, chunk)\n\n if (chunk?.choices?.[0]?.delta?.content) {\n if (!ttftMs) {\n ttftMs = Date.now() - streamStartTime\n debugLogger.api('OPENAI_STREAM_FIRST_TOKEN', {\n ttftMs: String(ttftMs),\n chunkNumber: String(chunkCount),\n })\n }\n }\n } catch (chunkError) {\n errorCount++\n debugLogger.error('OPENAI_STREAM_CHUNK_ERROR', {\n chunkNumber: String(chunkCount),\n errorMessage:\n chunkError instanceof Error\n ? chunkError.message\n : String(chunkError),\n errorType:\n chunkError instanceof Error\n ? chunkError.constructor.name\n : typeof chunkError,\n })\n // Continue processing other chunks\n }\n }\n\n debugLogger.api('OPENAI_STREAM_COMPLETE', {\n totalChunks: String(chunkCount),\n errorCount: String(errorCount),\n totalDuration: String(Date.now() - streamStartTime),\n ttftMs: String(ttftMs || 0),\n finalMessageId: id || 'undefined',\n })\n } catch (streamError) {\n debugLogger.error('OPENAI_STREAM_FATAL_ERROR', {\n totalChunks: String(chunkCount),\n errorCount: String(errorCount),\n errorMessage:\n streamError instanceof Error\n ? streamError.message\n : String(streamError),\n errorType:\n streamError instanceof Error\n ? 
streamError.constructor.name\n : typeof streamError,\n })\n throw streamError\n }\n return {\n id,\n created,\n model,\n object,\n choices: [\n {\n index: 0,\n message,\n finish_reason: 'stop',\n logprobs: undefined,\n },\n ],\n usage,\n }\n}\n\nfunction convertOpenAIResponseToAnthropic(\n response: OpenAI.ChatCompletion,\n _tools?: Tool[],\n) {\n let contentBlocks: ContentBlock[] = []\n const message = response.choices?.[0]?.message\n if (!message) {\n return {\n role: 'assistant',\n content: [],\n stop_reason: response.choices?.[0]?.finish_reason,\n type: 'message',\n usage: response.usage,\n }\n }\n\n if (message?.tool_calls) {\n for (const toolCall of message.tool_calls) {\n const tool = toolCall.function\n const toolName = tool?.name\n let toolArgs = {}\n let parseError: string | null = null\n try {\n toolArgs = tool?.arguments ? JSON.parse(tool.arguments) : {}\n } catch (e) {\n // Invalid JSON in tool arguments - log for debugging\n parseError = e instanceof Error ? e.message : String(e)\n debugLogger.warn('TOOL_ARGUMENTS_PARSE_ERROR', {\n toolName,\n rawArguments: tool?.arguments?.substring(0, 200), // Limit log size\n errorMessage: parseError,\n })\n }\n\n // If parsing failed and we have empty args, add context about the parse failure\n if (parseError && Object.keys(toolArgs).length === 0) {\n debugLogger.error('TOOL_CALL_EMPTY_ARGUMENTS', {\n toolName,\n parseError,\n suggestion:\n 'Model may have sent malformed JSON. The tool call will likely fail validation.',\n })\n }\n\n contentBlocks.push({\n type: 'tool_use',\n input: toolArgs,\n name: toolName,\n id: toolCall.id?.length > 0 ? toolCall.id : nanoid(),\n })\n }\n }\n\n if ((message as any).reasoning) {\n contentBlocks.push({\n type: 'thinking',\n thinking: (message as any).reasoning,\n signature: '',\n })\n }\n\n // NOTE: For deepseek api, the key for its returned reasoning process is reasoning_content\n if ((message as any).reasoning_content) {\n contentBlocks.push({\n type: 'thinking',\n thinking: (message as any).reasoning_content,\n signature: '',\n })\n }\n\n if (message.content) {\n contentBlocks.push({\n type: 'text',\n text: message?.content,\n citations: [],\n })\n }\n\n const finalMessage = {\n role: 'assistant',\n content: contentBlocks,\n stop_reason: response.choices?.[0]?.finish_reason,\n type: 'message',\n usage: response.usage,\n }\n\n return finalMessage\n}\n\nlet anthropicClient: Anthropic | AnthropicBedrock | AnthropicVertex | null =\n null\n\n/**\n * Get the Anthropic client, creating it if it doesn't exist\n */\nexport function getAnthropicClient(\n model?: string,\n): Anthropic | AnthropicBedrock | AnthropicVertex {\n const config = getGlobalConfig()\n const provider = config.primaryProvider\n\n // Reset client if provider has changed to ensure correct configuration\n if (anthropicClient && provider) {\n // Always recreate client for provider-specific configurations\n anthropicClient = null\n }\n\n if (anthropicClient) {\n return anthropicClient\n }\n\n const region = getVertexRegionForModel(model)\n\n const defaultHeaders: { [key: string]: string } = {\n 'x-app': 'cli',\n 'User-Agent': USER_AGENT,\n }\n if (process.env.ANTHROPIC_AUTH_TOKEN) {\n defaultHeaders['Authorization'] =\n `Bearer ${process.env.ANTHROPIC_AUTH_TOKEN}`\n }\n\n const ARGS = {\n defaultHeaders,\n maxRetries: 0, // Disabled auto-retry in favor of manual implementation\n timeout: parseInt(process.env.API_TIMEOUT_MS || String(60 * 1000), 10),\n }\n if (USE_BEDROCK) {\n const client = new AnthropicBedrock(ARGS)\n anthropicClient = 
client\n return client\n }\n if (USE_VERTEX) {\n const vertexArgs = {\n ...ARGS,\n region: region || process.env.CLOUD_ML_REGION || 'us-east5',\n }\n const client = new AnthropicVertex(vertexArgs)\n anthropicClient = client\n return client\n }\n\n // Get appropriate API key and baseURL from ModelProfile\n const modelManager = getModelManager()\n const modelProfile = modelManager.getModel('main')\n\n let apiKey: string\n let baseURL: string | undefined\n\n if (modelProfile) {\n apiKey = modelProfile.apiKey || ''\n baseURL = modelProfile.baseURL\n } else {\n // Fallback to default anthropic if no ModelProfile\n apiKey = getAnthropicApiKey()\n baseURL = undefined\n }\n\n if (process.env.USER_TYPE === 'ant' && !apiKey && provider === 'anthropic') {\n console.error(\n chalk.red(\n '[ANT-ONLY] Please set the ANTHROPIC_API_KEY environment variable to use the CLI. To create a new key, go to https://console.anthropic.com/settings/keys.',\n ),\n )\n }\n\n // Create client with custom baseURL for BigDream/OpenDev\n // Anthropic SDK will append the appropriate paths (like /v1/messages)\n const clientConfig = {\n apiKey,\n dangerouslyAllowBrowser: true,\n ...ARGS,\n ...(baseURL && { baseURL }), // Use baseURL directly, SDK will handle API versioning\n }\n\n anthropicClient = new Anthropic(clientConfig)\n return anthropicClient\n}\n\n/**\n * Reset the Anthropic client to null, forcing a new client to be created on next use\n */\nexport function resetAnthropicClient(): void {\n anthropicClient = null\n}\n\n/**\n * Environment variables for different client types:\n *\n * Direct API:\n * - ANTHROPIC_API_KEY: Required for direct API access\n *\n * AWS Bedrock:\n * - AWS credentials configured via aws-sdk defaults\n *\n * Vertex AI:\n * - Model-specific region variables (highest priority):\n * - VERTEX_REGION_CLAUDE_3_5_HAIKU: Region for Claude 3.5 Haiku model\n * - VERTEX_REGION_CLAUDE_3_5_SONNET: Region for Claude 3.5 Sonnet model\n * - VERTEX_REGION_CLAUDE_3_7_SONNET: Region for Claude 3.7 Sonnet model\n * - CLOUD_ML_REGION: Optional. The default GCP region to use for all models\n * If specific model region not specified above\n * - ANTHROPIC_VERTEX_PROJECT_ID: Required. Your GCP project ID\n * - Standard GCP credentials configured via google-auth-library\n *\n * Priority for determining region:\n * 1. Hardcoded model-specific environment variables\n * 2. Global CLOUD_ML_REGION variable\n * 3. Default region from config\n * 4. Fallback region (us-east5)\n */\n\n/**\n * Manage cache control to ensure it doesn't exceed Claude's 4 cache block limit\n * Priority:\n * 1. System prompts (high priority)\n * 2. Long documents or reference materials (high priority)\n * 3. Reusable context (medium priority)\n * 4. Short messages or one-time content (no caching)\n */\nfunction applyCacheControlWithLimits(\n systemBlocks: TextBlockParam[],\n messageParams: MessageParam[],\n): { systemBlocks: TextBlockParam[]; messageParams: MessageParam[] } {\n if (!PROMPT_CACHING_ENABLED) {\n return { systemBlocks, messageParams }\n }\n\n const maxCacheBlocks = 4\n let usedCacheBlocks = 0\n\n // 1. Prioritize adding cache to system prompts (highest priority)\n const processedSystemBlocks = systemBlocks.map((block, _index) => {\n if (usedCacheBlocks < maxCacheBlocks && block.text.length > 1000) {\n usedCacheBlocks++\n return {\n ...block,\n cache_control: { type: 'ephemeral' as const },\n }\n }\n const { cache_control, ...blockWithoutCache } = block\n return blockWithoutCache\n })\n\n // 2. 
Add cache to message content based on priority\n const processedMessageParams = messageParams.map((message, messageIndex) => {\n if (Array.isArray(message.content)) {\n const processedContent = message.content.map(\n (contentBlock, blockIndex) => {\n // Determine whether this content block should be cached\n const shouldCache =\n usedCacheBlocks < maxCacheBlocks &&\n contentBlock.type === 'text' &&\n typeof contentBlock.text === 'string' &&\n // Long documents (over 2000 characters)\n (contentBlock.text.length > 2000 ||\n // Last content block of the last message (may be important context)\n (messageIndex === messageParams.length - 1 &&\n blockIndex === message.content.length - 1 &&\n contentBlock.text.length > 500))\n\n if (shouldCache) {\n usedCacheBlocks++\n return {\n ...contentBlock,\n cache_control: { type: 'ephemeral' as const },\n }\n }\n\n // Remove existing cache_control\n const { cache_control, ...blockWithoutCache } = contentBlock as any\n return blockWithoutCache\n },\n )\n\n return {\n ...message,\n content: processedContent,\n }\n }\n\n return message\n })\n\n return {\n systemBlocks: processedSystemBlocks,\n messageParams: processedMessageParams,\n }\n}\n\nexport function userMessageToMessageParam(\n message: UserMessage,\n addCache = false,\n): MessageParam {\n if (addCache) {\n if (typeof message.message.content === 'string') {\n return {\n role: 'user',\n content: [\n {\n type: 'text',\n text: message.message.content,\n },\n ],\n }\n } else {\n return {\n role: 'user',\n content: message.message.content.map(_ => ({ ..._ })),\n }\n }\n }\n return {\n role: 'user',\n content: message.message.content,\n }\n}\n\nexport function assistantMessageToMessageParam(\n message: AssistantMessage,\n addCache = false,\n): MessageParam {\n if (addCache) {\n if (typeof message.message.content === 'string') {\n return {\n role: 'assistant',\n content: [\n {\n type: 'text',\n text: message.message.content,\n },\n ],\n }\n } else {\n return {\n role: 'assistant',\n content: message.message.content.map(_ => ({ ..._ })),\n }\n }\n }\n return {\n role: 'assistant',\n content: message.message.content,\n }\n}\n\nfunction splitSysPromptPrefix(systemPrompt: string[]): string[] {\n // split out the first block of the system prompt as the \"prefix\" for API\n\n const systemPromptFirstBlock = systemPrompt[0] || ''\n const systemPromptRest = systemPrompt.slice(1)\n return [systemPromptFirstBlock, systemPromptRest.join('\\n')].filter(Boolean)\n}\n\nexport async function queryLLM(\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[],\n maxThinkingTokens: number,\n tools: Tool[],\n signal: AbortSignal,\n options: {\n safeMode: boolean\n model: string | import('@utils/config').ModelPointerType\n prependCLISysprompt: boolean\n toolUseContext?: ToolUseContext\n },\n): Promise<AssistantMessage> {\n const modelManager = getModelManager()\n const modelResolution = modelManager.resolveModelWithInfo(options.model)\n\n if (!modelResolution.success || !modelResolution.profile) {\n throw new Error(\n modelResolution.error || `Failed to resolve model: ${options.model}`,\n )\n }\n\n const modelProfile = modelResolution.profile\n const resolvedModel = modelProfile.modelName\n\n // Initialize response state if toolUseContext is provided\n const toolUseContext = options.toolUseContext\n if (toolUseContext && !toolUseContext.responseState) {\n const conversationId = getConversationId(\n toolUseContext.agentId,\n toolUseContext.messageId,\n )\n const previousResponseId =\n 
responseStateManager.getPreviousResponseId(conversationId)\n\n toolUseContext.responseState = {\n previousResponseId,\n conversationId,\n }\n }\n\n debugLogger.api('MODEL_RESOLVED', {\n inputParam: options.model,\n resolvedModelName: resolvedModel,\n provider: modelProfile.provider,\n isPointer: ['main', 'task', 'reasoning', 'quick'].includes(options.model),\n hasResponseState: !!toolUseContext?.responseState,\n conversationId: toolUseContext?.responseState?.conversationId,\n requestId: getCurrentRequest()?.id,\n })\n\n const currentRequest = getCurrentRequest()\n debugLogger.api('LLM_REQUEST_START', {\n messageCount: messages.length,\n systemPromptLength: systemPrompt.join(' ').length,\n toolCount: tools.length,\n model: resolvedModel,\n originalModelParam: options.model,\n requestId: getCurrentRequest()?.id,\n })\n\n markPhase('LLM_CALL')\n\n try {\n const result = await withVCR(messages, () =>\n queryLLMWithPromptCaching(\n messages,\n systemPrompt,\n maxThinkingTokens,\n tools,\n signal,\n { ...options, model: resolvedModel, modelProfile, toolUseContext }, // Pass resolved ModelProfile and toolUseContext\n ),\n )\n\n debugLogger.api('LLM_REQUEST_SUCCESS', {\n costUSD: result.costUSD,\n durationMs: result.durationMs,\n responseLength: result.message.content?.length || 0,\n requestId: getCurrentRequest()?.id,\n })\n\n // Update response state for GPT-5 Responses API continuation\n if (toolUseContext?.responseState?.conversationId && result.responseId) {\n responseStateManager.setPreviousResponseId(\n toolUseContext.responseState.conversationId,\n result.responseId,\n )\n\n debugLogger.api('RESPONSE_STATE_UPDATED', {\n conversationId: toolUseContext.responseState.conversationId,\n responseId: result.responseId,\n requestId: getCurrentRequest()?.id,\n })\n }\n\n return result\n } catch (error) {\n // \u4F7F\u7528\u9519\u8BEF\u8BCA\u65AD\u7CFB\u7EDF\u8BB0\u5F55 LLM \u76F8\u5173\u9519\u8BEF\n logErrorWithDiagnosis(\n error,\n {\n messageCount: messages.length,\n systemPromptLength: systemPrompt.join(' ').length,\n model: options.model,\n toolCount: tools.length,\n phase: 'LLM_CALL',\n },\n currentRequest?.id,\n )\n\n throw error\n }\n}\n\nexport function formatSystemPromptWithContext(\n systemPrompt: string[],\n context: { [k: string]: string },\n agentId?: string,\n skipContextReminders = false, // Parameter kept for API compatibility but not used anymore\n): { systemPrompt: string[]; reminders: string } {\n // \u6784\u5EFA\u589E\u5F3A\u7684\u7CFB\u7EDF\u63D0\u793A\uFF0C\u4FDD\u6301\u4E0E\u539F\u5148\u76F4\u63A5\u6CE8\u5165\u65B9\u5F0F\u7684\u517C\u5BB9\n const enhancedPrompt = [...systemPrompt]\n let reminders = ''\n\n // Step 0: Add GPT-5 Agent persistence support for coding tasks\n const modelManager = getModelManager()\n const modelProfile = modelManager.getModel('main')\n if (modelProfile && isGPT5Model(modelProfile.modelName)) {\n // Add coding-specific persistence instructions based on GPT-5 documentation\n const persistencePrompts = [\n '\\n# Agent Persistence for Long-Running Coding Tasks',\n 'You are working on a coding project that may involve multiple steps and iterations. 
Please maintain context and continuity throughout the session:',\n '- Remember architectural decisions and design patterns established earlier',\n '- Keep track of file modifications and their relationships',\n '- Maintain awareness of the overall project structure and goals',\n '- Reference previous implementations when making related changes',\n '- Ensure consistency with existing code style and conventions',\n '- Build incrementally on previous work rather than starting from scratch',\n ]\n enhancedPrompt.push(...persistencePrompts)\n }\n\n // \u53EA\u6709\u5F53\u4E0A\u4E0B\u6587\u5B58\u5728\u65F6\u624D\u5904\u7406\n const hasContext = Object.entries(context).length > 0\n\n if (hasContext) {\n // \u6B65\u9AA41: \u76F4\u63A5\u6CE8\u5165 Minto \u4E0A\u4E0B\u6587\u5230\u7CFB\u7EDF\u63D0\u793A - \u5BF9\u9F50\u5B98\u65B9\u8BBE\u8BA1\n if (!skipContextReminders) {\n const mintoContext = generateMintoContext()\n if (mintoContext) {\n // \u6DFB\u52A0\u5206\u9694\u7B26\u548C\u6807\u8BC6\uFF0C\u4F7F\u9879\u76EE\u6587\u6863\u5728\u7CFB\u7EDF\u63D0\u793A\u4E2D\u66F4\u6E05\u6670\n enhancedPrompt.push('\\n---\\n# \u9879\u76EE\u4E0A\u4E0B\u6587\\n')\n enhancedPrompt.push(mintoContext)\n enhancedPrompt.push('\\n---\\n')\n }\n }\n\n // \u6B65\u9AA42: \u751F\u6210\u5176\u4ED6\u52A8\u6001\u63D0\u9192\u8FD4\u56DE\u7ED9\u8C03\u7528\u65B9 - \u4FDD\u6301\u73B0\u6709\u52A8\u6001\u63D0\u9192\u529F\u80FD\n const reminderMessages = generateSystemReminders(hasContext, agentId)\n if (reminderMessages.length > 0) {\n reminders = reminderMessages.map(r => r.content).join('\\n') + '\\n'\n }\n\n // \u6B65\u9AA43: \u6DFB\u52A0\u5176\u4ED6\u4E0A\u4E0B\u6587\u5230\u7CFB\u7EDF\u63D0\u793A\n enhancedPrompt.push(\n `\\nAs you answer the user's questions, you can use the following context:\\n`,\n )\n\n // \u8FC7\u6EE4\u6389\u5DF2\u7ECF\u7531 Minto \u4E0A\u4E0B\u6587\u5904\u7406\u7684\u9879\u76EE\u6587\u6863\uFF08\u907F\u514D\u91CD\u590D\uFF09\n const filteredContext = Object.fromEntries(\n Object.entries(context).filter(\n ([key]) => key !== 'projectDocs' && key !== 'userDocs',\n ),\n )\n\n enhancedPrompt.push(\n ...Object.entries(filteredContext).map(\n ([key, value]) => `<context name=\"${key}\">${value}</context>`,\n ),\n )\n }\n\n return { systemPrompt: enhancedPrompt, reminders }\n}\n\nasync function queryLLMWithPromptCaching(\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[],\n maxThinkingTokens: number,\n tools: Tool[],\n signal: AbortSignal,\n options: {\n safeMode: boolean\n model: string\n prependCLISysprompt: boolean\n modelProfile?: ModelProfile | null\n toolUseContext?: ToolUseContext\n },\n): Promise<AssistantMessage> {\n const config = getGlobalConfig()\n const modelManager = getModelManager()\n const toolUseContext = options.toolUseContext\n\n const modelProfile = options.modelProfile || modelManager.getModel('main')\n let provider: string\n\n if (modelProfile) {\n provider = modelProfile.provider || config.primaryProvider || 'anthropic'\n } else {\n provider = config.primaryProvider || 'anthropic'\n }\n\n // Use native Anthropic SDK for Anthropic and some Anthropic-compatible providers\n if (\n provider === 'anthropic' ||\n provider === 'bigdream' ||\n provider === 'opendev'\n ) {\n return queryAnthropicNative(\n messages,\n systemPrompt,\n maxThinkingTokens,\n tools,\n signal,\n { ...options, modelProfile, toolUseContext },\n )\n }\n\n // Use OpenAI-compatible interface for all other providers\n return queryOpenAI(messages, systemPrompt, maxThinkingTokens, tools, signal, {\n 
...options,\n modelProfile,\n toolUseContext,\n })\n}\n\nasync function queryAnthropicNative(\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[],\n maxThinkingTokens: number,\n tools: Tool[],\n signal: AbortSignal,\n options?: {\n safeMode: boolean\n model: string\n prependCLISysprompt: boolean\n modelProfile?: ModelProfile | null\n toolUseContext?: ToolUseContext\n },\n): Promise<AssistantMessage> {\n const config = getGlobalConfig()\n const modelManager = getModelManager()\n const toolUseContext = options?.toolUseContext\n\n const modelProfile = options?.modelProfile || modelManager.getModel('main')\n let anthropic: Anthropic | AnthropicBedrock | AnthropicVertex\n let model: string\n let provider: string\n\n // \uD83D\uDD0D Debug: \u8BB0\u5F55\u6A21\u578B\u914D\u7F6E\u8BE6\u60C5\n debugLogger.api('MODEL_CONFIG_ANTHROPIC', {\n modelProfileFound: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n modelProfileModelName: modelProfile?.modelName,\n modelProfileProvider: modelProfile?.provider,\n modelProfileBaseURL: modelProfile?.baseURL,\n modelProfileApiKeyExists: !!modelProfile?.apiKey,\n optionsModel: options?.model,\n requestId: getCurrentRequest()?.id,\n })\n\n if (modelProfile) {\n // \u4F7F\u7528ModelProfile\u7684\u5B8C\u6574\u914D\u7F6E\n model = modelProfile.modelName\n provider = modelProfile.provider || config.primaryProvider || 'anthropic'\n\n // \u57FA\u4E8EModelProfile\u521B\u5EFA\u4E13\u7528\u7684API\u5BA2\u6237\u7AEF\n if (\n modelProfile.provider === 'anthropic' ||\n modelProfile.provider === 'bigdream' ||\n modelProfile.provider === 'opendev'\n ) {\n const clientConfig: any = {\n apiKey: modelProfile.apiKey,\n dangerouslyAllowBrowser: true,\n maxRetries: 0,\n timeout: parseInt(process.env.API_TIMEOUT_MS || String(60 * 1000), 10),\n defaultHeaders: {\n 'x-app': 'cli',\n 'User-Agent': USER_AGENT,\n },\n }\n\n // \u4F7F\u7528ModelProfile\u7684baseURL\u800C\u4E0D\u662F\u5168\u5C40\u914D\u7F6E\n if (modelProfile.baseURL) {\n clientConfig.baseURL = modelProfile.baseURL\n }\n\n anthropic = new Anthropic(clientConfig)\n } else {\n // \u5176\u4ED6\u63D0\u4F9B\u5546\u7684\u5904\u7406\u903B\u8F91\n anthropic = getAnthropicClient(model)\n }\n } else {\n // \uD83D\uDEA8 \u964D\u7EA7\uFF1A\u6CA1\u6709\u6709\u6548\u7684ModelProfile\u65F6\uFF0C\u5E94\u8BE5\u629B\u51FA\u9519\u8BEF\n const errorDetails = {\n modelProfileExists: !!modelProfile,\n modelProfileModelName: modelProfile?.modelName,\n requestedModel: options?.model,\n requestId: getCurrentRequest()?.id,\n }\n debugLogger.error('ANTHROPIC_FALLBACK_ERROR', errorDetails)\n throw new Error(\n `No valid ModelProfile available for Anthropic provider. Please configure model through /model command. Debug: ${JSON.stringify(errorDetails)}`,\n )\n }\n\n // Prepend system prompt block for easy API identification\n if (options?.prependCLISysprompt) {\n // Log stats about first block for analyzing prefix matching config\n const [firstSyspromptBlock] = splitSysPromptPrefix(systemPrompt)\n\n systemPrompt = [getCLISyspromptPrefix(), ...systemPrompt]\n }\n\n const system: TextBlockParam[] = splitSysPromptPrefix(systemPrompt).map(\n _ => ({\n text: _,\n type: 'text',\n }),\n )\n\n const toolSchemas = await Promise.all(\n tools.map(\n async tool =>\n ({\n name: tool.name,\n // Use getToolDescriptionAsync for consistent async access\n description: await getToolDescriptionAsync(tool),\n input_schema:\n 'inputJSONSchema' in tool && tool.inputJSONSchema\n ? 
tool.inputJSONSchema\n : zodToJsonSchema(tool.inputSchema),\n }) as unknown as Anthropic.Beta.Messages.BetaTool,\n ),\n )\n\n const anthropicMessages = addCacheBreakpoints(messages)\n\n // apply cache control\n const { systemBlocks: processedSystem, messageParams: processedMessages } =\n applyCacheControlWithLimits(system, anthropicMessages)\n const startIncludingRetries = Date.now()\n\n // \u8BB0\u5F55\u7CFB\u7EDF\u63D0\u793A\u6784\u5EFA\u8FC7\u7A0B\n logSystemPromptConstruction({\n basePrompt: systemPrompt.join('\\n'),\n mintoContext: generateMintoContext() || '',\n reminders: [], // \u8FD9\u91CC\u53EF\u4EE5\u4ECE generateSystemReminders \u83B7\u53D6\n finalPrompt: systemPrompt.join('\\n'),\n })\n\n let start = Date.now()\n let attemptNumber = 0\n let response\n\n try {\n response = await withRetry(\n async attempt => {\n attemptNumber = attempt\n start = Date.now()\n\n const params: Anthropic.Beta.Messages.MessageCreateParams = {\n model,\n max_tokens: getMaxTokensFromProfile(modelProfile),\n messages: processedMessages,\n system: processedSystem,\n tools: toolSchemas.length > 0 ? toolSchemas : undefined,\n tool_choice: toolSchemas.length > 0 ? { type: 'auto' } : undefined,\n }\n\n if (maxThinkingTokens > 0) {\n ;(params as any).extra_headers = {\n 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',\n }\n ;(params as any).thinking = { max_tokens: maxThinkingTokens }\n }\n\n // \uD83D\uDD25 REAL-TIME API CALL DEBUG - \u4F7F\u7528\u5168\u5C40\u65E5\u5FD7\u7CFB\u7EDF (Anthropic Streaming)\n debugLogger.api('ANTHROPIC_API_CALL_START_STREAMING', {\n endpoint: modelProfile?.baseURL || 'DEFAULT_ANTHROPIC',\n model,\n provider,\n apiKeyConfigured: !!modelProfile?.apiKey,\n apiKeyPrefix: modelProfile?.apiKey\n ? modelProfile.apiKey.substring(0, 8)\n : null,\n maxTokens: params.max_tokens,\n temperature: MAIN_QUERY_TEMPERATURE,\n params: params,\n messageCount: params.messages?.length || 0,\n streamMode: true,\n toolsCount: toolSchemas.length,\n thinkingTokens: maxThinkingTokens,\n timestamp: new Date().toISOString(),\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n })\n\n if (config.stream) {\n const stream = await anthropic.beta.messages.create(\n {\n ...params,\n stream: true,\n },\n {\n signal: signal, // \u2190 CRITICAL: Connect the AbortSignal to API call\n },\n )\n\n let finalResponse: any | null = null\n let messageStartEvent: any = null\n const contentBlocks: any[] = []\n const inputJSONBuffers = new Map<number, string>()\n let usage: any = null\n let stopReason: string | null = null\n let stopSequence: string | null = null\n\n for await (const event of stream) {\n if (signal.aborted) {\n debugLogger.flow('STREAM_ABORTED', {\n eventType: event.type,\n timestamp: Date.now(),\n })\n throw new Error('Request was cancelled')\n }\n\n switch (event.type) {\n case 'message_start':\n messageStartEvent = event\n finalResponse = {\n ...event.message,\n content: [], // Will be populated from content blocks\n }\n // NEW: Set initial input tokens from message_start and reset receivedChars\n setStreamingState({\n inputTokens: event.message.usage?.input_tokens,\n receivedChars: 0, // Reset for new request\n })\n break\n\n case 'content_block_start':\n contentBlocks[event.index] = { ...event.content_block }\n // Initialize JSON buffer for tool_use blocks\n if (event.content_block.type === 'tool_use') {\n inputJSONBuffers.set(event.index, '')\n }\n // \u2705 P0 FIX: Detect thinking block start and update UI\n if (event.content_block.type === 'thinking') {\n const 
thinkingStartTime = Date.now()\n setStreamingState({\n phase: 'deep_thinking',\n tokenCount: 0,\n thinkingMaxTokens: maxThinkingTokens,\n thinkingStartTime, // NEW: Record start time for duration tracking\n })\n debugLogger.flow('THINKING_BLOCK_START', {\n index: event.index,\n maxTokens: maxThinkingTokens,\n startTime: thinkingStartTime,\n })\n }\n break\n\n case 'content_block_delta':\n const blockIndex = event.index\n\n // Ensure content block exists\n if (!contentBlocks[blockIndex]) {\n contentBlocks[blockIndex] = {\n type:\n event.delta.type === 'text_delta' ? 'text' : 'tool_use',\n text: event.delta.type === 'text_delta' ? '' : undefined,\n }\n if (event.delta.type === 'input_json_delta') {\n inputJSONBuffers.set(blockIndex, '')\n }\n }\n\n if (event.delta.type === 'text_delta') {\n contentBlocks[blockIndex].text += event.delta.text\n // Update received chars for approximate token display\n const currentState = getStreamingState()\n setStreamingState({\n receivedChars:\n (currentState.receivedChars || 0) +\n event.delta.text.length,\n })\n } else if (event.delta.type === 'input_json_delta') {\n const currentBuffer = inputJSONBuffers.get(blockIndex) || ''\n inputJSONBuffers.set(\n blockIndex,\n currentBuffer + event.delta.partial_json,\n )\n }\n // \u2705 P0 FIX: Handle thinking_delta events to update token count\n else if (event.delta.type === 'thinking_delta') {\n const currentThinking =\n contentBlocks[blockIndex].thinking || ''\n const updatedThinking =\n currentThinking + (event.delta.thinking || '')\n contentBlocks[blockIndex] = {\n ...contentBlocks[blockIndex],\n thinking: updatedThinking,\n }\n\n // Update UI with current thinking token count\n setStreamingState({\n phase: 'deep_thinking',\n tokenCount: updatedThinking.length,\n thinkingMaxTokens: maxThinkingTokens,\n })\n\n debugLogger.trace('THINKING_DELTA', {\n index: blockIndex,\n tokenCount: updatedThinking.length,\n })\n }\n break\n\n case 'message_delta':\n if (event.delta.stop_reason)\n stopReason = event.delta.stop_reason\n if (event.delta.stop_sequence)\n stopSequence = event.delta.stop_sequence\n if (event.usage) {\n usage = { ...usage, ...event.usage }\n // NEW: Update UI with real-time output token count\n // Note: message_delta only provides output_tokens, not input_tokens\n setStreamingState({\n outputTokens: event.usage.output_tokens,\n })\n }\n break\n\n case 'content_block_stop':\n const stopIndex = event.index\n const block = contentBlocks[stopIndex]\n\n // \u2705 P0 FIX: Handle thinking block completion\n if (block?.type === 'thinking') {\n const streamState = getStreamingState()\n const thinkingDurationMs = streamState.thinkingStartTime\n ? Date.now() - streamState.thinkingStartTime\n : 0\n setStreamingState({\n phase: 'generating',\n thinkingDurationMs, // NEW: Store duration for display\n })\n debugLogger.flow('THINKING_BLOCK_COMPLETE', {\n index: stopIndex,\n finalTokenCount: block.thinking?.length || 0,\n durationMs: thinkingDurationMs,\n })\n }\n\n if (\n block?.type === 'tool_use' &&\n inputJSONBuffers.has(stopIndex)\n ) {\n const jsonStr = inputJSONBuffers.get(stopIndex)\n if (jsonStr) {\n try {\n block.input = JSON.parse(jsonStr)\n } catch (error) {\n const errorMsg =\n error instanceof Error ? error.message : String(error)\n debugLogger.error('JSON_PARSE_ERROR', {\n blockIndex: stopIndex,\n jsonStr:\n jsonStr.length > 500\n ? 
jsonStr.slice(0, 500) + '...'\n : jsonStr,\n error: errorMsg,\n })\n // Preserve parse error info for downstream error handling\n // Instead of empty {}, provide error context that query.ts can detect\n block.input = {\n __parse_error__: true,\n __error_message__: `JSON parse failed: ${errorMsg}`,\n __raw_json_preview__:\n jsonStr.length > 200\n ? jsonStr.slice(0, 200) + '...'\n : jsonStr,\n }\n }\n inputJSONBuffers.delete(stopIndex)\n }\n }\n break\n\n case 'message_stop':\n // Clear any remaining buffers\n inputJSONBuffers.clear()\n break\n }\n\n if (event.type === 'message_stop') {\n break\n }\n }\n\n if (!finalResponse || !messageStartEvent) {\n throw new Error('Stream ended without proper message structure')\n }\n\n // Construct the final response\n finalResponse = {\n ...messageStartEvent.message,\n content: contentBlocks.filter(Boolean),\n stop_reason: stopReason,\n stop_sequence: stopSequence,\n usage: {\n ...messageStartEvent.message.usage,\n ...usage,\n },\n }\n\n return finalResponse\n } else {\n // \uD83D\uDD25 REAL-TIME API CALL DEBUG - \u4F7F\u7528\u5168\u5C40\u65E5\u5FD7\u7CFB\u7EDF (Anthropic Non-Streaming)\n debugLogger.api('ANTHROPIC_API_CALL_START_NON_STREAMING', {\n endpoint: modelProfile?.baseURL || 'DEFAULT_ANTHROPIC',\n model,\n provider,\n apiKeyConfigured: !!modelProfile?.apiKey,\n apiKeyPrefix: modelProfile?.apiKey\n ? modelProfile.apiKey.substring(0, 8)\n : null,\n maxTokens: params.max_tokens,\n temperature: MAIN_QUERY_TEMPERATURE,\n messageCount: params.messages?.length || 0,\n streamMode: false,\n toolsCount: toolSchemas.length,\n thinkingTokens: maxThinkingTokens,\n timestamp: new Date().toISOString(),\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n })\n\n return await anthropic.beta.messages.create(params, {\n signal: signal, // \u2190 CRITICAL: Connect the AbortSignal to API call\n })\n }\n },\n {\n signal,\n // Pass retry events to agent transcript for UI display\n onRetry: toolUseContext?.agentId\n ? ({ attempt, maxRetries, error, delayMs }) => {\n addRetryEventToTranscript(toolUseContext.agentId!, {\n attempt,\n maxRetries,\n errorMessage: error.message,\n delayMs,\n })\n }\n : undefined,\n },\n )\n\n debugLogger.api('ANTHROPIC_API_CALL_SUCCESS', {\n content: response.content,\n })\n\n const ttftMs = start - Date.now()\n const durationMs = Date.now() - startIncludingRetries\n\n const content = response.content.map((block: ContentBlock) => {\n if (block.type === 'text') {\n return {\n type: 'text' as const,\n text: block.text,\n }\n } else if (block.type === 'tool_use') {\n return {\n type: 'tool_use' as const,\n id: block.id,\n name: block.name,\n input: block.input,\n }\n }\n return block\n })\n\n // Build thinking metadata if thinking blocks are present\n const thinkingBlocks = content.filter(\n (block): block is ContentBlock & { type: 'thinking'; thinking: string } =>\n block.type === 'thinking',\n )\n const streamState = getStreamingState()\n const thinkingMetadata: ThinkingMetadata | undefined =\n thinkingBlocks.length > 0\n ? 
{\n charCount: thinkingBlocks.reduce(\n (sum, b) => sum + (b.thinking?.length || 0),\n 0,\n ),\n durationMs: streamState.thinkingDurationMs || 0,\n startTime: streamState.thinkingStartTime || Date.now(),\n isComplete: true,\n }\n : undefined\n\n const assistantMessage: AssistantMessage = {\n message: {\n id: response.id,\n content,\n model: response.model,\n role: 'assistant',\n stop_reason: response.stop_reason,\n stop_sequence: response.stop_sequence,\n type: 'message',\n usage: response.usage,\n },\n type: 'assistant',\n uuid: nanoid() as UUID,\n durationMs,\n costUSD: 0, // Will be calculated below\n thinkingMetadata,\n }\n\n // \u8BB0\u5F55\u5B8C\u6574\u7684 LLM \u4EA4\u4E92\u8C03\u8BD5\u4FE1\u606F (Anthropic path)\n // \u6CE8\u610F\uFF1AAnthropic API\u5C06system prompt\u548Cmessages\u5206\u5F00\uFF0C\u8FD9\u91CC\u91CD\u6784\u4E3A\u5B8C\u6574\u7684API\u8C03\u7528\u89C6\u56FE\n const systemMessages = system.map(block => ({\n role: 'system',\n content: block.text,\n }))\n\n logLLMInteraction({\n systemPrompt: systemPrompt.join('\\n'),\n messages: [...systemMessages, ...anthropicMessages],\n response: response,\n usage: response.usage\n ? {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n }\n : undefined,\n timing: {\n start: start,\n end: Date.now(),\n },\n apiFormat: 'anthropic',\n })\n\n // Calculate cost using native Anthropic usage data\n const inputTokens = response.usage.input_tokens\n const outputTokens = response.usage.output_tokens\n const cacheCreationInputTokens =\n response.usage.cache_creation_input_tokens ?? 0\n const cacheReadInputTokens = response.usage.cache_read_input_tokens ?? 0\n\n const costUSD =\n (inputTokens / 1_000_000) * getModelInputTokenCostUSD(model) +\n (outputTokens / 1_000_000) * getModelOutputTokenCostUSD(model) +\n (cacheCreationInputTokens / 1_000_000) *\n getModelInputTokenCostUSD(model) +\n (cacheReadInputTokens / 1_000_000) *\n (getModelInputTokenCostUSD(model) * 0.1) // Cache reads are 10% of input cost\n\n assistantMessage.costUSD = costUSD\n addToTotalCost(costUSD, durationMs)\n\n // Record token usage to unified stats manager\n recordTokenUsage(\n {\n inputTokens,\n outputTokens,\n cacheCreationTokens: cacheCreationInputTokens,\n cacheReadTokens: cacheReadInputTokens,\n },\n costUSD,\n model,\n toolUseContext\n ? 
({\n agentId: toolUseContext.agentId,\n toolUseId: toolUseContext.toolUseId,\n model,\n } as TokenTrackingContext)\n : undefined,\n )\n\n return assistantMessage\n } catch (error) {\n return getAssistantMessageFromError(error)\n }\n}\n\nfunction getAssistantMessageFromError(error: unknown): AssistantMessage {\n if (error instanceof Error && error.message.includes('prompt is too long')) {\n return createAssistantAPIErrorMessage(PROMPT_TOO_LONG_ERROR_MESSAGE)\n }\n if (\n error instanceof Error &&\n error.message.includes('Your credit balance is too low')\n ) {\n return createAssistantAPIErrorMessage(CREDIT_BALANCE_TOO_LOW_ERROR_MESSAGE)\n }\n if (\n error instanceof Error &&\n error.message.toLowerCase().includes('x-api-key')\n ) {\n return createAssistantAPIErrorMessage(INVALID_API_KEY_ERROR_MESSAGE)\n }\n if (error instanceof Error) {\n if (process.env.NODE_ENV === 'development') {\n debugLogger.error('ANTHROPIC_API_ERROR', {\n message: error.message,\n stack: error.stack,\n })\n }\n return createAssistantAPIErrorMessage(\n `${API_ERROR_MESSAGE_PREFIX}: ${error.message}`,\n )\n }\n return createAssistantAPIErrorMessage(API_ERROR_MESSAGE_PREFIX)\n}\n\nfunction addCacheBreakpoints(\n messages: (UserMessage | AssistantMessage)[],\n): MessageParam[] {\n return messages.map((msg, index) => {\n return msg.type === 'user'\n ? userMessageToMessageParam(msg, index > messages.length - 3)\n : assistantMessageToMessageParam(msg, index > messages.length - 3)\n })\n}\n\nasync function queryOpenAI(\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[],\n maxThinkingTokens: number,\n tools: Tool[],\n signal: AbortSignal,\n options?: {\n safeMode: boolean\n model: string\n prependCLISysprompt: boolean\n modelProfile?: ModelProfile | null\n toolUseContext?: ToolUseContext\n },\n): Promise<AssistantMessage> {\n const config = getGlobalConfig()\n const modelManager = getModelManager()\n const toolUseContext = options?.toolUseContext\n\n const modelProfile = options?.modelProfile || modelManager.getModel('main')\n let model: string\n\n // \uD83D\uDD0D Debug: \u8BB0\u5F55\u6A21\u578B\u914D\u7F6E\u8BE6\u60C5\n const currentRequest = getCurrentRequest()\n debugLogger.api('MODEL_CONFIG_OPENAI', {\n modelProfileFound: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n modelProfileModelName: modelProfile?.modelName,\n modelProfileProvider: modelProfile?.provider,\n modelProfileBaseURL: modelProfile?.baseURL,\n modelProfileApiKeyExists: !!modelProfile?.apiKey,\n optionsModel: options?.model,\n requestId: getCurrentRequest()?.id,\n })\n\n if (modelProfile) {\n model = modelProfile.modelName\n } else {\n model = options?.model || modelProfile?.modelName || ''\n }\n // Prepend system prompt block for easy API identification\n if (options?.prependCLISysprompt) {\n const [firstSyspromptBlock] = splitSysPromptPrefix(systemPrompt)\n\n systemPrompt = [getCLISyspromptPrefix() + systemPrompt] // some openai-like providers need the entire system prompt as a single block\n }\n\n const system: TextBlockParam[] = splitSysPromptPrefix(systemPrompt).map(\n _ => ({\n ...(PROMPT_CACHING_ENABLED\n ? 
{ cache_control: { type: 'ephemeral' } }\n : {}),\n text: _,\n type: 'text',\n }),\n )\n\n const toolSchemas = await Promise.all(\n tools.map(\n async _ =>\n ({\n type: 'function',\n function: {\n name: _.name,\n description: await _.prompt({\n safeMode: options?.safeMode,\n }),\n // Use tool's JSON schema directly if provided, otherwise convert Zod schema\n parameters:\n 'inputJSONSchema' in _ && _.inputJSONSchema\n ? _.inputJSONSchema\n : zodToJsonSchema(_.inputSchema),\n },\n }) as OpenAI.ChatCompletionTool,\n ),\n )\n\n const openaiSystem = system.map(\n s =>\n ({\n role: 'system',\n content: s.text,\n }) as OpenAI.ChatCompletionMessageParam,\n )\n\n const openaiMessages = convertAnthropicMessagesToOpenAIMessages(messages)\n const startIncludingRetries = Date.now()\n\n // \u8BB0\u5F55\u7CFB\u7EDF\u63D0\u793A\u6784\u5EFA\u8FC7\u7A0B (OpenAI path)\n logSystemPromptConstruction({\n basePrompt: systemPrompt.join('\\n'),\n mintoContext: generateMintoContext() || '',\n reminders: [], // \u8FD9\u91CC\u53EF\u4EE5\u4ECE generateSystemReminders \u83B7\u53D6\n finalPrompt: systemPrompt.join('\\n'),\n })\n\n let start = Date.now()\n let attemptNumber = 0\n let response\n\n try {\n response = await withRetry(\n async attempt => {\n attemptNumber = attempt\n start = Date.now()\n // \uD83D\uDD25 GPT-5 Enhanced Parameter Construction\n const maxTokens = getMaxTokensFromProfile(modelProfile)\n const isGPT5 = isGPT5Model(model)\n\n const opts: OpenAI.ChatCompletionCreateParams = {\n model,\n\n ...(isGPT5\n ? { max_completion_tokens: maxTokens }\n : { max_tokens: maxTokens }),\n messages: [...openaiSystem, ...openaiMessages],\n\n temperature: isGPT5 ? 1 : MAIN_QUERY_TEMPERATURE,\n }\n if (config.stream) {\n ;(opts as OpenAI.ChatCompletionCreateParams).stream = true\n opts.stream_options = {\n include_usage: true,\n }\n }\n\n if (toolSchemas.length > 0) {\n opts.tools = toolSchemas\n opts.tool_choice = 'auto'\n }\n const reasoningEffort = await getReasoningEffort(modelProfile, messages)\n if (reasoningEffort) {\n opts.reasoning_effort = reasoningEffort\n }\n\n if (modelProfile && modelProfile.modelName) {\n debugLogger.api('USING_MODEL_PROFILE_PATH', {\n modelProfileName: modelProfile.modelName,\n modelName: modelProfile.modelName,\n provider: modelProfile.provider,\n baseURL: modelProfile.baseURL,\n apiKeyExists: !!modelProfile.apiKey,\n requestId: getCurrentRequest()?.id,\n })\n\n // Enable new adapter system with environment variable\n const USE_NEW_ADAPTER_SYSTEM =\n process.env.USE_NEW_ADAPTERS !== 'false'\n\n if (USE_NEW_ADAPTER_SYSTEM) {\n // New adapter system\n const adapter = ModelAdapterFactory.createAdapter(modelProfile)\n\n // Build unified request parameters\n const unifiedParams: UnifiedRequestParams = {\n messages: openaiMessages,\n systemPrompt: openaiSystem.map(s => s.content as string),\n tools: tools,\n maxTokens: getMaxTokensFromProfile(modelProfile),\n stream: config.stream,\n reasoningEffort: reasoningEffort as any,\n temperature: isGPT5Model(model) ? 
1 : MAIN_QUERY_TEMPERATURE,\n previousResponseId:\n toolUseContext?.responseState?.previousResponseId,\n verbosity: 'high', // High verbosity for coding tasks\n }\n\n // Create request using adapter\n const request = adapter.createRequest(unifiedParams)\n\n // Determine which API to use\n if (ModelAdapterFactory.shouldUseResponsesAPI(modelProfile)) {\n // Use Responses API for GPT-5 and similar models\n const { callGPT5ResponsesAPI } = await import('./openai')\n const response = await callGPT5ResponsesAPI(\n modelProfile,\n request,\n signal,\n )\n const unifiedResponse = adapter.parseResponse(response)\n\n // Convert unified response back to Anthropic format\n const apiMessage = {\n role: 'assistant' as const,\n content: unifiedResponse.content,\n tool_calls: unifiedResponse.toolCalls,\n usage: {\n prompt_tokens: unifiedResponse.usage.promptTokens,\n completion_tokens: unifiedResponse.usage.completionTokens,\n },\n }\n const assistantMsg: AssistantMessage = {\n type: 'assistant',\n message: apiMessage as any,\n costUSD: 0, // Will be calculated later\n durationMs: Date.now() - start,\n uuid: `${Date.now()}-${Math.random().toString(36).substr(2, 9)}` as any,\n responseId: unifiedResponse.responseId, // For state management\n }\n return assistantMsg\n } else {\n // Use existing Chat Completions flow\n const s = await getCompletionWithProfile(\n modelProfile,\n request,\n 0,\n 10,\n signal,\n )\n let finalResponse\n if (config.stream) {\n finalResponse = await handleMessageStream(\n s as ChatCompletionStream,\n signal,\n )\n } else {\n finalResponse = s\n }\n const r = convertOpenAIResponseToAnthropic(finalResponse, tools)\n return r\n }\n } else {\n // Legacy system (preserved for fallback)\n const completionFunction = isGPT5Model(modelProfile.modelName)\n ? getGPT5CompletionWithProfile\n : getCompletionWithProfile\n const s = await completionFunction(\n modelProfile,\n opts,\n 0,\n 10,\n signal,\n )\n let finalResponse\n if (opts.stream) {\n finalResponse = await handleMessageStream(\n s as ChatCompletionStream,\n signal,\n )\n } else {\n finalResponse = s\n }\n const r = convertOpenAIResponseToAnthropic(finalResponse, tools)\n return r\n }\n } else {\n // \uD83D\uDEA8 \u8B66\u544A\uFF1AModelProfile\u4E0D\u53EF\u7528\uFF0C\u4F7F\u7528\u65E7\u903B\u8F91\u8DEF\u5F84\n debugLogger.api('USING_LEGACY_PATH', {\n modelProfileExists: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelNameExists: !!modelProfile?.modelName,\n fallbackModel: 'main',\n actualModel: model,\n requestId: getCurrentRequest()?.id,\n })\n\n // \uD83D\uDEA8 FALLBACK: \u6CA1\u6709\u6709\u6548\u7684ModelProfile\u65F6\uFF0C\u5E94\u8BE5\u629B\u51FA\u9519\u8BEF\u800C\u4E0D\u662F\u4F7F\u7528\u9057\u7559\u7CFB\u7EDF\n const errorDetails = {\n modelProfileExists: !!modelProfile,\n modelProfileId: modelProfile?.modelName,\n modelNameExists: !!modelProfile?.modelName,\n requestedModel: model,\n requestId: getCurrentRequest()?.id,\n }\n debugLogger.error('NO_VALID_MODEL_PROFILE', errorDetails)\n throw new Error(\n `No valid ModelProfile available for model: ${model}. Please configure model through /model command. Debug: ${JSON.stringify(errorDetails)}`,\n )\n }\n },\n {\n signal,\n // Pass retry events to agent transcript for UI display\n onRetry: toolUseContext?.agentId\n ? 
({ attempt, maxRetries, error, delayMs }) => {\n addRetryEventToTranscript(toolUseContext.agentId!, {\n attempt,\n maxRetries,\n errorMessage: error.message,\n delayMs,\n })\n }\n : undefined,\n },\n )\n } catch (error) {\n logError(error)\n return getAssistantMessageFromError(error)\n }\n const durationMs = Date.now() - start\n const durationMsIncludingRetries = Date.now() - startIncludingRetries\n\n const inputTokens = response.usage?.prompt_tokens ?? 0\n const outputTokens = response.usage?.completion_tokens ?? 0\n const cacheReadInputTokens =\n response.usage?.prompt_token_details?.cached_tokens ?? 0\n const cacheCreationInputTokens =\n response.usage?.prompt_token_details?.cached_tokens ?? 0\n const costUSD =\n (inputTokens / 1_000_000) * SONNET_COST_PER_MILLION_INPUT_TOKENS +\n (outputTokens / 1_000_000) * SONNET_COST_PER_MILLION_OUTPUT_TOKENS +\n (cacheReadInputTokens / 1_000_000) *\n SONNET_COST_PER_MILLION_PROMPT_CACHE_READ_TOKENS +\n (cacheCreationInputTokens / 1_000_000) *\n SONNET_COST_PER_MILLION_PROMPT_CACHE_WRITE_TOKENS\n\n addToTotalCost(costUSD, durationMsIncludingRetries)\n\n // Record token usage to unified stats manager\n recordTokenUsage(\n {\n inputTokens,\n outputTokens,\n cacheCreationTokens: cacheCreationInputTokens,\n cacheReadTokens: cacheReadInputTokens,\n },\n costUSD,\n model,\n toolUseContext\n ? ({\n agentId: toolUseContext.agentId,\n toolUseId: toolUseContext.toolUseId,\n model,\n } as TokenTrackingContext)\n : undefined,\n )\n\n // \u8BB0\u5F55\u5B8C\u6574\u7684 LLM \u4EA4\u4E92\u8C03\u8BD5\u4FE1\u606F (OpenAI path)\n logLLMInteraction({\n systemPrompt: systemPrompt.join('\\n'),\n messages: [...openaiSystem, ...openaiMessages],\n response: response,\n usage: {\n inputTokens: inputTokens,\n outputTokens: outputTokens,\n },\n timing: {\n start: start,\n end: Date.now(),\n },\n apiFormat: 'openai',\n })\n\n return {\n message: {\n ...response,\n content: normalizeContentFromAPI(response.content),\n usage: {\n input_tokens: inputTokens,\n output_tokens: outputTokens,\n cache_read_input_tokens: cacheReadInputTokens,\n cache_creation_input_tokens: 0,\n },\n },\n costUSD,\n durationMs,\n type: 'assistant',\n uuid: randomUUID(),\n }\n}\n\nfunction getMaxTokensFromProfile(modelProfile: any): number {\n // Use ModelProfile maxTokens or reasonable default\n return modelProfile?.maxTokens || 8000\n}\n\nfunction getModelInputTokenCostUSD(model: string): number {\n // Find the model in the models object\n for (const providerModels of Object.values(models)) {\n const modelInfo = providerModels.find((m: any) => m.model === model)\n if (modelInfo) {\n return modelInfo.input_cost_per_token || 0\n }\n }\n // Default fallback cost for unknown models\n return 0.000003 // Default to Claude 3 Haiku cost\n}\n\nfunction getModelOutputTokenCostUSD(model: string): number {\n // Find the model in the models object\n for (const providerModels of Object.values(models)) {\n const modelInfo = providerModels.find((m: any) => m.model === model)\n if (modelInfo) {\n return modelInfo.output_cost_per_token || 0\n }\n }\n // Default fallback cost for unknown models\n return 0.000015 // Default to Claude 3 Haiku cost\n}\n\n// New unified query functions for model pointer system\nexport async function queryModel(\n modelPointer: import('@utils/config').ModelPointerType,\n messages: (UserMessage | AssistantMessage)[],\n systemPrompt: string[] = [],\n signal?: AbortSignal,\n): Promise<AssistantMessage> {\n // Use queryLLM with the pointer directly\n return queryLLM(\n messages,\n systemPrompt,\n 
0, // maxThinkingTokens\n [], // tools\n signal || new AbortController().signal,\n {\n safeMode: false,\n model: modelPointer,\n prependCLISysprompt: true,\n },\n )\n}\n\n// Note: Use queryModel(pointer, ...) directly instead of these convenience functions\n\n// Simplified query function using quick model pointer\nexport async function queryQuick({\n systemPrompt = [],\n userPrompt,\n assistantPrompt,\n enablePromptCaching = false,\n signal,\n}: {\n systemPrompt?: string[]\n userPrompt: string\n assistantPrompt?: string\n enablePromptCaching?: boolean\n signal?: AbortSignal\n}): Promise<AssistantMessage> {\n const messages = [\n {\n message: { role: 'user', content: userPrompt },\n type: 'user',\n uuid: randomUUID(),\n },\n ] as (UserMessage | AssistantMessage)[]\n\n return queryModel('quick', messages, systemPrompt, signal)\n}\n"],
      5 | +
"mappings": "AAAA,OAAO;AACP,OAAO,aAAa,oBAAoB,gBAAgB;AACxD,SAAS,wBAAwB;AACjC,SAAS,uBAAuB;AAChC,OAAO,WAAW;AAClB,SAAS,kBAAwB;AACjC,OAAO;AAEP,SAAS,sBAAsB;AAC/B,SAAS,wBAAwB;AAEjC,OAAO,YAAY;AAEnB,SAAe,+BAA+B;AAC9C;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,sBAAsB;AAC/B,SAAS,UAAU,kBAAkB;AACrC,SAAS,sBAAsB;AAC/B,SAAS,kBAAkB;AAC3B;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,eAAe;AACxB;AAAA,EACE,SAAS;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,uBAAuB;AAChC,SAAS,uBAAuB;AAChC,SAAS,2BAA2B;AAEpC,SAAS,sBAAsB,yBAAyB;AAMxD,SAAS,aAAa,kBAAkB;AACxC,SAAS,6BAA6B;AACtC,SAAS,+BAA+B;AAIxC,SAAS,cAAc;AACvB;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,0BAA0B;AACnC,SAAS,+BAA+B;AACxC,SAAS,iCAAiC;AAC1C,SAAS,mBAAmB,yBAAyB;AAIrD,SAAS,YAAY,WAA4B;AAC/C,SAAO,UAAU,WAAW,OAAO;AACrC;AAGA,MAAM,oBAAoB;AAAA,EACxB,OAAe;AAAA,EACP,mBAA2B;AAAA,EAC3B,mBAA4B;AAAA,EAC5B,cAAoC;AAAA,EAEpC,cAAc;AAAA,EAAC;AAAA,EAEvB,OAAc,cAAmC;AAC/C,QAAI,CAAC,oBAAoB,UAAU;AACjC,0BAAoB,WAAW,IAAI,oBAAoB;AAAA,IACzD;AACA,WAAO,oBAAoB;AAAA,EAC7B;AAAA,EAEA,MAAa,aAA4B;AACvC,QAAI,KAAK,iBAAkB;AAE3B,QAAI,KAAK,aAAa;AACpB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,cAAc,KAAK,gBAAgB;AACxC,UAAM,KAAK;AAAA,EACb;AAAA,EAEA,MAAc,kBAAiC;AAC7C,QAAI;AACF,YAAM,cAAc,MAAM,eAAe;AACzC,WAAK,mBAAmB,eAAe;AACvC,WAAK,mBAAmB;AAGxB,UAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,oBAAY,KAAK,wBAAwB;AAAA,UACvC,YAAY,KAAK,iBAAiB;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,IACF,SAAS,OAAO;AACd,kBAAY,KAAK,6BAA6B,EAAE,OAAO,OAAO,KAAK,EAAE,CAAC;AACtE,WAAK,mBAAmB;AACxB,WAAK,mBAAmB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEO,kBAA0B;AAC/B,QAAI,CAAC,KAAK,kBAAkB;AAE1B,WAAK,WAAW,EAAE,MAAM,WAAS;AAC/B,oBAAY,KAAK,6BAA6B,EAAE,OAAO,OAAO,KAAK,EAAE,CAAC;AAAA,MACxE,CAAC;AACD,aAAO;AAAA,IACT;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAa,eAA8B;AACzC,SAAK,mBAAmB;AACxB,SAAK,cAAc;AACnB,UAAM,KAAK,WAAW;AAAA,EACxB;AACF;AAGA,MAAM,sBAAsB,oBAAoB,YAAY;AAG5D,oBAAoB,WAAW,EAAE,MAAM,WAAS;AAC9C,cAAY,KAAK,oCAAoC,EAAE,OAAO,OAAO,KAAK,EAAE,CAAC;AAC/E,CAAC;AAEM,MAAM,uBAAuB,MAAc;AAChD,SAAO,oBAAoB,gBAAgB;AAC7C;AAEO,MAAM,sBAAsB,YAA2B;AAC5D,QAAM,oBAAoB,aAAa;AACzC;AAEO,MAAM,2BAA2B;AACjC,MAAM,gCAAgC;AACtC,MAAM,uCAAuC;AAC7C,MAAM,gCACX;AACK,MAAM,qBAAqB;AAClC,MAAM,yBAAyB,CAAC,QAAQ,IAAI;AAG5C,MAAM,uCAAuC;AAC7C,MAAM,wCAAwC;AAC9C,MAAM,oDAAoD;AAC1D,MAAM,mDAAmD;AAElD,MAAM,yBAAyB;AAEtC,SAAS,cAAc;AACrB,SAAO;AAAA,IACL,SAAS,GAAG,kBAAkB,CAAC,IAAI,UAAU;AAAA,EAC/C;AACF;AAEA,MAAM,cAAc,QAAQ,IAAI,cAAc,cAAc,MAAM;AAClE,MAAM,gBAAgB;AActB,SAAS,cACP,SACA,kBACQ;AACR,MAAI,kBAAkB;AACpB,UAAM,UAAU,SAAS,kBAAkB,EAAE;AAC7C,QAAI,CAAC,MAAM,OAAO,GAAG;AACnB,aAAO,UAAU;AAAA,IACnB;AAAA,EACF;AACA,SAAO,KAAK,IAAI,gBAAgB,KAAK,IAAI,GAAG,UAAU,CAAC,GAAG,IAAK;AACjE;AAEA,SAAS,YAAY,OAA0B;AAE7C,MAAI,MAAM,SAAS,SAAS,2BAA2B,GAAG;AACxD,WAAO,QAAQ,IAAI,cAAc;AAAA,EACnC;AAGA,QAAM,oBAAoB,MAAM,UAAU,gBAAgB;AAG1D,MAAI,sBAAsB,OAAQ,QAAO;AACzC,MAAI,sBAAsB,QAAS,QAAO;AAE1C,MAAI,iBAAiB,oBAAoB;AACvC,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,MAAM,OAAQ,QAAO;AAG1B,MAAI,MAAM,WAAW,IAAK,QAAO;AAGjC,MAAI,MAAM,WAAW,IAAK,QAAO;AAGjC,MAAI,MAAM,WAAW,IAAK,QAAO;AAGjC,MAAI,MAAM,UAAU,MAAM,UAAU,IAAK,QAAO;AAEhD,SAAO;AACT;AAEA,eAAe,UACb,WACA,UAAwB,CAAC,GACb;AACZ,QAAM,aAAa,QAAQ,cAAc;AACzC,MAAI;AAEJ,WAAS,UAAU,GAAG,WAAW,aAAa,GAAG,WAAW;AAC1D,QAAI;AACF,aAAO,MAAM,UAAU,OAAO;AAAA,IAChC,SAAS,OAAO;AACd,kBAAY;AAEZ,UACE,UAAU,cACV,EAAE,iBAAiB,aACnB,CAAC,YAAY,KAAK,GAClB;AACA,cAAM;AAAA,MACR;AAEA,UAAI,QAAQ,QAAQ,SAAS;AAC3B,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAGA,YAAM,aAAa,MAAM,UAAU,aAAa,KAAK;AACrD,YAAM,UAAU,cAAc,SAAS,UAAU;AAGjD,wBAAkB;AAAA,QAChB,OAAO;AAAA,QACP,YAAY;AAAA,QACZ;AAAA,QACA,WAAW,MAAM;AAAA,MACnB,CAAC;AAED,kBAAY,KAAK,qBAAqB;AAAA,QACpC;AAAA,QACA;AAAA,QACA,WAAW,MAAM;AAAA,QACjB,cAAc,MAAM;AAAA,QACpB
;AAAA,MACF,CAAC;AAGD,UAAI,QAAQ,SAAS;AACnB,gBAAQ,QAAQ;AAAA,UACd;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL,gBAAQ;AAAA,UACN,aAAQ,MAAM,IAAI,OAAO,MAAM,IAAI,KAAK,MAAM,OAAO,sBAAmB,KAAK,MAAM,UAAU,GAAI,CAAC,2BAAsB,OAAO,IAAI,UAAU,GAAG,CAAC;AAAA,QACnJ;AAAA,MACF;AAEA,UAAI;AACF,cAAM,eAAe,SAAS,QAAQ,MAAM;AAAA,MAC9C,SAAS,YAAY;AAEnB,YAAI,WAAW,YAAY,uBAAuB;AAChD,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM;AACR;AAKA,eAAsB,qBACpB,SACA,QACgB;AAChB,MAAI;AAEF,UAAM,YAAY,UACd,GAAG,QAAQ,QAAQ,QAAQ,EAAE,CAAC,eAC9B;AAEJ,UAAM,WAAW,MAAM,MAAM,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,aAAa;AAAA,QACb,qBAAqB;AAAA,QACrB,cAAc;AAAA,MAChB;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAEhB,UAAI,SAAS,WAAW,KAAK;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,UAAU,KAAK;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,OAAO;AACL,cAAM,IAAI;AAAA,UACR,uCAAuC,SAAS,MAAM;AAAA,QACxD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,WAAO,KAAK,QAAQ,CAAC;AAAA,EACvB,SAAS,OAAO;AAEd,QACG,iBAAiB,SAAS,MAAM,QAAQ,SAAS,SAAS,KAC1D,iBAAiB,SAAS,MAAM,QAAQ,SAAS,WAAW,GAC7D;AACA,YAAM;AAAA,IACR;AAGA,aAAS,KAAK;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAsB,aACpB,QACA,SACA,UACkB;AAClB,MAAI,CAAC,QAAQ;AACX,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,aAAa,aAAa;AACxC,QAAI;AACF,YAAM,UAAkC;AAAA,QACtC,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAEA,UAAI,CAAC,SAAS;AACZ,oBAAY,KAAK,cAAc;AAAA,UAC7B,OAAO;AAAA,QACT,CAAC;AACD,eAAO;AAAA,MACT;AAEA,YAAM,YAAY,GAAG,QAAQ,QAAQ,QAAQ,EAAE,CAAC;AAEhD,YAAM,WAAW,MAAM,MAAM,WAAW;AAAA,QACtC,QAAQ;AAAA,QACR;AAAA,MACF,CAAC;AAED,aAAO,SAAS;AAAA,IAClB,SAAS,OAAO;AACd,kBAAY,KAAK,cAAc;AAAA,QAC7B,OAAO;AAAA,QACP,SAAS,OAAO,KAAK;AAAA,MACvB,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,eAAoB;AAAA,IACxB;AAAA,IACA,yBAAyB;AAAA,IACzB,YAAY;AAAA,IACZ,gBAAgB;AAAA,MACd,cAAc;AAAA,IAChB;AAAA,EACF;AAGA,MACE,YACC,aAAa,eACZ,aAAa,cACb,aAAa,YACf;AACA,iBAAa,UAAU;AAAA,EACzB;AAEA,QAAM,YAAY,IAAI,UAAU,YAAY;AAE5C,MAAI;AACF,UAAM;AAAA,MACJ,YAAY;AACV,cAAM,QAAQ;AACd,cAAM,WAA2B,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AACnE,cAAM,UAAU,SAAS,OAAO;AAAA,UAC9B;AAAA,UACA,YAAY;AAAA;AAAA,UACZ;AAAA,UACA,aAAa;AAAA,UACb,UAAU,YAAY;AAAA,QACxB,CAAC;AACD,eAAO;AAAA,MACT;AAAA,MACA,EAAE,YAAY,EAAE;AAAA;AAAA,IAClB;AACA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,aAAS,KAAK;AAEd,QACE,iBAAiB,SACjB,MAAM,QAAQ;AAAA,MACZ;AAAA,IACF,GACA;AACA,aAAO;AAAA,IACT;AACA,UAAM;AAAA,EACR;AACF;AAEA,SAAS,yCACP,UAIE;AACF,QAAM,iBAGA,CAAC;AAEP,QAAM,cAAqE,CAAC;AAE5E,aAAW,WAAW,UAAU;AAC9B,QAAI,gBAAgB,CAAC;AACrB,QAAI,OAAO,QAAQ,QAAQ,YAAY,UAAU;AAC/C,sBAAgB;AAAA,QACd;AAAA,UACE,MAAM;AAAA,UACN,MAAM,QAAQ,QAAQ;AAAA,QACxB;AAAA,MACF;AAAA,IACF,WAAW,CAAC,MAAM,QAAQ,QAAQ,QAAQ,OAAO,GAAG;AAClD,sBAAgB,CAAC,QAAQ,QAAQ,OAAO;AAAA,IAC1C,OAAO;AACL,sBAAgB,QAAQ,QAAQ;AAAA,IAClC;AAEA,eAAW,SAAS,eAAe;AACjC,UAAI,MAAM,SAAS,QAAQ;AACzB,uBAAe,KAAK;AAAA,UAClB,MAAM,QAAQ,QAAQ;AAAA,UACtB,SAAS,MAAM;AAAA,QACjB,CAAC;AAAA,MACH,WAAW,MAAM,SAAS,YAAY;AACpC,uBAAe,KAAK;AAAA,UAClB,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY;AAAA,YACV;AAAA,cACE,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,MAAM;AAAA,gBACZ,WAAW,KAAK,UAAU,MAAM,KAAK;AAAA,cACvC;AAAA,cACA,IAAI,MAAM;AAAA,YACZ;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH,WAAW,MAAM,SAAS,eAAe;AAEvC,YAAI,cAAc,MAAM;AACxB,YAAI,OAAO,gBAAgB,UAAU;AAEnC,wBAAc,KAAK,UAAU,WAAW;AAAA,QAC1C;AAEA,oBAAY,MAAM,WAAW,IAAI;AAAA,UAC/B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,cAAc,MAAM;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,gBAGA,CAAC;AAEP,aAAW,WAAW,gBAAgB;AACpC,kBAAc,KAAK,OAAO;AAE1B,QAAI,gBAAgB,WAAW,QAA
Q,YAAY;AACjD,iBAAW,YAAY,QAAQ,YAAY;AACzC,YAAI,YAAY,SAAS,EAAE,GAAG;AAC5B,wBAAc,KAAK,YAAY,SAAS,EAAE,CAAC;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eACP,UACA,MAC8B;AAC9B,QAAM,SAAS,CAAC,KAAU,UAAmD;AAC3E,UAAM,EAAE,GAAG,IAAI;AACf,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK,GAAG;AAChD,UAAI,IAAI,GAAG,MAAM,UAAa,IAAI,GAAG,MAAM,MAAM;AAC/C,YAAI,GAAG,IAAI;AAEX,YAAI,MAAM,QAAQ,IAAI,GAAG,CAAC,GAAG;AAC3B,qBAAW,OAAO,IAAI,GAAG,GAAG;AAC1B,mBAAO,IAAI;AAAA,UACb;AAAA,QACF;AAAA,MACF,WAAW,OAAO,IAAI,GAAG,MAAM,YAAY,OAAO,UAAU,UAAU;AACpE,YAAI,GAAG,KAAK;AAAA,MACd,WAAW,OAAO,IAAI,GAAG,MAAM,YAAY,OAAO,UAAU,UAAU;AACpE,YAAI,GAAG,IAAI;AAAA,MACb,WAAW,MAAM,QAAQ,IAAI,GAAG,CAAC,KAAK,MAAM,QAAQ,KAAK,GAAG;AAC1D,cAAM,WAAW,IAAI,GAAG;AACxB,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAM,EAAE,OAAO,GAAG,UAAU,IAAI,MAAM,CAAC;AACvC,cAAI,QAAQ,SAAS,SAAS,GAAG;AAC/B,kBAAM,IAAI;AAAA,cACR,mFAAmF,QAAQ,WAAW,KAAK;AAAA,YAC7G;AAAA,UACF;AACA,mBAAS,KAAK,IAAI,OAAO,SAAS,KAAK,GAAG,SAAS;AAAA,QACrD;AAAA,MACF,WAAW,OAAO,IAAI,GAAG,MAAM,YAAY,OAAO,UAAU,UAAU;AACpE,YAAI,GAAG,IAAI,OAAO,IAAI,GAAG,GAAG,KAAK;AAAA,MACnC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,MAAI,CAAC,QAAQ;AAEX,WAAO;AAAA,EACT;AACA,SAAO,OAAO,UAAU,OAAO,KAAK;AACtC;AACA,eAAe,oBACb,QACA,QACgC;AAChC,QAAM,kBAAkB,KAAK,IAAI;AACjC,MAAI;AACJ,MAAI,aAAa;AACjB,MAAI,aAAa;AAEjB,cAAY,IAAI,uBAAuB;AAAA,IACrC,iBAAiB,OAAO,eAAe;AAAA,EACzC,CAAC;AAED,MAAI,UAAU,CAAC;AAEf,MAAI,IAAI,OAAO,SAAS,QAAQ;AAChC,MAAI;AACF,qBAAiB,SAAS,QAAQ;AAChC,UAAI,QAAQ,SAAS;AACnB,oBAAY,KAAK,yBAAyB;AAAA,UACxC;AAAA,UACA,WAAW,KAAK,IAAI;AAAA,QACtB,CAAC;AACD,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAEA;AAEA,UAAI;AACF,YAAI,CAAC,IAAI;AACP,eAAK,MAAM;AACX,sBAAY,IAAI,6BAA6B;AAAA,YAC3C;AAAA,YACA,aAAa,OAAO,UAAU;AAAA,UAChC,CAAC;AAAA,QACH;AACA,YAAI,CAAC,OAAO;AACV,kBAAQ,MAAM;AACd,sBAAY,IAAI,gCAAgC;AAAA,YAC9C;AAAA,YACA,aAAa,OAAO,UAAU;AAAA,UAChC,CAAC;AAAA,QACH;AACA,YAAI,CAAC,SAAS;AACZ,oBAAU,MAAM;AAAA,QAClB;AACA,YAAI,CAAC,QAAQ;AACX,mBAAS,MAAM;AAAA,QACjB;AACA,YAAI,CAAC,SAAS,MAAM,OAAO;AACzB,kBAAQ,MAAM;AAEd,4BAAkB;AAAA,YAChB,aAAa,MAAM,MAAM;AAAA,YACzB,cAAc,MAAM,MAAM;AAAA,UAC5B,CAAC;AAAA,QACH;AAEA,kBAAU,eAAe,SAAS,KAAK;AAEvC,YAAI,OAAO,UAAU,CAAC,GAAG,OAAO,SAAS;AACvC,cAAI,CAAC,QAAQ;AACX,qBAAS,KAAK,IAAI,IAAI;AACtB,wBAAY,IAAI,6BAA6B;AAAA,cAC3C,QAAQ,OAAO,MAAM;AAAA,cACrB,aAAa,OAAO,UAAU;AAAA,YAChC,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,SAAS,YAAY;AACnB;AACA,oBAAY,MAAM,6BAA6B;AAAA,UAC7C,aAAa,OAAO,UAAU;AAAA,UAC9B,cACE,sBAAsB,QAClB,WAAW,UACX,OAAO,UAAU;AAAA,UACvB,WACE,sBAAsB,QAClB,WAAW,YAAY,OACvB,OAAO;AAAA,QACf,CAAC;AAAA,MAEH;AAAA,IACF;AAEA,gBAAY,IAAI,0BAA0B;AAAA,MACxC,aAAa,OAAO,UAAU;AAAA,MAC9B,YAAY,OAAO,UAAU;AAAA,MAC7B,eAAe,OAAO,KAAK,IAAI,IAAI,eAAe;AAAA,MAClD,QAAQ,OAAO,UAAU,CAAC;AAAA,MAC1B,gBAAgB,MAAM;AAAA,IACxB,CAAC;AAAA,EACH,SAAS,aAAa;AACpB,gBAAY,MAAM,6BAA6B;AAAA,MAC7C,aAAa,OAAO,UAAU;AAAA,MAC9B,YAAY,OAAO,UAAU;AAAA,MAC7B,cACE,uBAAuB,QACnB,YAAY,UACZ,OAAO,WAAW;AAAA,MACxB,WACE,uBAAuB,QACnB,YAAY,YAAY,OACxB,OAAO;AAAA,IACf,CAAC;AACD,UAAM;AAAA,EACR;AACA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACP;AAAA,QACE,OAAO;AAAA,QACP;AAAA,QACA,eAAe;AAAA,QACf,UAAU;AAAA,MACZ;AAAA,IACF;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,iCACP,UACA,QACA;AACA,MAAI,gBAAgC,CAAC;AACrC,QAAM,UAAU,SAAS,UAAU,CAAC,GAAG;AACvC,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS,CAAC;AAAA,MACV,aAAa,SAAS,UAAU,CAAC,GAAG;AAAA,MACpC,MAAM;AAAA,MACN,OAAO,SAAS;AAAA,IAClB;AAAA,EACF;AAEA,MAAI,SAAS,YAAY;AACvB,eAAW,YAAY,QAAQ,YAAY;AACzC,YAAM,OAAO,SAAS;AACtB,YAAM,WAAW,MAAM;AACvB,UAAI,WAAW,CAAC;AAChB,UAAI,aAA4B;AAChC,UAAI;AACF,mBAAW,MAAM,YAAY,KAAK,MAAM,KAAK,SAAS,IAAI,CAAC;AAAA,MAC7D,SAAS,GA
AG;AAEV,qBAAa,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AACtD,oBAAY,KAAK,8BAA8B;AAAA,UAC7C;AAAA,UACA,cAAc,MAAM,WAAW,UAAU,GAAG,GAAG;AAAA;AAAA,UAC/C,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AAGA,UAAI,cAAc,OAAO,KAAK,QAAQ,EAAE,WAAW,GAAG;AACpD,oBAAY,MAAM,6BAA6B;AAAA,UAC7C;AAAA,UACA;AAAA,UACA,YACE;AAAA,QACJ,CAAC;AAAA,MACH;AAEA,oBAAc,KAAK;AAAA,QACjB,MAAM;AAAA,QACN,OAAO;AAAA,QACP,MAAM;AAAA,QACN,IAAI,SAAS,IAAI,SAAS,IAAI,SAAS,KAAK,OAAO;AAAA,MACrD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAK,QAAgB,WAAW;AAC9B,kBAAc,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,UAAW,QAAgB;AAAA,MAC3B,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAGA,MAAK,QAAgB,mBAAmB;AACtC,kBAAc,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,UAAW,QAAgB;AAAA,MAC3B,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ,SAAS;AACnB,kBAAc,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,MAAM,SAAS;AAAA,MACf,WAAW,CAAC;AAAA,IACd,CAAC;AAAA,EACH;AAEA,QAAM,eAAe;AAAA,IACnB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa,SAAS,UAAU,CAAC,GAAG;AAAA,IACpC,MAAM;AAAA,IACN,OAAO,SAAS;AAAA,EAClB;AAEA,SAAO;AACT;AAEA,IAAI,kBACF;AAKK,SAAS,mBACd,OACgD;AAChD,QAAM,SAAS,gBAAgB;AAC/B,QAAM,WAAW,OAAO;AAGxB,MAAI,mBAAmB,UAAU;AAE/B,sBAAkB;AAAA,EACpB;AAEA,MAAI,iBAAiB;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,wBAAwB,KAAK;AAE5C,QAAM,iBAA4C;AAAA,IAChD,SAAS;AAAA,IACT,cAAc;AAAA,EAChB;AACA,MAAI,QAAQ,IAAI,sBAAsB;AACpC,mBAAe,eAAe,IAC5B,UAAU,QAAQ,IAAI,oBAAoB;AAAA,EAC9C;AAEA,QAAM,OAAO;AAAA,IACX;AAAA,IACA,YAAY;AAAA;AAAA,IACZ,SAAS,SAAS,QAAQ,IAAI,kBAAkB,OAAO,KAAK,GAAI,GAAG,EAAE;AAAA,EACvE;AACA,MAAI,aAAa;AACf,UAAM,SAAS,IAAI,iBAAiB,IAAI;AACxC,sBAAkB;AAClB,WAAO;AAAA,EACT;AACA,MAAI,YAAY;AACd,UAAM,aAAa;AAAA,MACjB,GAAG;AAAA,MACH,QAAQ,UAAU,QAAQ,IAAI,mBAAmB;AAAA,IACnD;AACA,UAAM,SAAS,IAAI,gBAAgB,UAAU;AAC7C,sBAAkB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,eAAe,gBAAgB;AACrC,QAAM,eAAe,aAAa,SAAS,MAAM;AAEjD,MAAI;AACJ,MAAI;AAEJ,MAAI,cAAc;AAChB,aAAS,aAAa,UAAU;AAChC,cAAU,aAAa;AAAA,EACzB,OAAO;AAEL,aAAS,mBAAmB;AAC5B,cAAU;AAAA,EACZ;AAEA,MAAI,QAAQ,IAAI,cAAc,SAAS,CAAC,UAAU,aAAa,aAAa;AAC1E,YAAQ;AAAA,MACN,MAAM;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAIA,QAAM,eAAe;AAAA,IACnB;AAAA,IACA,yBAAyB;AAAA,IACzB,GAAG;AAAA,IACH,GAAI,WAAW,EAAE,QAAQ;AAAA;AAAA,EAC3B;AAEA,oBAAkB,IAAI,UAAU,YAAY;AAC5C,SAAO;AACT;AAKO,SAAS,uBAA6B;AAC3C,oBAAkB;AACpB;AAoCA,SAAS,4BACP,cACA,eACmE;AACnE,MAAI,CAAC,wBAAwB;AAC3B,WAAO,EAAE,cAAc,cAAc;AAAA,EACvC;AAEA,QAAM,iBAAiB;AACvB,MAAI,kBAAkB;AAGtB,QAAM,wBAAwB,aAAa,IAAI,CAAC,OAAO,WAAW;AAChE,QAAI,kBAAkB,kBAAkB,MAAM,KAAK,SAAS,KAAM;AAChE;AACA,aAAO;AAAA,QACL,GAAG;AAAA,QACH,eAAe,EAAE,MAAM,YAAqB;AAAA,MAC9C;AAAA,IACF;AACA,UAAM,EAAE,eAAe,GAAG,kBAAkB,IAAI;AAChD,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,yBAAyB,cAAc,IAAI,CAAC,SAAS,iBAAiB;AAC1E,QAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAClC,YAAM,mBAAmB,QAAQ,QAAQ;AAAA,QACvC,CAAC,cAAc,eAAe;AAE5B,gBAAM,cACJ,kBAAkB,kBAClB,aAAa,SAAS,UACtB,OAAO,aAAa,SAAS;AAAA,WAE5B,aAAa,KAAK,SAAS;AAAA,UAEzB,iBAAiB,cAAc,SAAS,KACvC,eAAe,QAAQ,QAAQ,SAAS,KACxC,aAAa,KAAK,SAAS;AAEjC,cAAI,aAAa;AACf;AACA,mBAAO;AAAA,cACL,GAAG;AAAA,cACH,eAAe,EAAE,MAAM,YAAqB;AAAA,YAC9C;AAAA,UACF;AAGA,gBAAM,EAAE,eAAe,GAAG,kBAAkB,IAAI;AAChD,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS;AAAA,MACX;AAAA,IACF;AAEA,WAAO;AAAA,EACT,CAAC;AAED,SAAO;AAAA,IACL,cAAc;AAAA,IACd,eAAe;AAAA,EACjB;AACF;AAEO,SAAS,0BACd,SACA,WAAW,OACG;AACd,MAAI,UAAU;AACZ,QAAI,OAAO,QAAQ,QAAQ,YAAY,UAAU;AAC/C,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,QAAQ,QAAQ;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ,QAAQ,IAAI,QAAM,EAAE,GAAG,EAAE,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS,QAAQ,QAAQ;AAAA,EAC3B;AACF;AAEO,SAAS,+BACd,SACA,WAAW,OACG;AACd,MAAI,UAAU;AACZ,QAAI,OAAO,QAAQ,QAAQ,YAAY,
UAAU;AAC/C,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,QAAQ,QAAQ;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS,QAAQ,QAAQ,QAAQ,IAAI,QAAM,EAAE,GAAG,EAAE,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS,QAAQ,QAAQ;AAAA,EAC3B;AACF;AAEA,SAAS,qBAAqB,cAAkC;AAG9D,QAAM,yBAAyB,aAAa,CAAC,KAAK;AAClD,QAAM,mBAAmB,aAAa,MAAM,CAAC;AAC7C,SAAO,CAAC,wBAAwB,iBAAiB,KAAK,IAAI,CAAC,EAAE,OAAO,OAAO;AAC7E;AAEA,eAAsB,SACpB,UACA,cACA,mBACA,OACA,QACA,SAM2B;AAC3B,QAAM,eAAe,gBAAgB;AACrC,QAAM,kBAAkB,aAAa,qBAAqB,QAAQ,KAAK;AAEvE,MAAI,CAAC,gBAAgB,WAAW,CAAC,gBAAgB,SAAS;AACxD,UAAM,IAAI;AAAA,MACR,gBAAgB,SAAS,4BAA4B,QAAQ,KAAK;AAAA,IACpE;AAAA,EACF;AAEA,QAAM,eAAe,gBAAgB;AACrC,QAAM,gBAAgB,aAAa;AAGnC,QAAM,iBAAiB,QAAQ;AAC/B,MAAI,kBAAkB,CAAC,eAAe,eAAe;AACnD,UAAM,iBAAiB;AAAA,MACrB,eAAe;AAAA,MACf,eAAe;AAAA,IACjB;AACA,UAAM,qBACJ,qBAAqB,sBAAsB,cAAc;AAE3D,mBAAe,gBAAgB;AAAA,MAC7B;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,cAAY,IAAI,kBAAkB;AAAA,IAChC,YAAY,QAAQ;AAAA,IACpB,mBAAmB;AAAA,IACnB,UAAU,aAAa;AAAA,IACvB,WAAW,CAAC,QAAQ,QAAQ,aAAa,OAAO,EAAE,SAAS,QAAQ,KAAK;AAAA,IACxE,kBAAkB,CAAC,CAAC,gBAAgB;AAAA,IACpC,gBAAgB,gBAAgB,eAAe;AAAA,IAC/C,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,kBAAkB;AACzC,cAAY,IAAI,qBAAqB;AAAA,IACnC,cAAc,SAAS;AAAA,IACvB,oBAAoB,aAAa,KAAK,GAAG,EAAE;AAAA,IAC3C,WAAW,MAAM;AAAA,IACjB,OAAO;AAAA,IACP,oBAAoB,QAAQ;AAAA,IAC5B,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,YAAU,UAAU;AAEpB,MAAI;AACF,UAAM,SAAS,MAAM;AAAA,MAAQ;AAAA,MAAU,MACrC;AAAA,QACE;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,EAAE,GAAG,SAAS,OAAO,eAAe,cAAc,eAAe;AAAA;AAAA,MACnE;AAAA,IACF;AAEA,gBAAY,IAAI,uBAAuB;AAAA,MACrC,SAAS,OAAO;AAAA,MAChB,YAAY,OAAO;AAAA,MACnB,gBAAgB,OAAO,QAAQ,SAAS,UAAU;AAAA,MAClD,WAAW,kBAAkB,GAAG;AAAA,IAClC,CAAC;AAGD,QAAI,gBAAgB,eAAe,kBAAkB,OAAO,YAAY;AACtE,2BAAqB;AAAA,QACnB,eAAe,cAAc;AAAA,QAC7B,OAAO;AAAA,MACT;AAEA,kBAAY,IAAI,0BAA0B;AAAA,QACxC,gBAAgB,eAAe,cAAc;AAAA,QAC7C,YAAY,OAAO;AAAA,QACnB,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd;AAAA,MACE;AAAA,MACA;AAAA,QACE,cAAc,SAAS;AAAA,QACvB,oBAAoB,aAAa,KAAK,GAAG,EAAE;AAAA,QAC3C,OAAO,QAAQ;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,OAAO;AAAA,MACT;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEA,UAAM;AAAA,EACR;AACF;AAEO,SAAS,8BACd,cACA,SACA,SACA,uBAAuB,OACwB;AAE/C,QAAM,iBAAiB,CAAC,GAAG,YAAY;AACvC,MAAI,YAAY;AAGhB,QAAM,eAAe,gBAAgB;AACrC,QAAM,eAAe,aAAa,SAAS,MAAM;AACjD,MAAI,gBAAgB,YAAY,aAAa,SAAS,GAAG;AAEvD,UAAM,qBAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,mBAAe,KAAK,GAAG,kBAAkB;AAAA,EAC3C;AAGA,QAAM,aAAa,OAAO,QAAQ,OAAO,EAAE,SAAS;AAEpD,MAAI,YAAY;AAEd,QAAI,CAAC,sBAAsB;AACzB,YAAM,eAAe,qBAAqB;AAC1C,UAAI,cAAc;AAEhB,uBAAe,KAAK,2CAAkB;AACtC,uBAAe,KAAK,YAAY;AAChC,uBAAe,KAAK,SAAS;AAAA,MAC/B;AAAA,IACF;AAGA,UAAM,mBAAmB,wBAAwB,YAAY,OAAO;AACpE,QAAI,iBAAiB,SAAS,GAAG;AAC/B,kBAAY,iBAAiB,IAAI,OAAK,EAAE,OAAO,EAAE,KAAK,IAAI,IAAI;AAAA,IAChE;AAGA,mBAAe;AAAA,MACb;AAAA;AAAA;AAAA,IACF;AAGA,UAAM,kBAAkB,OAAO;AAAA,MAC7B,OAAO,QAAQ,OAAO,EAAE;AAAA,QACtB,CAAC,CAAC,GAAG,MAAM,QAAQ,iBAAiB,QAAQ;AAAA,MAC9C;AAAA,IACF;AAEA,mBAAe;AAAA,MACb,GAAG,OAAO,QAAQ,eAAe,EAAE;AAAA,QACjC,CAAC,CAAC,KAAK,KAAK,MAAM,kBAAkB,GAAG,KAAK,KAAK;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,cAAc,gBAAgB,UAAU;AACnD;AAEA,eAAe,0BACb,UACA,cACA,mBACA,OACA,QACA,SAO2B;AAC3B,QAAM,SAAS,gBAAgB;AAC/B,QAAM,eAAe,gBAAgB;AACrC,QAAM,iBAAiB,QAAQ;AAE/B,QAAM,eAAe,QAAQ,gBAAgB,aAAa,SAAS,MAAM;AACzE,MAAI;AAEJ,MAAI,cAAc;AAChB,eAAW,aAAa,YAAY,OAAO,mBAAmB;AAAA,EAChE,OAAO;AACL,eAAW,OAAO,mBAAmB;AAAA,EACvC;AAGA,MACE,aAAa,eACb,aAAa,cACb,aAAa,WACb;AACA,WAAO;AAA
A,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,EAAE,GAAG,SAAS,cAAc,eAAe;AAAA,IAC7C;AAAA,EACF;AAGA,SAAO,YAAY,UAAU,cAAc,mBAAmB,OAAO,QAAQ;AAAA,IAC3E,GAAG;AAAA,IACH;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEA,eAAe,qBACb,UACA,cACA,mBACA,OACA,QACA,SAO2B;AAC3B,QAAM,SAAS,gBAAgB;AAC/B,QAAM,eAAe,gBAAgB;AACrC,QAAM,iBAAiB,SAAS;AAEhC,QAAM,eAAe,SAAS,gBAAgB,aAAa,SAAS,MAAM;AAC1E,MAAI;AACJ,MAAI;AACJ,MAAI;AAGJ,cAAY,IAAI,0BAA0B;AAAA,IACxC,mBAAmB,CAAC,CAAC;AAAA,IACrB,gBAAgB,cAAc;AAAA,IAC9B,kBAAkB,cAAc;AAAA,IAChC,uBAAuB,cAAc;AAAA,IACrC,sBAAsB,cAAc;AAAA,IACpC,qBAAqB,cAAc;AAAA,IACnC,0BAA0B,CAAC,CAAC,cAAc;AAAA,IAC1C,cAAc,SAAS;AAAA,IACvB,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,MAAI,cAAc;AAEhB,YAAQ,aAAa;AACrB,eAAW,aAAa,YAAY,OAAO,mBAAmB;AAG9D,QACE,aAAa,aAAa,eAC1B,aAAa,aAAa,cAC1B,aAAa,aAAa,WAC1B;AACA,YAAM,eAAoB;AAAA,QACxB,QAAQ,aAAa;AAAA,QACrB,yBAAyB;AAAA,QACzB,YAAY;AAAA,QACZ,SAAS,SAAS,QAAQ,IAAI,kBAAkB,OAAO,KAAK,GAAI,GAAG,EAAE;AAAA,QACrE,gBAAgB;AAAA,UACd,SAAS;AAAA,UACT,cAAc;AAAA,QAChB;AAAA,MACF;AAGA,UAAI,aAAa,SAAS;AACxB,qBAAa,UAAU,aAAa;AAAA,MACtC;AAEA,kBAAY,IAAI,UAAU,YAAY;AAAA,IACxC,OAAO;AAEL,kBAAY,mBAAmB,KAAK;AAAA,IACtC;AAAA,EACF,OAAO;AAEL,UAAM,eAAe;AAAA,MACnB,oBAAoB,CAAC,CAAC;AAAA,MACtB,uBAAuB,cAAc;AAAA,MACrC,gBAAgB,SAAS;AAAA,MACzB,WAAW,kBAAkB,GAAG;AAAA,IAClC;AACA,gBAAY,MAAM,4BAA4B,YAAY;AAC1D,UAAM,IAAI;AAAA,MACR,iHAAiH,KAAK,UAAU,YAAY,CAAC;AAAA,IAC/I;AAAA,EACF;AAGA,MAAI,SAAS,qBAAqB;AAEhC,UAAM,CAAC,mBAAmB,IAAI,qBAAqB,YAAY;AAE/D,mBAAe,CAAC,sBAAsB,GAAG,GAAG,YAAY;AAAA,EAC1D;AAEA,QAAM,SAA2B,qBAAqB,YAAY,EAAE;AAAA,IAClE,QAAM;AAAA,MACJ,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,MAAM;AAAA,MACJ,OAAM,UACH;AAAA,QACC,MAAM,KAAK;AAAA;AAAA,QAEX,aAAa,MAAM,wBAAwB,IAAI;AAAA,QAC/C,cACE,qBAAqB,QAAQ,KAAK,kBAC9B,KAAK,kBACL,gBAAgB,KAAK,WAAW;AAAA,MACxC;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,oBAAoB,oBAAoB,QAAQ;AAGtD,QAAM,EAAE,cAAc,iBAAiB,eAAe,kBAAkB,IACtE,4BAA4B,QAAQ,iBAAiB;AACvD,QAAM,wBAAwB,KAAK,IAAI;AAGvC,8BAA4B;AAAA,IAC1B,YAAY,aAAa,KAAK,IAAI;AAAA,IAClC,cAAc,qBAAqB,KAAK;AAAA,IACxC,WAAW,CAAC;AAAA;AAAA,IACZ,aAAa,aAAa,KAAK,IAAI;AAAA,EACrC,CAAC;AAED,MAAI,QAAQ,KAAK,IAAI;AACrB,MAAI,gBAAgB;AACpB,MAAI;AAEJ,MAAI;AACF,eAAW,MAAM;AAAA,MACf,OAAM,YAAW;AACf,wBAAgB;AAChB,gBAAQ,KAAK,IAAI;AAEjB,cAAM,SAAsD;AAAA,UAC1D;AAAA,UACA,YAAY,wBAAwB,YAAY;AAAA,UAChD,UAAU;AAAA,UACV,QAAQ;AAAA,UACR,OAAO,YAAY,SAAS,IAAI,cAAc;AAAA,UAC9C,aAAa,YAAY,SAAS,IAAI,EAAE,MAAM,OAAO,IAAI;AAAA,QAC3D;AAEA,YAAI,oBAAoB,GAAG;AACzB;AAAC,UAAC,OAAe,gBAAgB;AAAA,YAC/B,kBAAkB;AAAA,UACpB;AACC,UAAC,OAAe,WAAW,EAAE,YAAY,kBAAkB;AAAA,QAC9D;AAGA,oBAAY,IAAI,sCAAsC;AAAA,UACpD,UAAU,cAAc,WAAW;AAAA,UACnC;AAAA,UACA;AAAA,UACA,kBAAkB,CAAC,CAAC,cAAc;AAAA,UAClC,cAAc,cAAc,SACxB,aAAa,OAAO,UAAU,GAAG,CAAC,IAClC;AAAA,UACJ,WAAW,OAAO;AAAA,UAClB,aAAa;AAAA,UACb;AAAA,UACA,cAAc,OAAO,UAAU,UAAU;AAAA,UACzC,YAAY;AAAA,UACZ,YAAY,YAAY;AAAA,UACxB,gBAAgB;AAAA,UAChB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,UAClC,gBAAgB,cAAc;AAAA,UAC9B,kBAAkB,cAAc;AAAA,QAClC,CAAC;AAED,YAAI,OAAO,QAAQ;AACjB,gBAAM,SAAS,MAAM,UAAU,KAAK,SAAS;AAAA,YAC3C;AAAA,cACE,GAAG;AAAA,cACH,QAAQ;AAAA,YACV;AAAA,YACA;AAAA,cACE;AAAA;AAAA,YACF;AAAA,UACF;AAEA,cAAI,gBAA4B;AAChC,cAAI,oBAAyB;AAC7B,gBAAM,gBAAuB,CAAC;AAC9B,gBAAM,mBAAmB,oBAAI,IAAoB;AACjD,cAAI,QAAa;AACjB,cAAI,aAA4B;AAChC,cAAI,eAA8B;AAElC,2BAAiB,SAAS,QAAQ;AAChC,gBAAI,OAAO,SAAS;AAClB,0BAAY,KAAK,kBAAkB;AAAA,gBACjC,WAAW,MAAM;AAAA,gBACjB,WAAW,KAAK,IAAI;AAAA,cACtB,CAAC;AACD,oBAAM,IAAI,MAAM,uBAAuB;AAAA,YACzC;AAEA,oBAAQ,MAAM,MAAM;AAAA,cAClB,KAAK;AACH,oCAAoB;AACpB,gCAAgB;AAAA,kBACd,GAAG,MAAM;AAAA,kBACT,SAAS,CAAC;AAAA;AAAA,gBACZ;AAEA,kCAAkB;AAAA,kBAChB,aAAa,MAAM,QAAQ,OAAO;AAAA,kBAClC,eAAe;AAAA;AAAA,gBACjB,CAAC;AACD;A
AAA,cAEF,KAAK;AACH,8BAAc,MAAM,KAAK,IAAI,EAAE,GAAG,MAAM,cAAc;AAEtD,oBAAI,MAAM,cAAc,SAAS,YAAY;AAC3C,mCAAiB,IAAI,MAAM,OAAO,EAAE;AAAA,gBACtC;AAEA,oBAAI,MAAM,cAAc,SAAS,YAAY;AAC3C,wBAAM,oBAAoB,KAAK,IAAI;AACnC,oCAAkB;AAAA,oBAChB,OAAO;AAAA,oBACP,YAAY;AAAA,oBACZ,mBAAmB;AAAA,oBACnB;AAAA;AAAA,kBACF,CAAC;AACD,8BAAY,KAAK,wBAAwB;AAAA,oBACvC,OAAO,MAAM;AAAA,oBACb,WAAW;AAAA,oBACX,WAAW;AAAA,kBACb,CAAC;AAAA,gBACH;AACA;AAAA,cAEF,KAAK;AACH,sBAAM,aAAa,MAAM;AAGzB,oBAAI,CAAC,cAAc,UAAU,GAAG;AAC9B,gCAAc,UAAU,IAAI;AAAA,oBAC1B,MACE,MAAM,MAAM,SAAS,eAAe,SAAS;AAAA,oBAC/C,MAAM,MAAM,MAAM,SAAS,eAAe,KAAK;AAAA,kBACjD;AACA,sBAAI,MAAM,MAAM,SAAS,oBAAoB;AAC3C,qCAAiB,IAAI,YAAY,EAAE;AAAA,kBACrC;AAAA,gBACF;AAEA,oBAAI,MAAM,MAAM,SAAS,cAAc;AACrC,gCAAc,UAAU,EAAE,QAAQ,MAAM,MAAM;AAE9C,wBAAM,eAAe,kBAAkB;AACvC,oCAAkB;AAAA,oBAChB,gBACG,aAAa,iBAAiB,KAC/B,MAAM,MAAM,KAAK;AAAA,kBACrB,CAAC;AAAA,gBACH,WAAW,MAAM,MAAM,SAAS,oBAAoB;AAClD,wBAAM,gBAAgB,iBAAiB,IAAI,UAAU,KAAK;AAC1D,mCAAiB;AAAA,oBACf;AAAA,oBACA,gBAAgB,MAAM,MAAM;AAAA,kBAC9B;AAAA,gBACF,WAES,MAAM,MAAM,SAAS,kBAAkB;AAC9C,wBAAM,kBACJ,cAAc,UAAU,EAAE,YAAY;AACxC,wBAAM,kBACJ,mBAAmB,MAAM,MAAM,YAAY;AAC7C,gCAAc,UAAU,IAAI;AAAA,oBAC1B,GAAG,cAAc,UAAU;AAAA,oBAC3B,UAAU;AAAA,kBACZ;AAGA,oCAAkB;AAAA,oBAChB,OAAO;AAAA,oBACP,YAAY,gBAAgB;AAAA,oBAC5B,mBAAmB;AAAA,kBACrB,CAAC;AAED,8BAAY,MAAM,kBAAkB;AAAA,oBAClC,OAAO;AAAA,oBACP,YAAY,gBAAgB;AAAA,kBAC9B,CAAC;AAAA,gBACH;AACA;AAAA,cAEF,KAAK;AACH,oBAAI,MAAM,MAAM;AACd,+BAAa,MAAM,MAAM;AAC3B,oBAAI,MAAM,MAAM;AACd,iCAAe,MAAM,MAAM;AAC7B,oBAAI,MAAM,OAAO;AACf,0BAAQ,EAAE,GAAG,OAAO,GAAG,MAAM,MAAM;AAGnC,oCAAkB;AAAA,oBAChB,cAAc,MAAM,MAAM;AAAA,kBAC5B,CAAC;AAAA,gBACH;AACA;AAAA,cAEF,KAAK;AACH,sBAAM,YAAY,MAAM;AACxB,sBAAM,QAAQ,cAAc,SAAS;AAGrC,oBAAI,OAAO,SAAS,YAAY;AAC9B,wBAAMA,eAAc,kBAAkB;AACtC,wBAAM,qBAAqBA,aAAY,oBACnC,KAAK,IAAI,IAAIA,aAAY,oBACzB;AACJ,oCAAkB;AAAA,oBAChB,OAAO;AAAA,oBACP;AAAA;AAAA,kBACF,CAAC;AACD,8BAAY,KAAK,2BAA2B;AAAA,oBAC1C,OAAO;AAAA,oBACP,iBAAiB,MAAM,UAAU,UAAU;AAAA,oBAC3C,YAAY;AAAA,kBACd,CAAC;AAAA,gBACH;AAEA,oBACE,OAAO,SAAS,cAChB,iBAAiB,IAAI,SAAS,GAC9B;AACA,wBAAM,UAAU,iBAAiB,IAAI,SAAS;AAC9C,sBAAI,SAAS;AACX,wBAAI;AACF,4BAAM,QAAQ,KAAK,MAAM,OAAO;AAAA,oBAClC,SAAS,OAAO;AACd,4BAAM,WACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACvD,kCAAY,MAAM,oBAAoB;AAAA,wBACpC,YAAY;AAAA,wBACZ,SACE,QAAQ,SAAS,MACb,QAAQ,MAAM,GAAG,GAAG,IAAI,QACxB;AAAA,wBACN,OAAO;AAAA,sBACT,CAAC;AAGD,4BAAM,QAAQ;AAAA,wBACZ,iBAAiB;AAAA,wBACjB,mBAAmB,sBAAsB,QAAQ;AAAA,wBACjD,sBACE,QAAQ,SAAS,MACb,QAAQ,MAAM,GAAG,GAAG,IAAI,QACxB;AAAA,sBACR;AAAA,oBACF;AACA,qCAAiB,OAAO,SAAS;AAAA,kBACnC;AAAA,gBACF;AACA;AAAA,cAEF,KAAK;AAEH,iCAAiB,MAAM;AACvB;AAAA,YACJ;AAEA,gBAAI,MAAM,SAAS,gBAAgB;AACjC;AAAA,YACF;AAAA,UACF;AAEA,cAAI,CAAC,iBAAiB,CAAC,mBAAmB;AACxC,kBAAM,IAAI,MAAM,+CAA+C;AAAA,UACjE;AAGA,0BAAgB;AAAA,YACd,GAAG,kBAAkB;AAAA,YACrB,SAAS,cAAc,OAAO,OAAO;AAAA,YACrC,aAAa;AAAA,YACb,eAAe;AAAA,YACf,OAAO;AAAA,cACL,GAAG,kBAAkB,QAAQ;AAAA,cAC7B,GAAG;AAAA,YACL;AAAA,UACF;AAEA,iBAAO;AAAA,QACT,OAAO;AAEL,sBAAY,IAAI,0CAA0C;AAAA,YACxD,UAAU,cAAc,WAAW;AAAA,YACnC;AAAA,YACA;AAAA,YACA,kBAAkB,CAAC,CAAC,cAAc;AAAA,YAClC,cAAc,cAAc,SACxB,aAAa,OAAO,UAAU,GAAG,CAAC,IAClC;AAAA,YACJ,WAAW,OAAO;AAAA,YAClB,aAAa;AAAA,YACb,cAAc,OAAO,UAAU,UAAU;AAAA,YACzC,YAAY;AAAA,YACZ,YAAY,YAAY;AAAA,YACxB,gBAAgB;AAAA,YAChB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,YAClC,gBAAgB,cAAc;AAAA,YAC9B,kBAAkB,cAAc;AAAA,UAClC,CAAC;AAED,iBAAO,MAAM,UAAU,KAAK,SAAS,OAAO,QAAQ;AAAA,YAClD;AAAA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,MACA;AAAA,QACE;AAAA;AAAA,QAEA,SAAS,gBAAgB,UACrB,CAAC,EAAE,SAAS,YAAY,OAAO,QAAQ,MAAM;AAC3C,oCAA0B,eAAe,SAAU;AAAA,YACjD;AAAA,YACA;AAAA,YACA,cAAc,MAAM;AAAA,YACpB;AAAA,UACF,CAAC;AAAA,QACH,IACA;AAAA,MACN;AA
AA,IACF;AAEA,gBAAY,IAAI,8BAA8B;AAAA,MAC5C,SAAS,SAAS;AAAA,IACpB,CAAC;AAED,UAAM,SAAS,QAAQ,KAAK,IAAI;AAChC,UAAM,aAAa,KAAK,IAAI,IAAI;AAEhC,UAAM,UAAU,SAAS,QAAQ,IAAI,CAAC,UAAwB;AAC5D,UAAI,MAAM,SAAS,QAAQ;AACzB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM,MAAM;AAAA,QACd;AAAA,MACF,WAAW,MAAM,SAAS,YAAY;AACpC,eAAO;AAAA,UACL,MAAM;AAAA,UACN,IAAI,MAAM;AAAA,UACV,MAAM,MAAM;AAAA,UACZ,OAAO,MAAM;AAAA,QACf;AAAA,MACF;AACA,aAAO;AAAA,IACT,CAAC;AAGD,UAAM,iBAAiB,QAAQ;AAAA,MAC7B,CAAC,UACC,MAAM,SAAS;AAAA,IACnB;AACA,UAAM,cAAc,kBAAkB;AACtC,UAAM,mBACJ,eAAe,SAAS,IACpB;AAAA,MACE,WAAW,eAAe;AAAA,QACxB,CAAC,KAAK,MAAM,OAAO,EAAE,UAAU,UAAU;AAAA,QACzC;AAAA,MACF;AAAA,MACA,YAAY,YAAY,sBAAsB;AAAA,MAC9C,WAAW,YAAY,qBAAqB,KAAK,IAAI;AAAA,MACrD,YAAY;AAAA,IACd,IACA;AAEN,UAAM,mBAAqC;AAAA,MACzC,SAAS;AAAA,QACP,IAAI,SAAS;AAAA,QACb;AAAA,QACA,OAAO,SAAS;AAAA,QAChB,MAAM;AAAA,QACN,aAAa,SAAS;AAAA,QACtB,eAAe,SAAS;AAAA,QACxB,MAAM;AAAA,QACN,OAAO,SAAS;AAAA,MAClB;AAAA,MACA,MAAM;AAAA,MACN,MAAM,OAAO;AAAA,MACb;AAAA,MACA,SAAS;AAAA;AAAA,MACT;AAAA,IACF;AAIA,UAAM,iBAAiB,OAAO,IAAI,YAAU;AAAA,MAC1C,MAAM;AAAA,MACN,SAAS,MAAM;AAAA,IACjB,EAAE;AAEF,sBAAkB;AAAA,MAChB,cAAc,aAAa,KAAK,IAAI;AAAA,MACpC,UAAU,CAAC,GAAG,gBAAgB,GAAG,iBAAiB;AAAA,MAClD;AAAA,MACA,OAAO,SAAS,QACZ;AAAA,QACE,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B,IACA;AAAA,MACJ,QAAQ;AAAA,QACN;AAAA,QACA,KAAK,KAAK,IAAI;AAAA,MAChB;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAGD,UAAM,cAAc,SAAS,MAAM;AACnC,UAAM,eAAe,SAAS,MAAM;AACpC,UAAM,2BACJ,SAAS,MAAM,+BAA+B;AAChD,UAAM,uBAAuB,SAAS,MAAM,2BAA2B;AAEvE,UAAM,UACH,cAAc,MAAa,0BAA0B,KAAK,IAC1D,eAAe,MAAa,2BAA2B,KAAK,IAC5D,2BAA2B,MAC1B,0BAA0B,KAAK,IAChC,uBAAuB,OACrB,0BAA0B,KAAK,IAAI;AAExC,qBAAiB,UAAU;AAC3B,mBAAe,SAAS,UAAU;AAGlC;AAAA,MACE;AAAA,QACE;AAAA,QACA;AAAA,QACA,qBAAqB;AAAA,QACrB,iBAAiB;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA,iBACK;AAAA,QACC,SAAS,eAAe;AAAA,QACxB,WAAW,eAAe;AAAA,QAC1B;AAAA,MACF,IACA;AAAA,IACN;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,WAAO,6BAA6B,KAAK;AAAA,EAC3C;AACF;AAEA,SAAS,6BAA6B,OAAkC;AACtE,MAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,oBAAoB,GAAG;AAC1E,WAAO,+BAA+B,6BAA6B;AAAA,EACrE;AACA,MACE,iBAAiB,SACjB,MAAM,QAAQ,SAAS,gCAAgC,GACvD;AACA,WAAO,+BAA+B,oCAAoC;AAAA,EAC5E;AACA,MACE,iBAAiB,SACjB,MAAM,QAAQ,YAAY,EAAE,SAAS,WAAW,GAChD;AACA,WAAO,+BAA+B,6BAA6B;AAAA,EACrE;AACA,MAAI,iBAAiB,OAAO;AAC1B,QAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,kBAAY,MAAM,uBAAuB;AAAA,QACvC,SAAS,MAAM;AAAA,QACf,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH;AACA,WAAO;AAAA,MACL,GAAG,wBAAwB,KAAK,MAAM,OAAO;AAAA,IAC/C;AAAA,EACF;AACA,SAAO,+BAA+B,wBAAwB;AAChE;AAEA,SAAS,oBACP,UACgB;AAChB,SAAO,SAAS,IAAI,CAAC,KAAK,UAAU;AAClC,WAAO,IAAI,SAAS,SAChB,0BAA0B,KAAK,QAAQ,SAAS,SAAS,CAAC,IAC1D,+BAA+B,KAAK,QAAQ,SAAS,SAAS,CAAC;AAAA,EACrE,CAAC;AACH;AAEA,eAAe,YACb,UACA,cACA,mBACA,OACA,QACA,SAO2B;AAC3B,QAAM,SAAS,gBAAgB;AAC/B,QAAM,eAAe,gBAAgB;AACrC,QAAM,iBAAiB,SAAS;AAEhC,QAAM,eAAe,SAAS,gBAAgB,aAAa,SAAS,MAAM;AAC1E,MAAI;AAGJ,QAAM,iBAAiB,kBAAkB;AACzC,cAAY,IAAI,uBAAuB;AAAA,IACrC,mBAAmB,CAAC,CAAC;AAAA,IACrB,gBAAgB,cAAc;AAAA,IAC9B,kBAAkB,cAAc;AAAA,IAChC,uBAAuB,cAAc;AAAA,IACrC,sBAAsB,cAAc;AAAA,IACpC,qBAAqB,cAAc;AAAA,IACnC,0BAA0B,CAAC,CAAC,cAAc;AAAA,IAC1C,cAAc,SAAS;AAAA,IACvB,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,MAAI,cAAc;AAChB,YAAQ,aAAa;AAAA,EACvB,OAAO;AACL,YAAQ,SAAS,SAAS,cAAc,aAAa;AAAA,EACvD;AAEA,MAAI,SAAS,qBAAqB;AAChC,UAAM,CAAC,mBAAmB,IAAI,qBAAqB,YAAY;AAE/D,mBAAe,CAAC,sBAAsB,IAAI,YAAY;AAAA,EACxD;AAEA,QAAM,SAA2B,qBAAqB,YAAY,EAAE;AAAA,IAClE,QAAM;AAAA,MACJ,GAAI,yBACA,EAAE,eAAe,EAAE,MAAM,YAAY,EAAE,IACvC,CAAC;AAAA,MACL,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,MAAM;AAAA,MACJ,OAAM,OACH;AAAA,QACC,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,EAAE;AAAA,UACR,aAAa
,MAAM,EAAE,OAAO;AAAA,YAC1B,UAAU,SAAS;AAAA,UACrB,CAAC;AAAA;AAAA,UAED,YACE,qBAAqB,KAAK,EAAE,kBACxB,EAAE,kBACF,gBAAgB,EAAE,WAAW;AAAA,QACrC;AAAA,MACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,eAAe,OAAO;AAAA,IAC1B,QACG;AAAA,MACC,MAAM;AAAA,MACN,SAAS,EAAE;AAAA,IACb;AAAA,EACJ;AAEA,QAAM,iBAAiB,yCAAyC,QAAQ;AACxE,QAAM,wBAAwB,KAAK,IAAI;AAGvC,8BAA4B;AAAA,IAC1B,YAAY,aAAa,KAAK,IAAI;AAAA,IAClC,cAAc,qBAAqB,KAAK;AAAA,IACxC,WAAW,CAAC;AAAA;AAAA,IACZ,aAAa,aAAa,KAAK,IAAI;AAAA,EACrC,CAAC;AAED,MAAI,QAAQ,KAAK,IAAI;AACrB,MAAI,gBAAgB;AACpB,MAAI;AAEJ,MAAI;AACF,eAAW,MAAM;AAAA,MACf,OAAM,YAAW;AACf,wBAAgB;AAChB,gBAAQ,KAAK,IAAI;AAEjB,cAAM,YAAY,wBAAwB,YAAY;AACtD,cAAM,SAAS,YAAY,KAAK;AAEhC,cAAM,OAA0C;AAAA,UAC9C;AAAA,UAEA,GAAI,SACA,EAAE,uBAAuB,UAAU,IACnC,EAAE,YAAY,UAAU;AAAA,UAC5B,UAAU,CAAC,GAAG,cAAc,GAAG,cAAc;AAAA,UAE7C,aAAa,SAAS,IAAI;AAAA,QAC5B;AACA,YAAI,OAAO,QAAQ;AACjB;AAAC,UAAC,KAA2C,SAAS;AACtD,eAAK,iBAAiB;AAAA,YACpB,eAAe;AAAA,UACjB;AAAA,QACF;AAEA,YAAI,YAAY,SAAS,GAAG;AAC1B,eAAK,QAAQ;AACb,eAAK,cAAc;AAAA,QACrB;AACA,cAAM,kBAAkB,MAAM,mBAAmB,cAAc,QAAQ;AACvE,YAAI,iBAAiB;AACnB,eAAK,mBAAmB;AAAA,QAC1B;AAEA,YAAI,gBAAgB,aAAa,WAAW;AAC1C,sBAAY,IAAI,4BAA4B;AAAA,YAC1C,kBAAkB,aAAa;AAAA,YAC/B,WAAW,aAAa;AAAA,YACxB,UAAU,aAAa;AAAA,YACvB,SAAS,aAAa;AAAA,YACtB,cAAc,CAAC,CAAC,aAAa;AAAA,YAC7B,WAAW,kBAAkB,GAAG;AAAA,UAClC,CAAC;AAGD,gBAAM,yBACJ,QAAQ,IAAI,qBAAqB;AAEnC,cAAI,wBAAwB;AAE1B,kBAAM,UAAU,oBAAoB,cAAc,YAAY;AAG9D,kBAAM,gBAAsC;AAAA,cAC1C,UAAU;AAAA,cACV,cAAc,aAAa,IAAI,OAAK,EAAE,OAAiB;AAAA,cACvD;AAAA,cACA,WAAW,wBAAwB,YAAY;AAAA,cAC/C,QAAQ,OAAO;AAAA,cACf;AAAA,cACA,aAAa,YAAY,KAAK,IAAI,IAAI;AAAA,cACtC,oBACE,gBAAgB,eAAe;AAAA,cACjC,WAAW;AAAA;AAAA,YACb;AAGA,kBAAM,UAAU,QAAQ,cAAc,aAAa;AAGnD,gBAAI,oBAAoB,sBAAsB,YAAY,GAAG;AAE3D,oBAAM,EAAE,qBAAqB,IAAI,MAAM,OAAO,UAAU;AACxD,oBAAMC,YAAW,MAAM;AAAA,gBACrB;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AACA,oBAAM,kBAAkB,QAAQ,cAAcA,SAAQ;AAGtD,oBAAM,aAAa;AAAA,gBACjB,MAAM;AAAA,gBACN,SAAS,gBAAgB;AAAA,gBACzB,YAAY,gBAAgB;AAAA,gBAC5B,OAAO;AAAA,kBACL,eAAe,gBAAgB,MAAM;AAAA,kBACrC,mBAAmB,gBAAgB,MAAM;AAAA,gBAC3C;AAAA,cACF;AACA,oBAAM,eAAiC;AAAA,gBACrC,MAAM;AAAA,gBACN,SAAS;AAAA,gBACT,SAAS;AAAA;AAAA,gBACT,YAAY,KAAK,IAAI,IAAI;AAAA,gBACzB,MAAM,GAAG,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAAA,gBAC9D,YAAY,gBAAgB;AAAA;AAAA,cAC9B;AACA,qBAAO;AAAA,YACT,OAAO;AAEL,oBAAM,IAAI,MAAM;AAAA,gBACd;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AACA,kBAAI;AACJ,kBAAI,OAAO,QAAQ;AACjB,gCAAgB,MAAM;AAAA,kBACpB;AAAA,kBACA;AAAA,gBACF;AAAA,cACF,OAAO;AACL,gCAAgB;AAAA,cAClB;AACA,oBAAM,IAAI,iCAAiC,eAAe,KAAK;AAC/D,qBAAO;AAAA,YACT;AAAA,UACF,OAAO;AAEL,kBAAM,qBAAqB,YAAY,aAAa,SAAS,IACzD,+BACA;AACJ,kBAAM,IAAI,MAAM;AAAA,cACd;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,YACF;AACA,gBAAI;AACJ,gBAAI,KAAK,QAAQ;AACf,8BAAgB,MAAM;AAAA,gBACpB;AAAA,gBACA;AAAA,cACF;AAAA,YACF,OAAO;AACL,8BAAgB;AAAA,YAClB;AACA,kBAAM,IAAI,iCAAiC,eAAe,KAAK;AAC/D,mBAAO;AAAA,UACT;AAAA,QACF,OAAO;AAEL,sBAAY,IAAI,qBAAqB;AAAA,YACnC,oBAAoB,CAAC,CAAC;AAAA,YACtB,gBAAgB,cAAc;AAAA,YAC9B,iBAAiB,CAAC,CAAC,cAAc;AAAA,YACjC,eAAe;AAAA,YACf,aAAa;AAAA,YACb,WAAW,kBAAkB,GAAG;AAAA,UAClC,CAAC;AAGD,gBAAM,eAAe;AAAA,YACnB,oBAAoB,CAAC,CAAC;AAAA,YACtB,gBAAgB,cAAc;AAAA,YAC9B,iBAAiB,CAAC,CAAC,cAAc;AAAA,YACjC,gBAAgB;AAAA,YAChB,WAAW,kBAAkB,GAAG;AAAA,UAClC;AACA,sBAAY,MAAM,0BAA0B,YAAY;AACxD,gBAAM,IAAI;AAAA,YACR,8CAA8C,KAAK,2DAA2D,KAAK,UAAU,YAAY,CAAC;AAAA,UAC5I;AAAA,QACF;AAAA,MACF;AAAA,MACA;AAAA,QACE;AAAA;AAAA,QAEA,SAAS,gBAAgB,UACrB,CAAC,EAAE,SAAS,YAAY,OAAO,QAAQ,MAAM;AAC3C,oCAA0B,eAAe,SAAU;AAAA,YACjD;AAAA,YACA;AAAA,YACA,cAAc,MAAM;AAAA,YACpB;AAAA,UACF,CAAC;AAAA,QACH,IACA;AAAA,MACN;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,aAAS,KA
AK;AACd,WAAO,6BAA6B,KAAK;AAAA,EAC3C;AACA,QAAM,aAAa,KAAK,IAAI,IAAI;AAChC,QAAM,6BAA6B,KAAK,IAAI,IAAI;AAEhD,QAAM,cAAc,SAAS,OAAO,iBAAiB;AACrD,QAAM,eAAe,SAAS,OAAO,qBAAqB;AAC1D,QAAM,uBACJ,SAAS,OAAO,sBAAsB,iBAAiB;AACzD,QAAM,2BACJ,SAAS,OAAO,sBAAsB,iBAAiB;AACzD,QAAM,UACH,cAAc,MAAa,uCAC3B,eAAe,MAAa,wCAC5B,uBAAuB,MACtB,mDACD,2BAA2B,MAC1B;AAEJ,iBAAe,SAAS,0BAA0B;AAGlD;AAAA,IACE;AAAA,MACE;AAAA,MACA;AAAA,MACA,qBAAqB;AAAA,MACrB,iBAAiB;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBACK;AAAA,MACC,SAAS,eAAe;AAAA,MACxB,WAAW,eAAe;AAAA,MAC1B;AAAA,IACF,IACA;AAAA,EACN;AAGA,oBAAkB;AAAA,IAChB,cAAc,aAAa,KAAK,IAAI;AAAA,IACpC,UAAU,CAAC,GAAG,cAAc,GAAG,cAAc;AAAA,IAC7C;AAAA,IACA,OAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN;AAAA,MACA,KAAK,KAAK,IAAI;AAAA,IAChB;AAAA,IACA,WAAW;AAAA,EACb,CAAC;AAED,SAAO;AAAA,IACL,SAAS;AAAA,MACP,GAAG;AAAA,MACH,SAAS,wBAAwB,SAAS,OAAO;AAAA,MACjD,OAAO;AAAA,QACL,cAAc;AAAA,QACd,eAAe;AAAA,QACf,yBAAyB;AAAA,QACzB,6BAA6B;AAAA,MAC/B;AAAA,IACF;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,MAAM,WAAW;AAAA,EACnB;AACF;AAEA,SAAS,wBAAwB,cAA2B;AAE1D,SAAO,cAAc,aAAa;AACpC;AAEA,SAAS,0BAA0B,OAAuB;AAExD,aAAW,kBAAkB,OAAO,OAAO,MAAM,GAAG;AAClD,UAAM,YAAY,eAAe,KAAK,CAAC,MAAW,EAAE,UAAU,KAAK;AACnE,QAAI,WAAW;AACb,aAAO,UAAU,wBAAwB;AAAA,IAC3C;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,2BAA2B,OAAuB;AAEzD,aAAW,kBAAkB,OAAO,OAAO,MAAM,GAAG;AAClD,UAAM,YAAY,eAAe,KAAK,CAAC,MAAW,EAAE,UAAU,KAAK;AACnE,QAAI,WAAW;AACb,aAAO,UAAU,yBAAyB;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO;AACT;AAGA,eAAsB,WACpB,cACA,UACA,eAAyB,CAAC,GAC1B,QAC2B;AAE3B,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IACA,CAAC;AAAA;AAAA,IACD,UAAU,IAAI,gBAAgB,EAAE;AAAA,IAChC;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,qBAAqB;AAAA,IACvB;AAAA,EACF;AACF;AAKA,eAAsB,WAAW;AAAA,EAC/B,eAAe,CAAC;AAAA,EAChB;AAAA,EACA;AAAA,EACA,sBAAsB;AAAA,EACtB;AACF,GAM8B;AAC5B,QAAM,WAAW;AAAA,IACf;AAAA,MACE,SAAS,EAAE,MAAM,QAAQ,SAAS,WAAW;AAAA,MAC7C,MAAM;AAAA,MACN,MAAM,WAAW;AAAA,IACnB;AAAA,EACF;AAEA,SAAO,WAAW,SAAS,UAAU,cAAc,MAAM;AAC3D;",
      6 | + "names": ["streamState", "response"]
  7   7 |   }
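The bundled TypeScript embedded in the source map above ends with two convenience entry points, `queryModel(pointer, ...)` and `queryQuick(...)`, the latter wrapping a single user prompt and routing it through the `'quick'` model pointer. A minimal usage sketch, assuming those exports keep the signatures visible above; the import path and the wrapper function are illustrative, not documented API:

```ts
// Sketch only: queryQuick builds one user message and delegates to
// queryModel('quick', ...), as in the bundled source above.
// The module specifier below is a guess for illustration.
import { queryQuick } from '@within-7/minto/dist/services/llm'

async function summarizeLog(log: string) {
  const reply = await queryQuick({
    systemPrompt: ['You are a concise assistant.'],
    userPrompt: `Summarize this build log in one sentence:\n${log}`,
  })
  // reply is an AssistantMessage; the model output lives in reply.message.content
  return reply.message.content
}
```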
@@ -5,6 +5,10 @@ import {
  5   5 |   getCompressionPrompt
  6   6 | } from "../constants/compressionPrompts.js";
  7   7 | import { getCompressionMode } from "../utils/compressionMode.js";
      8 | + import { fileFreshnessService } from "./fileFreshness.js";
      9 | + import { readTextContent } from "../utils/file.js";
     10 | + const MAX_TOKENS_PER_FILE = 1e4;
     11 | + const MAX_TOTAL_FILE_TOKENS = 5e4;
  8  12 | class CompressionService {
  9  13 |   /**
 10  14 |    * Determine if compression should be triggered
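The two constants added here cap how much recovered file content the compression path may re-inject, and the recovery code estimates tokens at 0.25 per character (see the `recoverRecentFiles` hunk further down). A quick worked sketch of the character budgets those numbers imply; the `TOKENS_PER_CHAR` name is introduced here for illustration, the diff itself uses the literal 0.25:

```ts
// Same arithmetic as the recovery code below: tokens ≈ characters * 0.25.
const MAX_TOKENS_PER_FILE = 1e4   // 10,000 tokens per recovered file
const MAX_TOTAL_FILE_TOKENS = 5e4 // 50,000 tokens across all recovered files
const TOKENS_PER_CHAR = 0.25      // roughly 4 characters per token

const maxCharsPerFile = MAX_TOKENS_PER_FILE / TOKENS_PER_CHAR   // 40,000 characters
const maxTotalChars = MAX_TOTAL_FILE_TOKENS / TOKENS_PER_CHAR   // 200,000 characters
```

So a single recovered file is truncated beyond roughly 40,000 characters, and recovery stops once the estimated running total would pass roughly 200,000 characters.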
@@ -151,6 +155,19 @@ class CompressionService {
151 155 |       recoveredFiles = await this.recoverRecentFiles(
152 156 |         options.maxRecoveredFiles ?? 5
153 157 |       );
    158 | +     if (recoveredFiles && recoveredFiles.length > 0) {
    159 | +       for (const file of recoveredFiles) {
    160 | +         const truncatedNote = file.truncated ? " (truncated)" : "";
    161 | +         const fileMessage = createUserMessage(
    162 | +           `[Recovered File${truncatedNote}] ${file.path}
    163 | +
    164 | + \`\`\`
    165 | + ${file.content}
    166 | + \`\`\``
    167 | +         );
    168 | +         compressedMessages.push(fileMessage);
    169 | +       }
    170 | +     }
154 171 |     }
155 172 |     return {
156 173 |       compressedMessages,
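Each recovered file in the hunk above is pushed back into the compressed transcript as a user message: a `[Recovered File]` header with the path (plus a `(truncated)` note when the per-file cap was hit), a blank line, and the file body wrapped in a fenced block. A rough sketch of what one such message body looks like once the template is filled in; the path and snippet are placeholders:

```ts
// Illustrative only: mirrors the template literal in the hunk above.
const fence = '`'.repeat(3)
const exampleMessageText = [
  '[Recovered File (truncated)] src/example/service.js',
  '',
  fence,
  '// ...recovered file content, capped per the token budget above...',
  fence,
].join('\n')
```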
@@ -201,7 +218,39 @@ class CompressionService {
201 218 |    * @see src/services/fileFreshness.ts - File tracking service
202 219 |    */
203 220 |   static async recoverRecentFiles(maxFiles) {
204     | -
    221 | +     const importantFiles = fileFreshnessService.getImportantFiles(maxFiles);
    222 | +     const results = [];
    223 | +     let totalTokens = 0;
    224 | +     for (const fileInfo of importantFiles) {
    225 | +       try {
    226 | +         const { content } = readTextContent(fileInfo.path);
    227 | +         const estimatedTokens = Math.ceil(content.length * 0.25);
    228 | +         let finalContent = content;
    229 | +         let truncated = false;
    230 | +         if (estimatedTokens > MAX_TOKENS_PER_FILE) {
    231 | +           const maxChars = Math.floor(MAX_TOKENS_PER_FILE / 0.25);
    232 | +           finalContent = content.substring(0, maxChars);
    233 | +           truncated = true;
    234 | +         }
    235 | +         const finalTokens = Math.min(estimatedTokens, MAX_TOKENS_PER_FILE);
    236 | +         if (totalTokens + finalTokens > MAX_TOTAL_FILE_TOKENS) {
    237 | +           break;
    238 | +         }
    239 | +         totalTokens += finalTokens;
    240 | +         results.push({
    241 | +           path: fileInfo.path,
    242 | +           content: finalContent,
    243 | +           tokens: finalTokens,
    244 | +           truncated
    245 | +         });
    246 | +       } catch (error) {
    247 | +         console.error(
    248 | +           `Failed to read file for recovery: ${fileInfo.path}`,
    249 | +           error
    250 | +         );
    251 | +       }
    252 | +     }
    253 | +     return results;
205 254 |   }
206 255 | }
207 256 | export {