@within-7/minto 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (264)
  1. package/dist/commands/agents/AgentsCommand.js +2342 -0
  2. package/dist/commands/agents/AgentsCommand.js.map +7 -0
  3. package/dist/commands/agents/constants.js +58 -0
  4. package/dist/commands/agents/constants.js.map +7 -0
  5. package/dist/commands/agents/index.js +37 -0
  6. package/dist/commands/agents/index.js.map +7 -0
  7. package/dist/commands/agents/types.js +10 -0
  8. package/dist/commands/agents/types.js.map +7 -0
  9. package/dist/commands/agents/utils/fileOperations.js +185 -0
  10. package/dist/commands/agents/utils/fileOperations.js.map +7 -0
  11. package/dist/commands/agents/utils/index.js +21 -0
  12. package/dist/commands/agents/utils/index.js.map +7 -0
  13. package/dist/commands/bug.js +2 -2
  14. package/dist/commands/bug.js.map +2 -2
  15. package/dist/commands/compact.js +5 -5
  16. package/dist/commands/compact.js.map +2 -2
  17. package/dist/commands/ctx_viz.js +55 -22
  18. package/dist/commands/ctx_viz.js.map +2 -2
  19. package/dist/commands/mcp-interactive.js +11 -11
  20. package/dist/commands/mcp-interactive.js.map +2 -2
  21. package/dist/commands/model.js +94 -32
  22. package/dist/commands/model.js.map +3 -3
  23. package/dist/commands/plugin/AddMarketplaceForm.js +49 -21
  24. package/dist/commands/plugin/AddMarketplaceForm.js.map +2 -2
  25. package/dist/commands/plugin/ConfirmDialog.js +38 -26
  26. package/dist/commands/plugin/ConfirmDialog.js.map +2 -2
  27. package/dist/commands/plugin/InstalledPluginsByMarketplace.js +24 -8
  28. package/dist/commands/plugin/InstalledPluginsByMarketplace.js.map +2 -2
  29. package/dist/commands/plugin/InstalledPluginsManager.js +3 -1
  30. package/dist/commands/plugin/InstalledPluginsManager.js.map +2 -2
  31. package/dist/commands/plugin/MainMenu.js +16 -7
  32. package/dist/commands/plugin/MainMenu.js.map +2 -2
  33. package/dist/commands/plugin/MarketplaceManager.js +84 -39
  34. package/dist/commands/plugin/MarketplaceManager.js.map +2 -2
  35. package/dist/commands/plugin/MarketplaceSelector.js +7 -3
  36. package/dist/commands/plugin/MarketplaceSelector.js.map +2 -2
  37. package/dist/commands/plugin/PlaceholderScreen.js +16 -2
  38. package/dist/commands/plugin/PlaceholderScreen.js.map +2 -2
  39. package/dist/commands/plugin/PluginBrowser.js +4 -2
  40. package/dist/commands/plugin/PluginBrowser.js.map +2 -2
  41. package/dist/commands/plugin/PluginDetailsInstall.js +12 -6
  42. package/dist/commands/plugin/PluginDetailsInstall.js.map +2 -2
  43. package/dist/commands/plugin/PluginDetailsManage.js +14 -5
  44. package/dist/commands/plugin/PluginDetailsManage.js.map +2 -2
  45. package/dist/commands/plugin/example-usage.js.map +2 -2
  46. package/dist/commands/plugin/utils.js.map +2 -2
  47. package/dist/commands/plugin.js +226 -46
  48. package/dist/commands/plugin.js.map +2 -2
  49. package/dist/commands/refreshCommands.js +6 -3
  50. package/dist/commands/refreshCommands.js.map +2 -2
  51. package/dist/commands/resume.js +2 -1
  52. package/dist/commands/resume.js.map +2 -2
  53. package/dist/commands/setup.js +19 -5
  54. package/dist/commands/setup.js.map +2 -2
  55. package/dist/commands/terminalSetup.js +2 -2
  56. package/dist/commands/terminalSetup.js.map +1 -1
  57. package/dist/commands.js +14 -30
  58. package/dist/commands.js.map +2 -2
  59. package/dist/components/AskUserQuestionDialog/AskUserQuestionDialog.js.map +2 -2
  60. package/dist/components/AskUserQuestionDialog/QuestionView.js +10 -1
  61. package/dist/components/AskUserQuestionDialog/QuestionView.js.map +2 -2
  62. package/dist/components/BackgroundTasksPanel.js +5 -1
  63. package/dist/components/BackgroundTasksPanel.js.map +2 -2
  64. package/dist/components/Config.js +17 -4
  65. package/dist/components/Config.js.map +2 -2
  66. package/dist/components/ConsoleOAuthFlow.js.map +2 -2
  67. package/dist/components/CustomSelect/select-option.js +4 -1
  68. package/dist/components/CustomSelect/select-option.js.map +2 -2
  69. package/dist/components/Help.js +6 -8
  70. package/dist/components/Help.js.map +2 -2
  71. package/dist/components/Logo.js +1 -1
  72. package/dist/components/Logo.js.map +2 -2
  73. package/dist/components/ModelListManager.js.map +2 -2
  74. package/dist/components/ModelSelector/ModelSelector.js +2030 -0
  75. package/dist/components/ModelSelector/ModelSelector.js.map +7 -0
  76. package/dist/components/ModelSelector/ScreenContainer.js +27 -0
  77. package/dist/components/ModelSelector/ScreenContainer.js.map +7 -0
  78. package/dist/components/ModelSelector/constants.js +37 -0
  79. package/dist/components/ModelSelector/constants.js.map +7 -0
  80. package/dist/components/ModelSelector/hooks/index.js +5 -0
  81. package/dist/components/ModelSelector/hooks/index.js.map +7 -0
  82. package/dist/components/ModelSelector/hooks/useEscapeNavigation.js +21 -0
  83. package/dist/components/ModelSelector/hooks/useEscapeNavigation.js.map +7 -0
  84. package/dist/components/ModelSelector/index.js +17 -0
  85. package/dist/components/ModelSelector/index.js.map +7 -0
  86. package/dist/components/ModelSelector/types.js +1 -0
  87. package/dist/components/ModelSelector/types.js.map +7 -0
  88. package/dist/components/PressEnterToContinue.js +1 -1
  89. package/dist/components/PressEnterToContinue.js.map +2 -2
  90. package/dist/components/ProjectOnboarding.js +1 -1
  91. package/dist/components/ProjectOnboarding.js.map +2 -2
  92. package/dist/components/PromptInput.js +88 -37
  93. package/dist/components/PromptInput.js.map +2 -2
  94. package/dist/components/QuitSummary.js +17 -10
  95. package/dist/components/QuitSummary.js.map +2 -2
  96. package/dist/components/SentryErrorBoundary.js.map +2 -2
  97. package/dist/components/StreamingBashOutput.js.map +2 -2
  98. package/dist/components/StructuredDiff.js.map +2 -2
  99. package/dist/components/SubagentProgress.js.map +2 -2
  100. package/dist/components/TaskCard.js.map +2 -2
  101. package/dist/components/TextInput.js.map +1 -1
  102. package/dist/components/TodoItem.js.map +1 -1
  103. package/dist/components/binary-feedback/BinaryFeedbackOption.js +1 -3
  104. package/dist/components/binary-feedback/BinaryFeedbackOption.js.map +2 -2
  105. package/dist/components/messages/AssistantLocalCommandOutputMessage.js.map +1 -1
  106. package/dist/components/messages/AssistantToolUseMessage.js +3 -1
  107. package/dist/components/messages/AssistantToolUseMessage.js.map +2 -2
  108. package/dist/components/messages/TaskProgressMessage.js.map +2 -2
  109. package/dist/components/messages/TaskToolMessage.js.map +2 -2
  110. package/dist/components/messages/UserToolResultMessage/utils.js.map +2 -2
  111. package/dist/components/permissions/FileEditPermissionRequest/FileEditToolDiff.js.map +2 -2
  112. package/dist/components/permissions/FileWritePermissionRequest/FileWriteToolDiff.js.map +2 -2
  113. package/dist/components/permissions/hooks.js.map +2 -2
  114. package/dist/constants/modelCapabilities.js +1 -1
  115. package/dist/constants/modelCapabilities.js.map +2 -2
  116. package/dist/constants/prompts.js.map +1 -1
  117. package/dist/constants/timing.js +34 -0
  118. package/dist/constants/timing.js.map +7 -0
  119. package/dist/entrypoints/cli.js +128 -33
  120. package/dist/entrypoints/cli.js.map +3 -3
  121. package/dist/entrypoints/mcp.js +13 -18
  122. package/dist/entrypoints/mcp.js.map +2 -2
  123. package/dist/hooks/useCanUseTool.js.map +2 -2
  124. package/dist/hooks/useCancelRequest.js.map +1 -1
  125. package/dist/hooks/useHistorySearch.js.map +2 -2
  126. package/dist/hooks/useLogStartupTime.js.map +2 -2
  127. package/dist/hooks/usePermissionRequestLogging.js.map +2 -2
  128. package/dist/hooks/useTextInput.js.map +1 -1
  129. package/dist/hooks/useUnifiedCompletion.js +493 -394
  130. package/dist/hooks/useUnifiedCompletion.js.map +2 -2
  131. package/dist/index.js.map +2 -2
  132. package/dist/permissions.js +4 -7
  133. package/dist/permissions.js.map +2 -2
  134. package/dist/query.js +6 -1
  135. package/dist/query.js.map +2 -2
  136. package/dist/screens/REPL.js +72 -36
  137. package/dist/screens/REPL.js.map +2 -2
  138. package/dist/screens/ResumeConversation.js +2 -1
  139. package/dist/screens/ResumeConversation.js.map +2 -2
  140. package/dist/services/adapters/base.js.map +2 -2
  141. package/dist/services/adapters/chatCompletions.js.map +2 -2
  142. package/dist/services/adapters/responsesAPI.js +3 -1
  143. package/dist/services/adapters/responsesAPI.js.map +2 -2
  144. package/dist/services/claude.js +327 -328
  145. package/dist/services/claude.js.map +2 -2
  146. package/dist/services/customCommands.js +6 -1
  147. package/dist/services/customCommands.js.map +2 -2
  148. package/dist/services/fileFreshness.js.map +2 -2
  149. package/dist/services/gpt5ConnectionTest.js +20 -7
  150. package/dist/services/gpt5ConnectionTest.js.map +2 -2
  151. package/dist/services/hookExecutor.js +6 -12
  152. package/dist/services/hookExecutor.js.map +2 -2
  153. package/dist/services/mcpClient.js +29 -2
  154. package/dist/services/mcpClient.js.map +2 -2
  155. package/dist/services/mentionProcessor.js +23 -10
  156. package/dist/services/mentionProcessor.js.map +2 -2
  157. package/dist/services/modelAdapterFactory.js.map +2 -2
  158. package/dist/services/oauth.js.map +2 -2
  159. package/dist/services/openai.js +109 -72
  160. package/dist/services/openai.js.map +3 -3
  161. package/dist/services/responseStateManager.js.map +2 -2
  162. package/dist/services/systemReminder.js.map +2 -2
  163. package/dist/tools/ArchitectTool/ArchitectTool.js.map +1 -1
  164. package/dist/tools/AskExpertModelTool/AskExpertModelTool.js +14 -8
  165. package/dist/tools/AskExpertModelTool/AskExpertModelTool.js.map +2 -2
  166. package/dist/tools/BashOutputTool/BashOutputTool.js.map +2 -2
  167. package/dist/tools/BashTool/BashTool.js.map +2 -2
  168. package/dist/tools/FileReadTool/FileReadTool.js.map +1 -1
  169. package/dist/tools/FileWriteTool/FileWriteTool.js.map +2 -2
  170. package/dist/tools/GrepTool/GrepTool.js +1 -4
  171. package/dist/tools/GrepTool/GrepTool.js.map +2 -2
  172. package/dist/tools/MultiEditTool/MultiEditTool.js +4 -1
  173. package/dist/tools/MultiEditTool/MultiEditTool.js.map +2 -2
  174. package/dist/tools/NotebookReadTool/NotebookReadTool.js +3 -1
  175. package/dist/tools/NotebookReadTool/NotebookReadTool.js.map +2 -2
  176. package/dist/tools/SkillTool/SkillTool.js +12 -6
  177. package/dist/tools/SkillTool/SkillTool.js.map +2 -2
  178. package/dist/tools/TaskTool/TaskTool.js +14 -5
  179. package/dist/tools/TaskTool/TaskTool.js.map +2 -2
  180. package/dist/tools/TaskTool/prompt.js.map +2 -2
  181. package/dist/tools/ThinkTool/ThinkTool.js +6 -1
  182. package/dist/tools/ThinkTool/ThinkTool.js.map +2 -2
  183. package/dist/tools/TodoWriteTool/TodoWriteTool.js +23 -3
  184. package/dist/tools/TodoWriteTool/TodoWriteTool.js.map +2 -2
  185. package/dist/tools/URLFetcherTool/URLFetcherTool.js +2 -2
  186. package/dist/tools/URLFetcherTool/URLFetcherTool.js.map +2 -2
  187. package/dist/tools/URLFetcherTool/cache.js +6 -3
  188. package/dist/tools/URLFetcherTool/cache.js.map +2 -2
  189. package/dist/tools/URLFetcherTool/htmlToMarkdown.js +3 -1
  190. package/dist/tools/URLFetcherTool/htmlToMarkdown.js.map +2 -2
  191. package/dist/tools/WebSearchTool/WebSearchTool.js.map +2 -2
  192. package/dist/tools/WebSearchTool/prompt.js.map +2 -2
  193. package/dist/tools/WebSearchTool/searchProviders.js +15 -6
  194. package/dist/tools/WebSearchTool/searchProviders.js.map +2 -2
  195. package/dist/tools.js +4 -1
  196. package/dist/tools.js.map +2 -2
  197. package/dist/types/core.js +1 -0
  198. package/dist/types/core.js.map +7 -0
  199. package/dist/types/hooks.js +1 -4
  200. package/dist/types/hooks.js.map +2 -2
  201. package/dist/types/marketplace.js +8 -2
  202. package/dist/types/marketplace.js.map +2 -2
  203. package/dist/types/plugin.js +9 -6
  204. package/dist/types/plugin.js.map +2 -2
  205. package/dist/utils/BackgroundShellManager.js +76 -10
  206. package/dist/utils/BackgroundShellManager.js.map +2 -2
  207. package/dist/utils/PersistentShell.js +7 -2
  208. package/dist/utils/PersistentShell.js.map +2 -2
  209. package/dist/utils/advancedFuzzyMatcher.js +4 -1
  210. package/dist/utils/advancedFuzzyMatcher.js.map +2 -2
  211. package/dist/utils/agentLoader.js +69 -35
  212. package/dist/utils/agentLoader.js.map +2 -2
  213. package/dist/utils/agentStorage.js.map +2 -2
  214. package/dist/utils/async.js +163 -0
  215. package/dist/utils/async.js.map +7 -0
  216. package/dist/utils/autoUpdater.js +8 -2
  217. package/dist/utils/autoUpdater.js.map +2 -2
  218. package/dist/utils/commands.js +23 -11
  219. package/dist/utils/commands.js.map +2 -2
  220. package/dist/utils/commonUnixCommands.js +3 -1
  221. package/dist/utils/commonUnixCommands.js.map +2 -2
  222. package/dist/utils/compressionMode.js.map +2 -2
  223. package/dist/utils/config.js +30 -14
  224. package/dist/utils/config.js.map +2 -2
  225. package/dist/utils/debugLogger.js.map +2 -2
  226. package/dist/utils/env.js.map +2 -2
  227. package/dist/utils/envConfig.js +82 -0
  228. package/dist/utils/envConfig.js.map +7 -0
  229. package/dist/utils/errorHandling.js +89 -0
  230. package/dist/utils/errorHandling.js.map +7 -0
  231. package/dist/utils/expertChatStorage.js.map +2 -2
  232. package/dist/utils/fuzzyMatcher.js +13 -7
  233. package/dist/utils/fuzzyMatcher.js.map +2 -2
  234. package/dist/utils/hookManager.js +14 -4
  235. package/dist/utils/hookManager.js.map +2 -2
  236. package/dist/utils/log.js.map +2 -2
  237. package/dist/utils/marketplaceManager.js +44 -9
  238. package/dist/utils/marketplaceManager.js.map +2 -2
  239. package/dist/utils/messageContextManager.js.map +1 -1
  240. package/dist/utils/messages.js +6 -3
  241. package/dist/utils/messages.js.map +2 -2
  242. package/dist/utils/model.js +3 -1
  243. package/dist/utils/model.js.map +2 -2
  244. package/dist/utils/pluginInstaller.js +3 -15
  245. package/dist/utils/pluginInstaller.js.map +2 -2
  246. package/dist/utils/pluginLoader.js +41 -13
  247. package/dist/utils/pluginLoader.js.map +2 -2
  248. package/dist/utils/pluginRegistry.js.map +2 -2
  249. package/dist/utils/pluginValidator.js +71 -49
  250. package/dist/utils/pluginValidator.js.map +2 -2
  251. package/dist/utils/ptyCompat.js.map +2 -2
  252. package/dist/utils/roundConverter.js.map +2 -2
  253. package/dist/utils/secureFile.js +43 -14
  254. package/dist/utils/secureFile.js.map +2 -2
  255. package/dist/utils/sessionState.js.map +2 -2
  256. package/dist/utils/skillLoader.js.map +2 -2
  257. package/dist/utils/teamConfig.js +7 -4
  258. package/dist/utils/teamConfig.js.map +2 -2
  259. package/dist/utils/theme.js.map +2 -2
  260. package/dist/utils/thinking.js.map +2 -2
  261. package/dist/utils/unaryLogging.js.map +2 -2
  262. package/dist/version.js +2 -2
  263. package/dist/version.js.map +1 -1
  264. package/package.json +5 -5
package/dist/services/openai.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/services/openai.ts"],
- "sourcesContent": ["import { OpenAI } from 'openai'\nimport { getGlobalConfig, GlobalConfig } from '@utils/config'\nimport { ProxyAgent, fetch, Response } from 'undici'\nimport { setSessionState, getSessionState } from '@utils/sessionState'\nimport { debug as debugLogger, getCurrentRequest, logAPIError } from '@utils/debugLogger'\n\n/**\n * Retry configuration constants for API calls\n */\nconst RETRY_CONFIG = {\n BASE_DELAY_MS: 1000,\n MAX_DELAY_MS: 32000,\n MAX_SERVER_DELAY_MS: 60000,\n JITTER_FACTOR: 0.1,\n} as const\n\n/**\n * Calculate retry delay with exponential backoff and jitter\n */\nfunction getRetryDelay(attempt: number, retryAfter?: string | null): number {\n // If server suggests a retry-after time, use it\n if (retryAfter) {\n const retryAfterMs = parseInt(retryAfter) * 1000\n if (!isNaN(retryAfterMs) && retryAfterMs > 0) {\n return Math.min(retryAfterMs, RETRY_CONFIG.MAX_SERVER_DELAY_MS)\n }\n }\n\n // Exponential backoff with jitter\n const delay = RETRY_CONFIG.BASE_DELAY_MS * Math.pow(2, attempt - 1)\n const jitter = Math.random() * RETRY_CONFIG.JITTER_FACTOR * delay\n\n return Math.min(delay + jitter, RETRY_CONFIG.MAX_DELAY_MS)\n}\n\n// Helper function to create an abortable delay\nfunction abortableDelay(delayMs: number, signal?: AbortSignal): Promise<void> {\n return new Promise((resolve, reject) => {\n // Check if already aborted\n if (signal?.aborted) {\n reject(new Error('Request was aborted'))\n return\n }\n\n const timeoutId = setTimeout(() => {\n resolve()\n }, delayMs)\n\n // If signal is provided, listen for abort event\n if (signal) {\n const abortHandler = () => {\n clearTimeout(timeoutId)\n reject(new Error('Request was aborted'))\n }\n signal.addEventListener('abort', abortHandler, { once: true })\n }\n })\n}\n\nenum ModelErrorType {\n MaxLength = '1024',\n MaxCompletionTokens = 'max_completion_tokens',\n TemperatureRestriction = 'temperature_restriction',\n StreamOptions = 'stream_options',\n Citations = 'citations',\n RateLimit = 'rate_limit',\n}\n\nfunction getModelErrorKey(\n baseURL: string,\n model: string,\n type: ModelErrorType,\n): string {\n return `${baseURL}:${model}:${type}`\n}\n\nfunction hasModelError(\n baseURL: string,\n model: string,\n type: ModelErrorType,\n): boolean {\n return !!getSessionState('modelErrors')[\n getModelErrorKey(baseURL, model, type)\n ]\n}\n\nfunction setModelError(\n baseURL: string,\n model: string,\n type: ModelErrorType,\n error: string,\n) {\n setSessionState('modelErrors', {\n [getModelErrorKey(baseURL, model, type)]: error,\n })\n}\n\n// More flexible error detection system\ntype ErrorDetector = (errMsg: string) => boolean\ntype ErrorFixer = (\n opts: OpenAI.ChatCompletionCreateParams,\n) => Promise<void> | void\ninterface ErrorHandler {\n type: ModelErrorType\n detect: ErrorDetector\n fix: ErrorFixer\n}\n\n// GPT-5 specific error handlers with enhanced detection patterns\nconst GPT5_ERROR_HANDLERS: ErrorHandler[] = [\n {\n type: ModelErrorType.MaxCompletionTokens,\n detect: errMsg => {\n const lowerMsg = errMsg.toLowerCase()\n return (\n // Exact OpenAI GPT-5 error message\n (lowerMsg.includes(\"unsupported parameter: 'max_tokens'\") && lowerMsg.includes(\"'max_completion_tokens'\")) ||\n // Generic max_tokens error patterns\n (lowerMsg.includes(\"max_tokens\") && lowerMsg.includes(\"max_completion_tokens\")) ||\n (lowerMsg.includes(\"max_tokens\") && lowerMsg.includes(\"not supported\")) ||\n (lowerMsg.includes(\"max_tokens\") && lowerMsg.includes(\"use max_completion_tokens\")) ||\n // Additional patterns 
for various providers\n (lowerMsg.includes(\"invalid parameter\") && lowerMsg.includes(\"max_tokens\")) ||\n (lowerMsg.includes(\"parameter error\") && lowerMsg.includes(\"max_tokens\"))\n )\n },\n fix: async opts => {\n console.log(`\uD83D\uDD27 GPT-5 Fix: Converting max_tokens (${opts.max_tokens}) to max_completion_tokens`)\n if ('max_tokens' in opts) {\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n }\n },\n },\n {\n type: ModelErrorType.TemperatureRestriction,\n detect: errMsg => {\n const lowerMsg = errMsg.toLowerCase()\n return (\n lowerMsg.includes(\"temperature\") && \n (lowerMsg.includes(\"only supports\") || lowerMsg.includes(\"must be 1\") || lowerMsg.includes(\"invalid temperature\"))\n )\n },\n fix: async opts => {\n console.log(`\uD83D\uDD27 GPT-5 Fix: Adjusting temperature from ${opts.temperature} to 1`)\n opts.temperature = 1\n },\n },\n // Add more GPT-5 specific handlers as needed\n]\n\n// Standard error handlers\nconst ERROR_HANDLERS: ErrorHandler[] = [\n {\n type: ModelErrorType.MaxLength,\n detect: errMsg =>\n errMsg.includes('Expected a string with maximum length 1024'),\n fix: async opts => {\n const toolDescriptions = {}\n for (const tool of opts.tools || []) {\n if (tool.function.description.length <= 1024) continue\n let str = ''\n let remainder = ''\n for (let line of tool.function.description.split('\\n')) {\n if (str.length + line.length < 1024) {\n str += line + '\\n'\n } else {\n remainder += line + '\\n'\n }\n }\n \n tool.function.description = str\n toolDescriptions[tool.function.name] = remainder\n }\n if (Object.keys(toolDescriptions).length > 0) {\n let content = '<additional-tool-usage-instructions>\\n\\n'\n for (const [name, description] of Object.entries(toolDescriptions)) {\n content += `<${name}>\\n${description}\\n</${name}>\\n\\n`\n }\n content += '</additional-tool-usage-instructions>'\n\n for (let i = opts.messages.length - 1; i >= 0; i--) {\n if (opts.messages[i].role === 'system') {\n opts.messages.splice(i + 1, 0, {\n role: 'system',\n content,\n })\n break\n }\n }\n }\n },\n },\n {\n type: ModelErrorType.MaxCompletionTokens,\n detect: errMsg => errMsg.includes(\"Use 'max_completion_tokens'\"),\n fix: async opts => {\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n },\n },\n {\n type: ModelErrorType.StreamOptions,\n detect: errMsg => errMsg.includes('stream_options'),\n fix: async opts => {\n delete opts.stream_options\n },\n },\n {\n type: ModelErrorType.Citations,\n detect: errMsg =>\n errMsg.includes('Extra inputs are not permitted') &&\n errMsg.includes('citations'),\n fix: async opts => {\n if (!opts.messages) return\n\n for (const message of opts.messages) {\n if (!message) continue\n\n if (Array.isArray(message.content)) {\n for (const item of message.content) {\n // Convert to unknown first to safely access properties\n if (item && typeof item === 'object') {\n const itemObj = item as unknown as Record<string, unknown>\n if ('citations' in itemObj) {\n delete itemObj.citations\n }\n }\n }\n } else if (message.content && typeof message.content === 'object') {\n // Convert to unknown first to safely access properties\n const contentObj = message.content as unknown as Record<\n string,\n unknown\n >\n if ('citations' in contentObj) {\n delete contentObj.citations\n }\n }\n }\n },\n },\n]\n\n// Rate limit specific detection\nfunction isRateLimitError(errMsg: string): boolean {\n if (!errMsg) return false\n const lowerMsg = errMsg.toLowerCase()\n return (\n lowerMsg.includes('rate limit') 
||\n lowerMsg.includes('too many requests') ||\n lowerMsg.includes('429')\n )\n}\n\n// Model-specific feature flags - can be extended with more properties as needed\ninterface ModelFeatures {\n usesMaxCompletionTokens: boolean\n supportsResponsesAPI?: boolean\n requiresTemperatureOne?: boolean\n supportsVerbosityControl?: boolean\n supportsCustomTools?: boolean\n supportsAllowedTools?: boolean\n}\n\n// Map of model identifiers to their specific features\nconst MODEL_FEATURES: Record<string, ModelFeatures> = {\n // OpenAI thinking models\n o1: { usesMaxCompletionTokens: true },\n 'o1-preview': { usesMaxCompletionTokens: true },\n 'o1-mini': { usesMaxCompletionTokens: true },\n 'o1-pro': { usesMaxCompletionTokens: true },\n 'o3-mini': { usesMaxCompletionTokens: true },\n // GPT-5 models\n 'gpt-5': { \n usesMaxCompletionTokens: true, \n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n },\n 'gpt-5-mini': { \n usesMaxCompletionTokens: true, \n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n },\n 'gpt-5-nano': { \n usesMaxCompletionTokens: true, \n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n },\n 'gpt-5-chat-latest': { \n usesMaxCompletionTokens: true, \n supportsResponsesAPI: false, // Uses Chat Completions only\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n },\n}\n\n// Helper to get model features based on model ID/name\nfunction getModelFeatures(modelName: string): ModelFeatures {\n if (!modelName || typeof modelName !== 'string') {\n return { usesMaxCompletionTokens: false }\n }\n\n // Check for exact matches first (highest priority)\n if (MODEL_FEATURES[modelName]) {\n return MODEL_FEATURES[modelName]\n }\n\n // Simple GPT-5 detection: any model name containing 'gpt-5'\n if (modelName.toLowerCase().includes('gpt-5')) {\n return {\n usesMaxCompletionTokens: true,\n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n }\n }\n\n // Check for partial matches (e.g., other reasoning models)\n for (const [key, features] of Object.entries(MODEL_FEATURES)) {\n if (modelName.includes(key)) {\n return features\n }\n }\n\n // Default features for unknown models\n return { usesMaxCompletionTokens: false }\n}\n\n// Apply model-specific parameter transformations based on model features\nfunction applyModelSpecificTransformations(\n opts: OpenAI.ChatCompletionCreateParams,\n): void {\n if (!opts.model || typeof opts.model !== 'string') {\n return\n }\n\n const features = getModelFeatures(opts.model)\n const isGPT5 = opts.model.toLowerCase().includes('gpt-5')\n\n // \uD83D\uDD25 Enhanced GPT-5 Detection and Transformation\n if (isGPT5 || features.usesMaxCompletionTokens) {\n // Force max_completion_tokens for all GPT-5 models\n if ('max_tokens' in opts && !('max_completion_tokens' in opts)) {\n console.log(`\uD83D\uDD27 Transforming max_tokens (${opts.max_tokens}) to max_completion_tokens for ${opts.model}`)\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n }\n \n // Force temperature = 1 for GPT-5 models\n if (features.requiresTemperatureOne && 'temperature' in opts) {\n if (opts.temperature !== 1 && opts.temperature !== undefined) {\n 
console.log(\n `\uD83D\uDD27 GPT-5 temperature constraint: Adjusting temperature from ${opts.temperature} to 1 for ${opts.model}`\n )\n opts.temperature = 1\n }\n }\n \n // Remove unsupported parameters for GPT-5\n if (isGPT5) {\n // Remove parameters that may not be supported by GPT-5\n delete opts.frequency_penalty\n delete opts.presence_penalty\n delete opts.logit_bias\n delete opts.user\n \n // Add reasoning_effort if not present and model supports it\n if (!opts.reasoning_effort && features.supportsVerbosityControl) {\n opts.reasoning_effort = 'medium' // Default reasoning effort for coding tasks\n }\n }\n }\n\n // Apply transformations for non-GPT-5 models\n else {\n // Standard max_tokens to max_completion_tokens conversion for other reasoning models\n if (\n features.usesMaxCompletionTokens &&\n 'max_tokens' in opts &&\n !('max_completion_tokens' in opts)\n ) {\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n }\n }\n\n // Add more transformations here as needed\n}\n\nasync function applyModelErrorFixes(\n opts: OpenAI.ChatCompletionCreateParams,\n baseURL: string,\n) {\n const isGPT5 = opts.model.startsWith('gpt-5')\n const handlers = isGPT5 ? [...GPT5_ERROR_HANDLERS, ...ERROR_HANDLERS] : ERROR_HANDLERS\n \n for (const handler of handlers) {\n if (hasModelError(baseURL, opts.model, handler.type)) {\n await handler.fix(opts)\n return\n }\n }\n}\n\n// Helper function to try different endpoints for OpenAI-compatible providers\nasync function tryWithEndpointFallback(\n baseURL: string,\n opts: OpenAI.ChatCompletionCreateParams,\n headers: Record<string, string>,\n provider: string,\n proxy: any,\n signal?: AbortSignal, // \uD83D\uDD27 Add AbortSignal support\n): Promise<{ response: Response; endpoint: string }> {\n const endpointsToTry = []\n\n if (provider === 'minimax') {\n endpointsToTry.push('/text/chatcompletion_v2', '/chat/completions')\n } else {\n endpointsToTry.push('/chat/completions')\n }\n\n let lastError = null\n\n for (const endpoint of endpointsToTry) {\n try {\n const response = await fetch(`${baseURL}${endpoint}`, {\n method: 'POST',\n headers,\n body: JSON.stringify(opts.stream ? 
{ ...opts, stream: true } : opts),\n dispatcher: proxy,\n signal: signal, // \uD83D\uDD27 Connect AbortSignal to fetch call\n })\n\n // If successful, return immediately\n if (response.ok) {\n return { response, endpoint }\n }\n\n // If it's a 404, try the next endpoint\n if (response.status === 404 && endpointsToTry.length > 1) {\n console.log(\n `Endpoint ${endpoint} returned 404, trying next endpoint...`,\n )\n continue\n }\n\n // For other error codes, return this response (don't try fallback)\n return { response, endpoint }\n } catch (error) {\n lastError = error\n // Network errors might be temporary, try next endpoint\n if (endpointsToTry.indexOf(endpoint) < endpointsToTry.length - 1) {\n console.log(`Network error on ${endpoint}, trying next endpoint...`)\n continue\n }\n }\n }\n\n // If we get here, all endpoints failed\n throw lastError || new Error('All endpoints failed')\n}\n\n// Export shared utilities for GPT-5 compatibility\nexport { getGPT5CompletionWithProfile, getModelFeatures, applyModelSpecificTransformations }\n\nexport async function getCompletionWithProfile(\n modelProfile: any,\n opts: OpenAI.ChatCompletionCreateParams,\n attempt: number = 0,\n maxAttempts: number = 10,\n signal?: AbortSignal, // \uD83D\uDD27 CRITICAL FIX: Add AbortSignal support\n): Promise<OpenAI.ChatCompletion | AsyncIterable<OpenAI.ChatCompletionChunk>> {\n if (attempt >= maxAttempts) {\n throw new Error('Max attempts reached')\n }\n\n const provider = modelProfile?.provider || 'anthropic'\n const baseURL = modelProfile?.baseURL\n const apiKey = modelProfile?.apiKey\n const proxy = getGlobalConfig().proxy\n ? new ProxyAgent(getGlobalConfig().proxy)\n : undefined\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n }\n\n if (apiKey) {\n if (provider === 'azure') {\n headers['api-key'] = apiKey\n } else {\n headers['Authorization'] = `Bearer ${apiKey}`\n }\n }\n\n applyModelSpecificTransformations(opts)\n await applyModelErrorFixes(opts, baseURL || '')\n\n // \uD83D\uDD25 REAL-TIME API CALL DEBUG - \u4F7F\u7528\u5168\u5C40\u65E5\u5FD7\u7CFB\u7EDF\n debugLogger.api('OPENAI_API_CALL_START', {\n endpoint: baseURL || 'DEFAULT_OPENAI',\n model: opts.model,\n provider,\n apiKeyConfigured: !!apiKey,\n apiKeyPrefix: apiKey ? apiKey.substring(0, 8) : null,\n maxTokens: opts.max_tokens,\n temperature: opts.temperature,\n messageCount: opts.messages?.length || 0,\n streamMode: opts.stream,\n timestamp: new Date().toISOString(),\n modelProfileModelName: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n })\n\n // Make sure all tool messages have string content\n opts.messages = opts.messages.map(msg => {\n if (msg.role === 'tool') {\n if (Array.isArray(msg.content)) {\n return {\n ...msg,\n content:\n msg.content\n .map(c => c.text || '')\n .filter(Boolean)\n .join('\\n\\n') || '(empty content)',\n }\n } else if (typeof msg.content !== 'string') {\n return {\n ...msg,\n content:\n typeof msg.content === 'undefined'\n ? 
'(empty content)'\n : JSON.stringify(msg.content),\n }\n }\n }\n return msg\n })\n\n // Define Azure-specific API endpoint with version\n const azureApiVersion = '2024-06-01'\n let endpoint = '/chat/completions'\n\n if (provider === 'azure') {\n endpoint = `/chat/completions?api-version=${azureApiVersion}`\n } else if (provider === 'minimax') {\n endpoint = '/text/chatcompletion_v2'\n }\n\n try {\n if (opts.stream) {\n const isOpenAICompatible = [\n 'minimax',\n 'kimi',\n 'deepseek',\n 'siliconflow',\n 'qwen',\n 'glm',\n 'baidu-qianfan',\n 'openai',\n 'mistral',\n 'xai',\n 'groq',\n 'custom-openai',\n ].includes(provider)\n\n let response: Response\n let usedEndpoint: string\n\n if (isOpenAICompatible && provider !== 'azure') {\n const result = await tryWithEndpointFallback(\n baseURL,\n opts,\n headers,\n provider,\n proxy,\n signal, // \uD83D\uDD27 Pass AbortSignal to endpoint fallback\n )\n response = result.response\n usedEndpoint = result.endpoint\n } else {\n response = await fetch(`${baseURL}${endpoint}`, {\n method: 'POST',\n headers,\n body: JSON.stringify({ ...opts, stream: true }),\n dispatcher: proxy,\n signal: signal, // \uD83D\uDD27 CRITICAL FIX: Connect AbortSignal to fetch call\n })\n usedEndpoint = endpoint\n }\n\n if (!response.ok) {\n // \uD83D\uDD27 CRITICAL FIX: Check abort signal BEFORE showing retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n \n // \uD83D\uDD25 NEW: Parse error message to detect and handle specific API errors\n try {\n const errorData = await response.json()\n // Type guard for error data structure\n const hasError = (data: unknown): data is { error?: { message?: string }; message?: string } => {\n return typeof data === 'object' && data !== null\n }\n const errorMessage = hasError(errorData) \n ? (errorData.error?.message || errorData.message || `HTTP ${response.status}`)\n : `HTTP ${response.status}`\n \n // Check if this is a parameter error that we can fix\n const isGPT5 = opts.model.startsWith('gpt-5')\n const handlers = isGPT5 ? 
[...GPT5_ERROR_HANDLERS, ...ERROR_HANDLERS] : ERROR_HANDLERS\n \n for (const handler of handlers) {\n if (handler.detect(errorMessage)) {\n console.log(`\uD83D\uDD27 Detected ${handler.type} error for ${opts.model}: ${errorMessage}`)\n \n // Store this error for future requests\n setModelError(baseURL || '', opts.model, handler.type, errorMessage)\n \n // Apply the fix and retry immediately\n await handler.fix(opts)\n console.log(`\uD83D\uDD27 Applied fix for ${handler.type}, retrying...`)\n \n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal,\n )\n }\n }\n \n // If no specific handler found, log the error for debugging\n console.log(`\u26A0\uFE0F Unhandled API error (${response.status}): ${errorMessage}`)\n \n // Log API error using unified logger\n logAPIError({\n model: opts.model,\n endpoint: `${baseURL}${endpoint}`,\n status: response.status,\n error: errorMessage,\n request: opts,\n response: errorData,\n provider: provider\n })\n } catch (parseError) {\n // If we can't parse the error, fall back to generic retry\n console.log(`\u26A0\uFE0F Could not parse error response (${response.status})`)\n \n // Log parse error\n logAPIError({\n model: opts.model,\n endpoint: `${baseURL}${endpoint}`,\n status: response.status,\n error: `Could not parse error response: ${parseError.message}`,\n request: opts,\n response: { parseError: parseError.message },\n provider: provider\n })\n }\n \n const delayMs = getRetryDelay(attempt)\n console.log(\n ` \u23BF API error (${response.status}), retrying in ${Math.round(delayMs / 1000)}s... (attempt ${attempt + 1}/${maxAttempts})`,\n )\n try {\n await abortableDelay(delayMs, signal)\n } catch (error) {\n // If aborted during delay, throw the error to stop retrying\n if (error.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal, // \uD83D\uDD27 Pass AbortSignal to recursive call\n )\n }\n\n const stream = createStreamProcessor(response.body as any, signal)\n return stream\n }\n\n // Non-streaming request\n const isOpenAICompatible = [\n 'minimax',\n 'kimi',\n 'deepseek',\n 'siliconflow',\n 'qwen',\n 'glm',\n 'baidu-qianfan',\n 'openai',\n 'mistral',\n 'xai',\n 'groq',\n 'custom-openai',\n ].includes(provider)\n\n let response: Response\n let usedEndpoint: string\n\n if (isOpenAICompatible && provider !== 'azure') {\n const result = await tryWithEndpointFallback(\n baseURL,\n opts,\n headers,\n provider,\n proxy,\n signal, // \uD83D\uDD27 Pass AbortSignal to endpoint fallback\n )\n response = result.response\n usedEndpoint = result.endpoint\n } else {\n response = await fetch(`${baseURL}${endpoint}`, {\n method: 'POST',\n headers,\n body: JSON.stringify(opts),\n dispatcher: proxy,\n signal: signal, // \uD83D\uDD27 CRITICAL FIX: Connect AbortSignal to non-streaming fetch call\n })\n usedEndpoint = endpoint\n }\n\n if (!response.ok) {\n // \uD83D\uDD27 CRITICAL FIX: Check abort signal BEFORE showing retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n \n // \uD83D\uDD25 NEW: Parse error message to detect and handle specific API errors\n try {\n const errorData = await response.json()\n // Type guard for error data structure\n const hasError = (data: unknown): data is { error?: { message?: string }; message?: string } => {\n return typeof data === 'object' && data !== null\n }\n const errorMessage = hasError(errorData) \n ? 
(errorData.error?.message || errorData.message || `HTTP ${response.status}`)\n : `HTTP ${response.status}`\n \n // Check if this is a parameter error that we can fix\n const isGPT5 = opts.model.startsWith('gpt-5')\n const handlers = isGPT5 ? [...GPT5_ERROR_HANDLERS, ...ERROR_HANDLERS] : ERROR_HANDLERS\n \n for (const handler of handlers) {\n if (handler.detect(errorMessage)) {\n console.log(`\uD83D\uDD27 Detected ${handler.type} error for ${opts.model}: ${errorMessage}`)\n \n // Store this error for future requests\n setModelError(baseURL || '', opts.model, handler.type, errorMessage)\n \n // Apply the fix and retry immediately\n await handler.fix(opts)\n console.log(`\uD83D\uDD27 Applied fix for ${handler.type}, retrying...`)\n \n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal,\n )\n }\n }\n \n // If no specific handler found, log the error for debugging\n console.log(`\u26A0\uFE0F Unhandled API error (${response.status}): ${errorMessage}`)\n } catch (parseError) {\n // If we can't parse the error, fall back to generic retry\n console.log(`\u26A0\uFE0F Could not parse error response (${response.status})`)\n }\n \n const delayMs = getRetryDelay(attempt)\n console.log(\n ` \u23BF API error (${response.status}), retrying in ${Math.round(delayMs / 1000)}s... (attempt ${attempt + 1}/${maxAttempts})`,\n )\n try {\n await abortableDelay(delayMs, signal)\n } catch (error) {\n // If aborted during delay, throw the error to stop retrying\n if (error.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal, // \uD83D\uDD27 Pass AbortSignal to recursive call\n )\n }\n\n const responseData = (await response.json()) as OpenAI.ChatCompletion\n return responseData\n } catch (error) {\n // \uD83D\uDD27 CRITICAL FIX: Check abort signal BEFORE showing retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n \n if (attempt < maxAttempts) {\n // \uD83D\uDD27 Double-check abort status to avoid showing misleading retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n \n const delayMs = getRetryDelay(attempt)\n console.log(\n ` \u23BF Network error, retrying in ${Math.round(delayMs / 1000)}s... 
(attempt ${attempt + 1}/${maxAttempts})`,\n )\n try {\n await abortableDelay(delayMs, signal)\n } catch (error) {\n // If aborted during delay, throw the error to stop retrying\n if (error.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal, // \uD83D\uDD27 Pass AbortSignal to recursive call\n )\n }\n throw error\n }\n}\n\nexport function createStreamProcessor(\n stream: any,\n signal?: AbortSignal,\n): AsyncGenerator<OpenAI.ChatCompletionChunk, void, unknown> {\n if (!stream) {\n throw new Error('Stream is null or undefined')\n }\n\n return (async function* () {\n const reader = stream.getReader()\n const decoder = new TextDecoder('utf-8')\n let buffer = ''\n\n try {\n while (true) {\n // Check for cancellation before attempting to read\n if (signal?.aborted) {\n break\n }\n\n let readResult\n try {\n readResult = await reader.read()\n } catch (e) {\n // If signal is aborted, this is user cancellation - exit silently\n if (signal?.aborted) {\n break\n }\n console.error('Error reading from stream:', e)\n break\n }\n\n const { done, value } = readResult\n if (done) {\n break\n }\n\n const chunk = decoder.decode(value, { stream: true })\n buffer += chunk\n\n let lineEnd = buffer.indexOf('\\n')\n while (lineEnd !== -1) {\n const line = buffer.substring(0, lineEnd).trim()\n buffer = buffer.substring(lineEnd + 1)\n\n if (line === 'data: [DONE]') {\n continue\n }\n\n if (line.startsWith('data: ')) {\n const data = line.slice(6).trim()\n if (!data) continue\n\n try {\n const parsed = JSON.parse(data) as OpenAI.ChatCompletionChunk\n yield parsed\n } catch (e) {\n console.error('Error parsing JSON:', data, e)\n }\n }\n\n lineEnd = buffer.indexOf('\\n')\n }\n }\n\n // Process any remaining data in the buffer\n if (buffer.trim()) {\n const lines = buffer.trim().split('\\n')\n for (const line of lines) {\n if (line.startsWith('data: ') && line !== 'data: [DONE]') {\n const data = line.slice(6).trim()\n if (!data) continue\n\n try {\n const parsed = JSON.parse(data) as OpenAI.ChatCompletionChunk\n yield parsed\n } catch (e) {\n console.error('Error parsing final JSON:', data, e)\n }\n }\n }\n }\n } catch (e) {\n console.error('Unexpected error in stream processing:', e)\n } finally {\n try {\n reader.releaseLock()\n } catch (e) {\n console.error('Error releasing reader lock:', e)\n }\n }\n })()\n}\n\nexport function streamCompletion(\n stream: any,\n signal?: AbortSignal,\n): AsyncGenerator<OpenAI.ChatCompletionChunk, void, unknown> {\n return createStreamProcessor(stream, signal)\n}\n\n/**\n * Call GPT-5 Responses API with proper parameter handling\n */\nexport async function callGPT5ResponsesAPI(\n modelProfile: any,\n opts: any, // Using 'any' for Responses API params which differ from ChatCompletionCreateParams\n signal?: AbortSignal,\n): Promise<any> {\n const baseURL = modelProfile?.baseURL || 'https://api.openai.com/v1'\n const apiKey = modelProfile?.apiKey\n const proxy = getGlobalConfig().proxy\n ? 
new ProxyAgent(getGlobalConfig().proxy)\n : undefined\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n }\n\n // \uD83D\uDD25 Enhanced Responses API Parameter Mapping for GPT-5\n const responsesParams: any = {\n model: opts.model,\n input: opts.messages, // Responses API uses 'input' instead of 'messages'\n }\n\n // \uD83D\uDD27 GPT-5 Token Configuration\n if (opts.max_completion_tokens) {\n responsesParams.max_completion_tokens = opts.max_completion_tokens\n } else if (opts.max_tokens) {\n // Fallback conversion if max_tokens is still present\n responsesParams.max_completion_tokens = opts.max_tokens\n }\n\n // \uD83D\uDD27 GPT-5 Temperature Handling (only 1 or undefined)\n if (opts.temperature === 1) {\n responsesParams.temperature = 1\n }\n // Note: Do not pass temperature if it's not 1, GPT-5 will use default\n\n // \uD83D\uDD27 GPT-5 Reasoning Configuration\n const reasoningEffort = opts.reasoning_effort || 'medium'\n responsesParams.reasoning = {\n effort: reasoningEffort,\n // \uD83D\uDE80 Enable reasoning summaries for transparency in coding tasks\n generate_summary: true,\n }\n\n // \uD83D\uDD27 GPT-5 Tools Support\n if (opts.tools && opts.tools.length > 0) {\n responsesParams.tools = opts.tools\n \n // \uD83D\uDE80 GPT-5 Tool Choice Configuration\n if (opts.tool_choice) {\n responsesParams.tool_choice = opts.tool_choice\n }\n }\n\n // \uD83D\uDD27 GPT-5 System Instructions (separate from messages)\n const systemMessages = opts.messages.filter(msg => msg.role === 'system')\n const nonSystemMessages = opts.messages.filter(msg => msg.role !== 'system')\n \n if (systemMessages.length > 0) {\n responsesParams.instructions = systemMessages.map(msg => msg.content).join('\\n\\n')\n responsesParams.input = nonSystemMessages\n }\n\n // Handle verbosity (if supported) - optimized for coding tasks\n const features = getModelFeatures(opts.model)\n if (features.supportsVerbosityControl) {\n // High verbosity for coding tasks to get detailed explanations and structured code\n // Based on GPT-5 best practices for agent-like coding environments\n responsesParams.text = {\n verbosity: 'high',\n }\n }\n\n // Apply GPT-5 coding optimizations\n if (opts.model.startsWith('gpt-5')) {\n // Set reasoning effort based on task complexity\n if (!responsesParams.reasoning) {\n responsesParams.reasoning = {\n effort: 'medium', // Balanced for most coding tasks\n }\n }\n\n // Add instructions parameter for coding-specific guidance\n if (!responsesParams.instructions) {\n responsesParams.instructions = `You are an expert programmer working in a terminal-based coding environment. 
Follow these guidelines:\n- Provide clear, concise code solutions\n- Use proper error handling and validation\n- Follow coding best practices and patterns\n- Explain complex logic when necessary\n- Focus on maintainable, readable code`\n }\n }\n\n try {\n const response = await fetch(`${baseURL}/responses`, {\n method: 'POST',\n headers,\n body: JSON.stringify(responsesParams),\n dispatcher: proxy,\n signal: signal,\n })\n\n if (!response.ok) {\n throw new Error(`GPT-5 Responses API error: ${response.status} ${response.statusText}`)\n }\n\n const responseData = await response.json()\n \n // Convert Responses API response back to Chat Completion format for compatibility\n return convertResponsesAPIToChatCompletion(responseData)\n } catch (error) {\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n}\n\n/**\n * Convert Responses API response to Chat Completion format for compatibility\n * \uD83D\uDD25 Enhanced for GPT-5 with reasoning summary support\n */\nfunction convertResponsesAPIToChatCompletion(responsesData: any): any {\n // Extract content from Responses API format\n let outputText = responsesData.output_text || ''\n const usage = responsesData.usage || {}\n \n // \uD83D\uDE80 GPT-5 Reasoning Summary Integration\n // If reasoning summary is available, prepend it to the output for transparency\n if (responsesData.output && Array.isArray(responsesData.output)) {\n const reasoningItems = responsesData.output.filter(item => item.type === 'reasoning' && item.summary)\n const messageItems = responsesData.output.filter(item => item.type === 'message')\n \n if (reasoningItems.length > 0 && messageItems.length > 0) {\n const reasoningSummary = reasoningItems\n .map(item => item.summary?.map(s => s.text).join('\\n'))\n .filter(Boolean)\n .join('\\n\\n')\n \n const mainContent = messageItems\n .map(item => item.content?.map(c => c.text).join('\\n'))\n .filter(Boolean)\n .join('\\n\\n')\n \n if (reasoningSummary) {\n outputText = `**\uD83E\uDDE0 Reasoning Process:**\\n${reasoningSummary}\\n\\n**\uD83D\uDCDD Response:**\\n${mainContent}`\n } else {\n outputText = mainContent\n }\n }\n }\n\n return {\n id: responsesData.id || `chatcmpl-${Date.now()}`,\n object: 'chat.completion',\n created: Math.floor(Date.now() / 1000),\n model: responsesData.model || '',\n choices: [\n {\n index: 0,\n message: {\n role: 'assistant',\n content: outputText,\n // \uD83D\uDE80 Include reasoning metadata if available\n ...(responsesData.reasoning && {\n reasoning: {\n effort: responsesData.reasoning.effort,\n summary: responsesData.reasoning.summary,\n },\n }),\n },\n finish_reason: responsesData.status === 'completed' ? 
'stop' : 'length',\n },\n ],\n usage: {\n prompt_tokens: usage.input_tokens || 0,\n completion_tokens: usage.output_tokens || 0,\n total_tokens: (usage.input_tokens || 0) + (usage.output_tokens || 0),\n // \uD83D\uDD27 GPT-5 Enhanced Usage Details\n prompt_tokens_details: {\n cached_tokens: usage.input_tokens_details?.cached_tokens || 0,\n },\n completion_tokens_details: {\n reasoning_tokens: usage.output_tokens_details?.reasoning_tokens || 0,\n },\n },\n }\n}\n\n/**\n * Enhanced getCompletionWithProfile that supports GPT-5 Responses API\n * \uD83D\uDD25 Optimized for both official OpenAI and third-party GPT-5 providers\n */\nasync function getGPT5CompletionWithProfile(\n modelProfile: any,\n opts: OpenAI.ChatCompletionCreateParams,\n attempt: number = 0,\n maxAttempts: number = 10,\n signal?: AbortSignal,\n): Promise<OpenAI.ChatCompletion | AsyncIterable<OpenAI.ChatCompletionChunk>> {\n const features = getModelFeatures(opts.model)\n const isOfficialOpenAI = !modelProfile.baseURL || \n modelProfile.baseURL.includes('api.openai.com')\n\n // \uD83D\uDE80 Try Responses API for official OpenAI non-streaming requests\n if (features.supportsResponsesAPI && !opts.stream && isOfficialOpenAI) {\n try {\n debugLogger.api('ATTEMPTING_GPT5_RESPONSES_API', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n provider: modelProfile.provider,\n stream: opts.stream,\n requestId: getCurrentRequest()?.id,\n })\n \n const result = await callGPT5ResponsesAPI(modelProfile, opts, signal)\n \n debugLogger.api('GPT5_RESPONSES_API_SUCCESS', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n requestId: getCurrentRequest()?.id,\n })\n \n return result\n } catch (error) {\n debugLogger.api('GPT5_RESPONSES_API_FALLBACK', {\n model: opts.model,\n error: error.message,\n baseURL: modelProfile.baseURL || 'official',\n requestId: getCurrentRequest()?.id,\n })\n \n console.warn(\n `\uD83D\uDD04 GPT-5 Responses API failed, falling back to Chat Completions: ${error.message}`\n )\n // Fall through to Chat Completions API\n }\n } \n \n // \uD83C\uDF10 Handle third-party GPT-5 providers with enhanced compatibility\n else if (!isOfficialOpenAI) {\n debugLogger.api('GPT5_THIRD_PARTY_PROVIDER', {\n model: opts.model,\n baseURL: modelProfile.baseURL,\n provider: modelProfile.provider,\n supportsResponsesAPI: features.supportsResponsesAPI,\n requestId: getCurrentRequest()?.id,\n })\n \n // \uD83D\uDD27 Apply enhanced parameter optimization for third-party providers\n console.log(`\uD83C\uDF10 Using GPT-5 via third-party provider: ${modelProfile.provider} (${modelProfile.baseURL})`)\n \n // Some third-party providers may need additional parameter adjustments\n if (modelProfile.provider === 'azure') {\n // Azure OpenAI specific adjustments\n delete opts.reasoning_effort // Azure may not support this yet\n } else if (modelProfile.provider === 'custom-openai') {\n // Generic OpenAI-compatible provider optimizations\n console.log(`\uD83D\uDD27 Applying OpenAI-compatible optimizations for custom provider`)\n }\n }\n \n // \uD83D\uDCE1 Handle streaming requests (Responses API doesn't support streaming yet)\n else if (opts.stream) {\n debugLogger.api('GPT5_STREAMING_MODE', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n reason: 'responses_api_no_streaming',\n requestId: getCurrentRequest()?.id,\n })\n \n console.log(`\uD83D\uDD04 Using Chat Completions for streaming (Responses API streaming not available)`)\n }\n\n // \uD83D\uDD27 Enhanced Chat Completions fallback with GPT-5 
optimizations\n debugLogger.api('USING_CHAT_COMPLETIONS_FOR_GPT5', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n provider: modelProfile.provider,\n reason: isOfficialOpenAI ? 'streaming_or_fallback' : 'third_party_provider',\n requestId: getCurrentRequest()?.id,\n })\n\n return await getCompletionWithProfile(\n modelProfile,\n opts,\n attempt,\n maxAttempts,\n signal,\n )\n}\n\n/**\n * Fetch available models from custom OpenAI-compatible API\n */\nexport async function fetchCustomModels(\n baseURL: string,\n apiKey: string,\n): Promise<any[]> {\n try {\n // Check if baseURL already contains version number (e.g., v1, v2, etc.)\n const hasVersionNumber = /\\/v\\d+/.test(baseURL)\n const cleanBaseURL = baseURL.replace(/\\/+$/, '')\n const modelsURL = hasVersionNumber\n ? `${cleanBaseURL}/models`\n : `${cleanBaseURL}/v1/models`\n\n const response = await fetch(modelsURL, {\n method: 'GET',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n })\n\n if (!response.ok) {\n // Provide user-friendly error messages based on status code\n if (response.status === 401) {\n throw new Error(\n 'Invalid API key. Please check your API key and try again.',\n )\n } else if (response.status === 403) {\n throw new Error(\n 'API key does not have permission to access models. Please check your API key permissions.',\n )\n } else if (response.status === 404) {\n throw new Error(\n 'API endpoint not found. Please check if the base URL is correct and supports the /models endpoint.',\n )\n } else if (response.status === 429) {\n throw new Error(\n 'Too many requests. Please wait a moment and try again.',\n )\n } else if (response.status >= 500) {\n throw new Error(\n 'API service is temporarily unavailable. Please try again later.',\n )\n } else {\n throw new Error(\n `Unable to connect to API (${response.status}). Please check your base URL, API key, and internet connection.`,\n )\n }\n }\n\n const data = await response.json()\n\n // Type guards for different API response formats\n const hasDataArray = (obj: unknown): obj is { data: unknown[] } => {\n return typeof obj === 'object' && obj !== null && 'data' in obj && Array.isArray((obj as any).data)\n }\n \n const hasModelsArray = (obj: unknown): obj is { models: unknown[] } => {\n return typeof obj === 'object' && obj !== null && 'models' in obj && Array.isArray((obj as any).models)\n }\n\n // Validate response format and extract models array\n let models = []\n\n if (hasDataArray(data)) {\n // Standard OpenAI format: { data: [...] }\n models = data.data\n } else if (Array.isArray(data)) {\n // Direct array format\n models = data\n } else if (hasModelsArray(data)) {\n // Alternative format: { models: [...] }\n models = data.models\n } else {\n throw new Error(\n 'API returned unexpected response format. 
Expected an array of models or an object with a \"data\" or \"models\" array.',\n )\n }\n\n // Ensure we have an array and validate it contains model objects\n if (!Array.isArray(models)) {\n throw new Error('API response format error: models data is not an array.')\n }\n\n return models\n } catch (error) {\n // If it's already our custom error, pass it through\n if (\n error instanceof Error &&\n (error.message.includes('API key') ||\n error.message.includes('API endpoint') ||\n error.message.includes('API service') ||\n error.message.includes('response format'))\n ) {\n throw error\n }\n\n // For network errors or other issues\n console.error('Failed to fetch custom API models:', error)\n\n // Check if it's a network error\n if (error instanceof Error && error.message.includes('fetch')) {\n throw new Error(\n 'Unable to connect to the API. Please check the base URL and your internet connection.',\n )\n }\n\n throw new Error(\n 'Failed to fetch models from custom API. Please check your configuration and try again.',\n )\n }\n}\n"],
- "mappings": "AACA,SAAS,uBAAqC;AAC9C,SAAS,YAAY,aAAuB;AAC5C,SAAS,iBAAiB,uBAAuB;AACjD,SAAS,SAAS,aAAa,mBAAmB,mBAAmB;AAKrE,MAAM,eAAe;AAAA,EACnB,eAAe;AAAA,EACf,cAAc;AAAA,EACd,qBAAqB;AAAA,EACrB,eAAe;AACjB;AAKA,SAAS,cAAc,SAAiB,YAAoC;AAE1E,MAAI,YAAY;AACd,UAAM,eAAe,SAAS,UAAU,IAAI;AAC5C,QAAI,CAAC,MAAM,YAAY,KAAK,eAAe,GAAG;AAC5C,aAAO,KAAK,IAAI,cAAc,aAAa,mBAAmB;AAAA,IAChE;AAAA,EACF;AAGA,QAAM,QAAQ,aAAa,gBAAgB,KAAK,IAAI,GAAG,UAAU,CAAC;AAClE,QAAM,SAAS,KAAK,OAAO,IAAI,aAAa,gBAAgB;AAE5D,SAAO,KAAK,IAAI,QAAQ,QAAQ,aAAa,YAAY;AAC3D;AAGA,SAAS,eAAe,SAAiB,QAAqC;AAC5E,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAEtC,QAAI,QAAQ,SAAS;AACnB,aAAO,IAAI,MAAM,qBAAqB,CAAC;AACvC;AAAA,IACF;AAEA,UAAM,YAAY,WAAW,MAAM;AACjC,cAAQ;AAAA,IACV,GAAG,OAAO;AAGV,QAAI,QAAQ;AACV,YAAM,eAAe,MAAM;AACzB,qBAAa,SAAS;AACtB,eAAO,IAAI,MAAM,qBAAqB,CAAC;AAAA,MACzC;AACA,aAAO,iBAAiB,SAAS,cAAc,EAAE,MAAM,KAAK,CAAC;AAAA,IAC/D;AAAA,EACF,CAAC;AACH;AAEA,IAAK,iBAAL,kBAAKA,oBAAL;AACE,EAAAA,gBAAA,eAAY;AACZ,EAAAA,gBAAA,yBAAsB;AACtB,EAAAA,gBAAA,4BAAyB;AACzB,EAAAA,gBAAA,mBAAgB;AAChB,EAAAA,gBAAA,eAAY;AACZ,EAAAA,gBAAA,eAAY;AANT,SAAAA;AAAA,GAAA;AASL,SAAS,iBACP,SACA,OACA,MACQ;AACR,SAAO,GAAG,OAAO,IAAI,KAAK,IAAI,IAAI;AACpC;AAEA,SAAS,cACP,SACA,OACA,MACS;AACT,SAAO,CAAC,CAAC,gBAAgB,aAAa,EACpC,iBAAiB,SAAS,OAAO,IAAI,CACvC;AACF;AAEA,SAAS,cACP,SACA,OACA,MACA,OACA;AACA,kBAAgB,eAAe;AAAA,IAC7B,CAAC,iBAAiB,SAAS,OAAO,IAAI,CAAC,GAAG;AAAA,EAC5C,CAAC;AACH;AAcA,MAAM,sBAAsC;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU;AAChB,YAAM,WAAW,OAAO,YAAY;AACpC;AAAA;AAAA,QAEG,SAAS,SAAS,qCAAqC,KAAK,SAAS,SAAS,yBAAyB;AAAA,QAEvG,SAAS,SAAS,YAAY,KAAK,SAAS,SAAS,uBAAuB,KAC5E,SAAS,SAAS,YAAY,KAAK,SAAS,SAAS,eAAe,KACpE,SAAS,SAAS,YAAY,KAAK,SAAS,SAAS,2BAA2B;AAAA,QAEhF,SAAS,SAAS,mBAAmB,KAAK,SAAS,SAAS,YAAY,KACxE,SAAS,SAAS,iBAAiB,KAAK,SAAS,SAAS,YAAY;AAAA;AAAA,IAE3E;AAAA,IACA,KAAK,OAAM,SAAQ;AACjB,cAAQ,IAAI,+CAAwC,KAAK,UAAU,4BAA4B;AAC/F,UAAI,gBAAgB,MAAM;AACxB,aAAK,wBAAwB,KAAK;AAClC,eAAO,KAAK;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU;AAChB,YAAM,WAAW,OAAO,YAAY;AACpC,aACE,SAAS,SAAS,aAAa,MAC9B,SAAS,SAAS,eAAe,KAAK,SAAS,SAAS,WAAW,KAAK,SAAS,SAAS,qBAAqB;AAAA,IAEpH;AAAA,IACA,KAAK,OAAM,SAAQ;AACjB,cAAQ,IAAI,mDAA4C,KAAK,WAAW,OAAO;AAC/E,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAEF;AAGA,MAAM,iBAAiC;AAAA,EACrC;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YACN,OAAO,SAAS,4CAA4C;AAAA,IAC9D,KAAK,OAAM,SAAQ;AACjB,YAAM,mBAAmB,CAAC;AAC1B,iBAAW,QAAQ,KAAK,SAAS,CAAC,GAAG;AACnC,YAAI,KAAK,SAAS,YAAY,UAAU,KAAM;AAC9C,YAAI,MAAM;AACV,YAAI,YAAY;AAChB,iBAAS,QAAQ,KAAK,SAAS,YAAY,MAAM,IAAI,GAAG;AACtD,cAAI,IAAI,SAAS,KAAK,SAAS,MAAM;AACnC,mBAAO,OAAO;AAAA,UAChB,OAAO;AACL,yBAAa,OAAO;AAAA,UACtB;AAAA,QACF;AAEA,aAAK,SAAS,cAAc;AAC5B,yBAAiB,KAAK,SAAS,IAAI,IAAI;AAAA,MACzC;AACA,UAAI,OAAO,KAAK,gBAAgB,EAAE,SAAS,GAAG;AAC5C,YAAI,UAAU;AACd,mBAAW,CAAC,MAAM,WAAW,KAAK,OAAO,QAAQ,gBAAgB,GAAG;AAClE,qBAAW,IAAI,IAAI;AAAA,EAAM,WAAW;AAAA,IAAO,IAAI;AAAA;AAAA;AAAA,QACjD;AACA,mBAAW;AAEX,iBAAS,IAAI,KAAK,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AAClD,cAAI,KAAK,SAAS,CAAC,EAAE,SAAS,UAAU;AACtC,iBAAK,SAAS,OAAO,IAAI,GAAG,GAAG;AAAA,cAC7B,MAAM;AAAA,cACN;AAAA,YACF,CAAC;AACD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU,OAAO,SAAS,6BAA6B;AAAA,IAC/D,KAAK,OAAM,SAAQ;AACjB,WAAK,wBAAwB,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU,OAAO,SAAS,gBAAgB;AAAA,IAClD,KAAK,OAAM,SAAQ;AACjB,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YACN,OAAO,SAAS,gCAAgC,KAChD,OAAO,SAAS,WAAW;AAAA,IAC7B,KAAK,OAAM,SAAQ;AACjB,UAAI,CAAC,KAAK,SAAU;AAEpB,iBAAW,WAAW,KAAK,UAAU;AACnC,YAAI,CAAC,QAAS;AAEd,YAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AA
ClC,qBAAW,QAAQ,QAAQ,SAAS;AAElC,gBAAI,QAAQ,OAAO,SAAS,UAAU;AACpC,oBAAM,UAAU;AAChB,kBAAI,eAAe,SAAS;AAC1B,uBAAO,QAAQ;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,QACF,WAAW,QAAQ,WAAW,OAAO,QAAQ,YAAY,UAAU;AAEjE,gBAAM,aAAa,QAAQ;AAI3B,cAAI,eAAe,YAAY;AAC7B,mBAAO,WAAW;AAAA,UACpB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAGA,SAAS,iBAAiB,QAAyB;AACjD,MAAI,CAAC,OAAQ,QAAO;AACpB,QAAM,WAAW,OAAO,YAAY;AACpC,SACE,SAAS,SAAS,YAAY,KAC9B,SAAS,SAAS,mBAAmB,KACrC,SAAS,SAAS,KAAK;AAE3B;AAaA,MAAM,iBAAgD;AAAA;AAAA,EAEpD,IAAI,EAAE,yBAAyB,KAAK;AAAA,EACpC,cAAc,EAAE,yBAAyB,KAAK;AAAA,EAC9C,WAAW,EAAE,yBAAyB,KAAK;AAAA,EAC3C,UAAU,EAAE,yBAAyB,KAAK;AAAA,EAC1C,WAAW,EAAE,yBAAyB,KAAK;AAAA;AAAA,EAE3C,SAAS;AAAA,IACP,yBAAyB;AAAA,IACzB,sBAAsB;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,EACxB;AAAA,EACA,cAAc;AAAA,IACZ,yBAAyB;AAAA,IACzB,sBAAsB;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,EACxB;AAAA,EACA,cAAc;AAAA,IACZ,yBAAyB;AAAA,IACzB,sBAAsB;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,EACxB;AAAA,EACA,qBAAqB;AAAA,IACnB,yBAAyB;AAAA,IACzB,sBAAsB;AAAA;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,EAC5B;AACF;AAGA,SAAS,iBAAiB,WAAkC;AAC1D,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C,WAAO,EAAE,yBAAyB,MAAM;AAAA,EAC1C;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,WAAO,eAAe,SAAS;AAAA,EACjC;AAGA,MAAI,UAAU,YAAY,EAAE,SAAS,OAAO,GAAG;AAC7C,WAAO;AAAA,MACL,yBAAyB;AAAA,MACzB,sBAAsB;AAAA,MACtB,wBAAwB;AAAA,MACxB,0BAA0B;AAAA,MAC1B,qBAAqB;AAAA,MACrB,sBAAsB;AAAA,IACxB;AAAA,EACF;AAGA,aAAW,CAAC,KAAK,QAAQ,KAAK,OAAO,QAAQ,cAAc,GAAG;AAC5D,QAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AAGA,SAAO,EAAE,yBAAyB,MAAM;AAC1C;AAGA,SAAS,kCACP,MACM;AACN,MAAI,CAAC,KAAK,SAAS,OAAO,KAAK,UAAU,UAAU;AACjD;AAAA,EACF;AAEA,QAAM,WAAW,iBAAiB,KAAK,KAAK;AAC5C,QAAM,SAAS,KAAK,MAAM,YAAY,EAAE,SAAS,OAAO;AAGxD,MAAI,UAAU,SAAS,yBAAyB;AAE9C,QAAI,gBAAgB,QAAQ,EAAE,2BAA2B,OAAO;AAC9D,cAAQ,IAAI,sCAA+B,KAAK,UAAU,kCAAkC,KAAK,KAAK,EAAE;AACxG,WAAK,wBAAwB,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,SAAS,0BAA0B,iBAAiB,MAAM;AAC5D,UAAI,KAAK,gBAAgB,KAAK,KAAK,gBAAgB,QAAW;AAC5D,gBAAQ;AAAA,UACN,sEAA+D,KAAK,WAAW,aAAa,KAAK,KAAK;AAAA,QACxG;AACA,aAAK,cAAc;AAAA,MACrB;AAAA,IACF;AAGA,QAAI,QAAQ;AAEV,aAAO,KAAK;AACZ,aAAO,KAAK;AACZ,aAAO,KAAK;AACZ,aAAO,KAAK;AAGZ,UAAI,CAAC,KAAK,oBAAoB,SAAS,0BAA0B;AAC/D,aAAK,mBAAmB;AAAA,MAC1B;AAAA,IACF;AAAA,EACF,OAGK;AAEH,QACE,SAAS,2BACT,gBAAgB,QAChB,EAAE,2BAA2B,OAC7B;AACA,WAAK,wBAAwB,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAGF;AAEA,eAAe,qBACb,MACA,SACA;AACA,QAAM,SAAS,KAAK,MAAM,WAAW,OAAO;AAC5C,QAAM,WAAW,SAAS,CAAC,GAAG,qBAAqB,GAAG,cAAc,IAAI;AAExE,aAAW,WAAW,UAAU;AAC9B,QAAI,cAAc,SAAS,KAAK,OAAO,QAAQ,IAAI,GAAG;AACpD,YAAM,QAAQ,IAAI,IAAI;AACtB;AAAA,IACF;AAAA,EACF;AACF;AAGA,eAAe,wBACb,SACA,MACA,SACA,UACA,OACA,QACmD;AACnD,QAAM,iBAAiB,CAAC;AAExB,MAAI,aAAa,WAAW;AAC1B,mBAAe,KAAK,2BAA2B,mBAAmB;AAAA,EACpE,OAAO;AACL,mBAAe,KAAK,mBAAmB;AAAA,EACzC;AAEA,MAAI,YAAY;AAEhB,aAAW,YAAY,gBAAgB;AACrC,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,GAAG,QAAQ,IAAI;AAAA,QACpD,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU,KAAK,SAAS,EAAE,GAAG,MAAM,QAAQ,KAAK,IAAI,IAAI;AAAA,QACnE,YAAY;AAAA,QACZ;AAAA;AAAA,MACF,CAAC;AAGD,UAAI,SAAS,IAAI;AACf,eAAO,EAAE,UAAU,SAAS;AAAA,MAC9B;AAGA,UAAI,SAAS,WAAW,OAAO,eAAe,SAAS,GAAG;AACxD,gBAAQ;AAAA,UACN,YAAY,QAAQ;AAAA,QACtB;AACA;AAAA,MACF;AAGA,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B,SAAS,OAAO;AACd,kBAAY;AAEZ,UAAI,eAAe,QAAQ,QAAQ,IAAI,eAAe,SAAS,GAAG;AAChE,gBAAQ,IAAI,oBAAoB,QAAQ,2BAA2B;AACnE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa,IAAI,MAAM,sBAAsB;AACrD;AAKA,eAAsB,yBACpB,cACA,MACA,UAAkB,GAClB,cAAsB,IACtB,QAC4E;AAC5E,MAAI,WAAW,aAAa;AAC1B,UAAM,IAAI,MAAM,sBAAsB
;AAAA,EACxC;AAEA,QAAM,WAAW,cAAc,YAAY;AAC3C,QAAM,UAAU,cAAc;AAC9B,QAAM,SAAS,cAAc;AAC7B,QAAM,QAAQ,gBAAgB,EAAE,QAC5B,IAAI,WAAW,gBAAgB,EAAE,KAAK,IACtC;AAEJ,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AAEA,MAAI,QAAQ;AACV,QAAI,aAAa,SAAS;AACxB,cAAQ,SAAS,IAAI;AAAA,IACvB,OAAO;AACL,cAAQ,eAAe,IAAI,UAAU,MAAM;AAAA,IAC7C;AAAA,EACF;AAEA,oCAAkC,IAAI;AACtC,QAAM,qBAAqB,MAAM,WAAW,EAAE;AAG9C,cAAY,IAAI,yBAAyB;AAAA,IACvC,UAAU,WAAW;AAAA,IACrB,OAAO,KAAK;AAAA,IACZ;AAAA,IACA,kBAAkB,CAAC,CAAC;AAAA,IACpB,cAAc,SAAS,OAAO,UAAU,GAAG,CAAC,IAAI;AAAA,IAChD,WAAW,KAAK;AAAA,IAChB,aAAa,KAAK;AAAA,IAClB,cAAc,KAAK,UAAU,UAAU;AAAA,IACvC,YAAY,KAAK;AAAA,IACjB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,uBAAuB,cAAc;AAAA,IACrC,kBAAkB,cAAc;AAAA,EAClC,CAAC;AAGD,OAAK,WAAW,KAAK,SAAS,IAAI,SAAO;AACvC,QAAI,IAAI,SAAS,QAAQ;AACvB,UAAI,MAAM,QAAQ,IAAI,OAAO,GAAG;AAC9B,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SACE,IAAI,QACD,IAAI,OAAK,EAAE,QAAQ,EAAE,EACrB,OAAO,OAAO,EACd,KAAK,MAAM,KAAK;AAAA,QACvB;AAAA,MACF,WAAW,OAAO,IAAI,YAAY,UAAU;AAC1C,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SACE,OAAO,IAAI,YAAY,cACnB,oBACA,KAAK,UAAU,IAAI,OAAO;AAAA,QAClC;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,kBAAkB;AACxB,MAAI,WAAW;AAEf,MAAI,aAAa,SAAS;AACxB,eAAW,iCAAiC,eAAe;AAAA,EAC7D,WAAW,aAAa,WAAW;AACjC,eAAW;AAAA,EACb;AAEA,MAAI;AACF,QAAI,KAAK,QAAQ;AACf,YAAMC,sBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,EAAE,SAAS,QAAQ;AAEnB,UAAIC;AACJ,UAAIC;AAEJ,UAAIF,uBAAsB,aAAa,SAAS;AAC9C,cAAM,SAAS,MAAM;AAAA,UACnB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,QACF;AACA,QAAAC,YAAW,OAAO;AAClB,QAAAC,gBAAe,OAAO;AAAA,MACxB,OAAO;AACL,QAAAD,YAAW,MAAM,MAAM,GAAG,OAAO,GAAG,QAAQ,IAAI;AAAA,UAC9C,QAAQ;AAAA,UACR;AAAA,UACA,MAAM,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AAAA,UAC9C,YAAY;AAAA,UACZ;AAAA;AAAA,QACF,CAAC;AACD,QAAAC,gBAAe;AAAA,MACjB;AAEA,UAAI,CAACD,UAAS,IAAI;AAEhB,YAAI,QAAQ,SAAS;AACnB,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AAGA,YAAI;AACF,gBAAM,YAAY,MAAMA,UAAS,KAAK;AAEtC,gBAAM,WAAW,CAAC,SAA8E;AAC9F,mBAAO,OAAO,SAAS,YAAY,SAAS;AAAA,UAC9C;AACA,gBAAM,eAAe,SAAS,SAAS,IAClC,UAAU,OAAO,WAAW,UAAU,WAAW,QAAQA,UAAS,MAAM,KACzE,QAAQA,UAAS,MAAM;AAG3B,gBAAM,SAAS,KAAK,MAAM,WAAW,OAAO;AAC5C,gBAAM,WAAW,SAAS,CAAC,GAAG,qBAAqB,GAAG,cAAc,IAAI;AAExE,qBAAW,WAAW,UAAU;AAC9B,gBAAI,QAAQ,OAAO,YAAY,GAAG;AAChC,sBAAQ,IAAI,sBAAe,QAAQ,IAAI,cAAc,KAAK,KAAK,KAAK,YAAY,EAAE;AAGlF,4BAAc,WAAW,IAAI,KAAK,OAAO,QAAQ,MAAM,YAAY;AAGnE,oBAAM,QAAQ,IAAI,IAAI;AACtB,sBAAQ,IAAI,6BAAsB,QAAQ,IAAI,eAAe;AAE7D,qBAAO;AAAA,gBACL;AAAA,gBACA;AAAA,gBACA,UAAU;AAAA,gBACV;AAAA,gBACA;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAGA,kBAAQ,IAAI,sCAA4BA,UAAS,MAAM,MAAM,YAAY,EAAE;AAG3E,sBAAY;AAAA,YACV,OAAO,KAAK;AAAA,YACZ,UAAU,GAAG,OAAO,GAAG,QAAQ;AAAA,YAC/B,QAAQA,UAAS;AAAA,YACjB,OAAO;AAAA,YACP,SAAS;AAAA,YACT,UAAU;AAAA,YACV;AAAA,UACF,CAAC;AAAA,QACH,SAAS,YAAY;AAEnB,kBAAQ,IAAI,iDAAuCA,UAAS,MAAM,GAAG;AAGrE,sBAAY;AAAA,YACV,OAAO,KAAK;AAAA,YACZ,UAAU,GAAG,OAAO,GAAG,QAAQ;AAAA,YAC/B,QAAQA,UAAS;AAAA,YACjB,OAAO,mCAAmC,WAAW,OAAO;AAAA,YAC5D,SAAS;AAAA,YACT,UAAU,EAAE,YAAY,WAAW,QAAQ;AAAA,YAC3C;AAAA,UACF,CAAC;AAAA,QACH;AAEA,cAAM,UAAU,cAAc,OAAO;AACrC,gBAAQ;AAAA,UACN,wBAAmBA,UAAS,MAAM,kBAAkB,KAAK,MAAM,UAAU,GAAI,CAAC,iBAAiB,UAAU,CAAC,IAAI,WAAW;AAAA,QAC3H;AACA,YAAI;AACF,gBAAM,eAAe,SAAS,MAAM;AAAA,QACtC,SAAS,OAAO;AAEd,cAAI,MAAM,YAAY,uBAAuB;AAC3C,kBAAM,IAAI,MAAM,2BAA2B;AAAA,UAC7C;AACA,gBAAM;AAAA,QACR;AACA,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,SAAS,sBAAsBA,UAAS,MAAa,MAAM;AACjE,aAAO;AAAA,IACT;AAGA,UAAM,qBAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MA
CA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,EAAE,SAAS,QAAQ;AAEnB,QAAI;AACJ,QAAI;AAEJ,QAAI,sBAAsB,aAAa,SAAS;AAC9C,YAAM,SAAS,MAAM;AAAA,QACnB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAAA,MACF;AACA,iBAAW,OAAO;AAClB,qBAAe,OAAO;AAAA,IACxB,OAAO;AACL,iBAAW,MAAM,MAAM,GAAG,OAAO,GAAG,QAAQ,IAAI;AAAA,QAC9C,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,YAAY;AAAA,QACZ;AAAA;AAAA,MACF,CAAC;AACD,qBAAe;AAAA,IACjB;AAEA,QAAI,CAAC,SAAS,IAAI;AAEhB,UAAI,QAAQ,SAAS;AACnB,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAGA,UAAI;AACF,cAAM,YAAY,MAAM,SAAS,KAAK;AAEtC,cAAM,WAAW,CAAC,SAA8E;AAC9F,iBAAO,OAAO,SAAS,YAAY,SAAS;AAAA,QAC9C;AACA,cAAM,eAAe,SAAS,SAAS,IAClC,UAAU,OAAO,WAAW,UAAU,WAAW,QAAQ,SAAS,MAAM,KACzE,QAAQ,SAAS,MAAM;AAG3B,cAAM,SAAS,KAAK,MAAM,WAAW,OAAO;AAC5C,cAAM,WAAW,SAAS,CAAC,GAAG,qBAAqB,GAAG,cAAc,IAAI;AAExE,mBAAW,WAAW,UAAU;AAC9B,cAAI,QAAQ,OAAO,YAAY,GAAG;AAChC,oBAAQ,IAAI,sBAAe,QAAQ,IAAI,cAAc,KAAK,KAAK,KAAK,YAAY,EAAE;AAGlF,0BAAc,WAAW,IAAI,KAAK,OAAO,QAAQ,MAAM,YAAY;AAGnE,kBAAM,QAAQ,IAAI,IAAI;AACtB,oBAAQ,IAAI,6BAAsB,QAAQ,IAAI,eAAe;AAE7D,mBAAO;AAAA,cACL;AAAA,cACA;AAAA,cACA,UAAU;AAAA,cACV;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAGA,gBAAQ,IAAI,sCAA4B,SAAS,MAAM,MAAM,YAAY,EAAE;AAAA,MAC7E,SAAS,YAAY;AAEnB,gBAAQ,IAAI,iDAAuC,SAAS,MAAM,GAAG;AAAA,MACvE;AAEA,YAAM,UAAU,cAAc,OAAO;AACrC,cAAQ;AAAA,QACN,wBAAmB,SAAS,MAAM,kBAAkB,KAAK,MAAM,UAAU,GAAI,CAAC,iBAAiB,UAAU,CAAC,IAAI,WAAW;AAAA,MAC3H;AACA,UAAI;AACF,cAAM,eAAe,SAAS,MAAM;AAAA,MACtC,SAAS,OAAO;AAEd,YAAI,MAAM,YAAY,uBAAuB;AAC3C,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AACA,cAAM;AAAA,MACR;AACA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA;AAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAgB,MAAM,SAAS,KAAK;AAC1C,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,QAAI,UAAU,aAAa;AAEzB,UAAI,QAAQ,SAAS;AACnB,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAEA,YAAM,UAAU,cAAc,OAAO;AACrC,cAAQ;AAAA,QACN,wCAAmC,KAAK,MAAM,UAAU,GAAI,CAAC,iBAAiB,UAAU,CAAC,IAAI,WAAW;AAAA,MAC1G;AACA,UAAI;AACF,cAAM,eAAe,SAAS,MAAM;AAAA,MACtC,SAASE,QAAO;AAEd,YAAIA,OAAM,YAAY,uBAAuB;AAC3C,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AACA,cAAMA;AAAA,MACR;AACA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA;AAAA;AAAA,MACF;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEO,SAAS,sBACd,QACA,QAC2D;AAC3D,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AAEA,UAAQ,mBAAmB;AACzB,UAAM,SAAS,OAAO,UAAU;AAChC,UAAM,UAAU,IAAI,YAAY,OAAO;AACvC,QAAI,SAAS;AAEb,QAAI;AACF,aAAO,MAAM;AAEX,YAAI,QAAQ,SAAS;AACnB;AAAA,QACF;AAEA,YAAI;AACJ,YAAI;AACF,uBAAa,MAAM,OAAO,KAAK;AAAA,QACjC,SAAS,GAAG;AAEV,cAAI,QAAQ,SAAS;AACnB;AAAA,UACF;AACA,kBAAQ,MAAM,8BAA8B,CAAC;AAC7C;AAAA,QACF;AAEA,cAAM,EAAE,MAAM,MAAM,IAAI;AACxB,YAAI,MAAM;AACR;AAAA,QACF;AAEA,cAAM,QAAQ,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AACpD,kBAAU;AAEV,YAAI,UAAU,OAAO,QAAQ,IAAI;AACjC,eAAO,YAAY,IAAI;AACrB,gBAAM,OAAO,OAAO,UAAU,GAAG,OAAO,EAAE,KAAK;AAC/C,mBAAS,OAAO,UAAU,UAAU,CAAC;AAErC,cAAI,SAAS,gBAAgB;AAC3B;AAAA,UACF;AAEA,cAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,kBAAM,OAAO,KAAK,MAAM,CAAC,EAAE,KAAK;AAChC,gBAAI,CAAC,KAAM;AAEX,gBAAI;AACF,oBAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,oBAAM;AAAA,YACR,SAAS,GAAG;AACV,sBAAQ,MAAM,uBAAuB,MAAM,CAAC;AAAA,YAC9C;AAAA,UACF;AAEA,oBAAU,OAAO,QAAQ,IAAI;AAAA,QAC/B;AAAA,MACF;AAGA,UAAI,OAAO,KAAK,GAAG;AACjB,cAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI;AACtC,mBAAW,QAAQ,OAAO;AACxB,cAAI,KAAK,WAAW,QAAQ,KAAK,SAAS,gBAAgB;AACxD,kBAAM,OAAO,KAAK,MAAM,CAAC,EAAE,KAAK;AAChC,gBAAI,CAAC,KAAM;AAEX,gBAAI;AACF,oBAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,oBAAM;AAAA,YACR,SAAS,GAAG;AACV,sBAAQ,MAAM,6BAA6B,MAAM,CAAC;AAAA,YACpD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,cAAQ,MAAM,0CAA0C,CAAC;AAAA,IAC3D,UAAE;AACA,UA
AI;AACF,eAAO,YAAY;AAAA,MACrB,SAAS,GAAG;AACV,gBAAQ,MAAM,gCAAgC,CAAC;AAAA,MACjD;AAAA,IACF;AAAA,EACF,GAAG;AACL;AAEO,SAAS,iBACd,QACA,QAC2D;AAC3D,SAAO,sBAAsB,QAAQ,MAAM;AAC7C;AAKA,eAAsB,qBACpB,cACA,MACA,QACc;AACd,QAAM,UAAU,cAAc,WAAW;AACzC,QAAM,SAAS,cAAc;AAC7B,QAAM,QAAQ,gBAAgB,EAAE,QAC5B,IAAI,WAAW,gBAAgB,EAAE,KAAK,IACtC;AAEJ,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,IAChB,eAAe,UAAU,MAAM;AAAA,EACjC;AAGA,QAAM,kBAAuB;AAAA,IAC3B,OAAO,KAAK;AAAA,IACZ,OAAO,KAAK;AAAA;AAAA,EACd;AAGA,MAAI,KAAK,uBAAuB;AAC9B,oBAAgB,wBAAwB,KAAK;AAAA,EAC/C,WAAW,KAAK,YAAY;AAE1B,oBAAgB,wBAAwB,KAAK;AAAA,EAC/C;AAGA,MAAI,KAAK,gBAAgB,GAAG;AAC1B,oBAAgB,cAAc;AAAA,EAChC;AAIA,QAAM,kBAAkB,KAAK,oBAAoB;AACjD,kBAAgB,YAAY;AAAA,IAC1B,QAAQ;AAAA;AAAA,IAER,kBAAkB;AAAA,EACpB;AAGA,MAAI,KAAK,SAAS,KAAK,MAAM,SAAS,GAAG;AACvC,oBAAgB,QAAQ,KAAK;AAG7B,QAAI,KAAK,aAAa;AACpB,sBAAgB,cAAc,KAAK;AAAA,IACrC;AAAA,EACF;AAGA,QAAM,iBAAiB,KAAK,SAAS,OAAO,SAAO,IAAI,SAAS,QAAQ;AACxE,QAAM,oBAAoB,KAAK,SAAS,OAAO,SAAO,IAAI,SAAS,QAAQ;AAE3E,MAAI,eAAe,SAAS,GAAG;AAC7B,oBAAgB,eAAe,eAAe,IAAI,SAAO,IAAI,OAAO,EAAE,KAAK,MAAM;AACjF,oBAAgB,QAAQ;AAAA,EAC1B;AAGA,QAAM,WAAW,iBAAiB,KAAK,KAAK;AAC5C,MAAI,SAAS,0BAA0B;AAGrC,oBAAgB,OAAO;AAAA,MACrB,WAAW;AAAA,IACb;AAAA,EACF;AAGA,MAAI,KAAK,MAAM,WAAW,OAAO,GAAG;AAElC,QAAI,CAAC,gBAAgB,WAAW;AAC9B,sBAAgB,YAAY;AAAA,QAC1B,QAAQ;AAAA;AAAA,MACV;AAAA,IACF;AAGA,QAAI,CAAC,gBAAgB,cAAc;AACjC,sBAAgB,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMjC;AAAA,EACF;AAEA,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,GAAG,OAAO,cAAc;AAAA,MACnD,QAAQ;AAAA,MACR;AAAA,MACA,MAAM,KAAK,UAAU,eAAe;AAAA,MACpC,YAAY;AAAA,MACZ;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,8BAA8B,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IACxF;AAEA,UAAM,eAAe,MAAM,SAAS,KAAK;AAGzC,WAAO,oCAAoC,YAAY;AAAA,EACzD,SAAS,OAAO;AACd,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AACA,UAAM;AAAA,EACR;AACF;AAMA,SAAS,oCAAoC,eAAyB;AAEpE,MAAI,aAAa,cAAc,eAAe;AAC9C,QAAM,QAAQ,cAAc,SAAS,CAAC;AAItC,MAAI,cAAc,UAAU,MAAM,QAAQ,cAAc,MAAM,GAAG;AAC/D,UAAM,iBAAiB,cAAc,OAAO,OAAO,UAAQ,KAAK,SAAS,eAAe,KAAK,OAAO;AACpG,UAAM,eAAe,cAAc,OAAO,OAAO,UAAQ,KAAK,SAAS,SAAS;AAEhF,QAAI,eAAe,SAAS,KAAK,aAAa,SAAS,GAAG;AACxD,YAAM,mBAAmB,eACtB,IAAI,UAAQ,KAAK,SAAS,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,EACrD,OAAO,OAAO,EACd,KAAK,MAAM;AAEd,YAAM,cAAc,aACjB,IAAI,UAAQ,KAAK,SAAS,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,EACrD,OAAO,OAAO,EACd,KAAK,MAAM;AAEd,UAAI,kBAAkB;AACpB,qBAAa;AAAA,EAA8B,gBAAgB;AAAA;AAAA;AAAA,EAAyB,WAAW;AAAA,MACjG,OAAO;AACL,qBAAa;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,cAAc,MAAM,YAAY,KAAK,IAAI,CAAC;AAAA,IAC9C,QAAQ;AAAA,IACR,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAAA,IACrC,OAAO,cAAc,SAAS;AAAA,IAC9B,SAAS;AAAA,MACP;AAAA,QACE,OAAO;AAAA,QACP,SAAS;AAAA,UACP,MAAM;AAAA,UACN,SAAS;AAAA;AAAA,UAET,GAAI,cAAc,aAAa;AAAA,YAC7B,WAAW;AAAA,cACT,QAAQ,cAAc,UAAU;AAAA,cAChC,SAAS,cAAc,UAAU;AAAA,YACnC;AAAA,UACF;AAAA,QACF;AAAA,QACA,eAAe,cAAc,WAAW,cAAc,SAAS;AAAA,MACjE;AAAA,IACF;AAAA,IACA,OAAO;AAAA,MACL,eAAe,MAAM,gBAAgB;AAAA,MACrC,mBAAmB,MAAM,iBAAiB;AAAA,MAC1C,eAAe,MAAM,gBAAgB,MAAM,MAAM,iBAAiB;AAAA;AAAA,MAElE,uBAAuB;AAAA,QACrB,eAAe,MAAM,sBAAsB,iBAAiB;AAAA,MAC9D;AAAA,MACA,2BAA2B;AAAA,QACzB,kBAAkB,MAAM,uBAAuB,oBAAoB;AAAA,MACrE;AAAA,IACF;AAAA,EACF;AACF;AAMA,eAAe,6BACb,cACA,MACA,UAAkB,GAClB,cAAsB,IACtB,QAC4E;AAC5E,QAAM,WAAW,iBAAiB,KAAK,KAAK;AAC5C,QAAM,mBAAmB,CAAC,aAAa,WACrC,aAAa,QAAQ,SAAS,gBAAgB;AAGhD,MAAI,SAAS,wBAAwB,CAAC,KAAK,UAAU,kBAAkB;AACrE,QAAI;AACF,kBAAY,IAAI,iCAAiC;AAAA,QAC/C,OAAO,KAAK;AAAA,QACZ,SAAS,aAAa,WAAW;AAAA,QACjC,UAAU,aAAa;AAAA,QACvB,QAAQ,KAAK;AAAA,QACb,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAED,YAAM,SAAS,MAAM,qBAAqB,cAAc,MAAM,MAAM;AAEpE,kBAAY,IAAI,8BAA8B;AAAA,QAC5C,OAAO,KAAK;
AAAA,QACZ,SAAS,aAAa,WAAW;AAAA,QACjC,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAO;AACd,kBAAY,IAAI,+BAA+B;AAAA,QAC7C,OAAO,KAAK;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,SAAS,aAAa,WAAW;AAAA,QACjC,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAED,cAAQ;AAAA,QACN,2EAAoE,MAAM,OAAO;AAAA,MACnF;AAAA,IAEF;AAAA,EACF,WAGS,CAAC,kBAAkB;AAC1B,gBAAY,IAAI,6BAA6B;AAAA,MAC3C,OAAO,KAAK;AAAA,MACZ,SAAS,aAAa;AAAA,MACtB,UAAU,aAAa;AAAA,MACvB,sBAAsB,SAAS;AAAA,MAC/B,WAAW,kBAAkB,GAAG;AAAA,IAClC,CAAC;AAGD,YAAQ,IAAI,mDAA4C,aAAa,QAAQ,KAAK,aAAa,OAAO,GAAG;AAGzG,QAAI,aAAa,aAAa,SAAS;AAErC,aAAO,KAAK;AAAA,IACd,WAAW,aAAa,aAAa,iBAAiB;AAEpD,cAAQ,IAAI,wEAAiE;AAAA,IAC/E;AAAA,EACF,WAGS,KAAK,QAAQ;AACpB,gBAAY,IAAI,uBAAuB;AAAA,MACrC,OAAO,KAAK;AAAA,MACZ,SAAS,aAAa,WAAW;AAAA,MACjC,QAAQ;AAAA,MACR,WAAW,kBAAkB,GAAG;AAAA,IAClC,CAAC;AAED,YAAQ,IAAI,wFAAiF;AAAA,EAC/F;AAGA,cAAY,IAAI,mCAAmC;AAAA,IACjD,OAAO,KAAK;AAAA,IACZ,SAAS,aAAa,WAAW;AAAA,IACjC,UAAU,aAAa;AAAA,IACvB,QAAQ,mBAAmB,0BAA0B;AAAA,IACrD,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,SAAO,MAAM;AAAA,IACX;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAsB,kBACpB,SACA,QACgB;AAChB,MAAI;AAEF,UAAM,mBAAmB,SAAS,KAAK,OAAO;AAC9C,UAAM,eAAe,QAAQ,QAAQ,QAAQ,EAAE;AAC/C,UAAM,YAAY,mBACd,GAAG,YAAY,YACf,GAAG,YAAY;AAEnB,UAAM,WAAW,MAAM,MAAM,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAEhB,UAAI,SAAS,WAAW,KAAK;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,UAAU,KAAK;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,OAAO;AACL,cAAM,IAAI;AAAA,UACR,6BAA6B,SAAS,MAAM;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,UAAM,eAAe,CAAC,QAA6C;AACjE,aAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,UAAU,OAAO,MAAM,QAAS,IAAY,IAAI;AAAA,IACpG;AAEA,UAAM,iBAAiB,CAAC,QAA+C;AACrE,aAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,YAAY,OAAO,MAAM,QAAS,IAAY,MAAM;AAAA,IACxG;AAGA,QAAI,SAAS,CAAC;AAEd,QAAI,aAAa,IAAI,GAAG;AAEtB,eAAS,KAAK;AAAA,IAChB,WAAW,MAAM,QAAQ,IAAI,GAAG;AAE9B,eAAS;AAAA,IACX,WAAW,eAAe,IAAI,GAAG;AAE/B,eAAS,KAAK;AAAA,IAChB,OAAO;AACL,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,QAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AAC1B,YAAM,IAAI,MAAM,yDAAyD;AAAA,IAC3E;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,QACE,iBAAiB,UAChB,MAAM,QAAQ,SAAS,SAAS,KAC/B,MAAM,QAAQ,SAAS,cAAc,KACrC,MAAM,QAAQ,SAAS,aAAa,KACpC,MAAM,QAAQ,SAAS,iBAAiB,IAC1C;AACA,YAAM;AAAA,IACR;AAGA,YAAQ,MAAM,sCAAsC,KAAK;AAGzD,QAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,OAAO,GAAG;AAC7D,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;",
- "names": ["ModelErrorType", "isOpenAICompatible", "response", "usedEndpoint", "error"]
+ "sourcesContent": ["import { OpenAI } from 'openai'\nimport { getGlobalConfig } from '@utils/config'\nimport { ProxyAgent, fetch, Response } from 'undici'\nimport { setSessionState, getSessionState } from '@utils/sessionState'\nimport {\n debug as debugLogger,\n getCurrentRequest,\n logAPIError,\n} from '@utils/debugLogger'\nimport { logError } from '@utils/log'\nimport { abortableDelay, DEFAULT_RETRY_CONFIG } from '@utils/async'\n\n/**\n * Retry configuration constants for API calls\n */\nconst RETRY_CONFIG = {\n ...DEFAULT_RETRY_CONFIG,\n} as const\n\n/**\n * Calculate retry delay with exponential backoff and jitter\n */\nfunction getRetryDelay(attempt: number, retryAfter?: string | null): number {\n // If server suggests a retry-after time, use it\n if (retryAfter) {\n const retryAfterMs = parseInt(retryAfter) * 1000\n if (!isNaN(retryAfterMs) && retryAfterMs > 0) {\n return Math.min(retryAfterMs, RETRY_CONFIG.maxServerDelayMs)\n }\n }\n\n // Exponential backoff with jitter\n const delay = RETRY_CONFIG.baseDelayMs * Math.pow(2, attempt - 1)\n const jitter = Math.random() * RETRY_CONFIG.jitterFactor * delay\n\n return Math.min(delay + jitter, RETRY_CONFIG.maxDelayMs)\n}\n\nenum ModelErrorType {\n MaxLength = '1024',\n MaxCompletionTokens = 'max_completion_tokens',\n TemperatureRestriction = 'temperature_restriction',\n StreamOptions = 'stream_options',\n Citations = 'citations',\n RateLimit = 'rate_limit',\n}\n\nfunction getModelErrorKey(\n baseURL: string,\n model: string,\n type: ModelErrorType,\n): string {\n return `${baseURL}:${model}:${type}`\n}\n\nfunction hasModelError(\n baseURL: string,\n model: string,\n type: ModelErrorType,\n): boolean {\n return !!getSessionState('modelErrors')[\n getModelErrorKey(baseURL, model, type)\n ]\n}\n\nfunction setModelError(\n baseURL: string,\n model: string,\n type: ModelErrorType,\n error: string,\n) {\n setSessionState('modelErrors', {\n [getModelErrorKey(baseURL, model, type)]: error,\n })\n}\n\n// More flexible error detection system\ntype ErrorDetector = (errMsg: string) => boolean\ntype ErrorFixer = (\n opts: OpenAI.ChatCompletionCreateParams,\n) => Promise<void> | void\ninterface ErrorHandler {\n type: ModelErrorType\n detect: ErrorDetector\n fix: ErrorFixer\n}\n\n// GPT-5 specific error handlers with enhanced detection patterns\nconst GPT5_ERROR_HANDLERS: ErrorHandler[] = [\n {\n type: ModelErrorType.MaxCompletionTokens,\n detect: errMsg => {\n const lowerMsg = errMsg.toLowerCase()\n return (\n // Exact OpenAI GPT-5 error message\n (lowerMsg.includes(\"unsupported parameter: 'max_tokens'\") &&\n lowerMsg.includes(\"'max_completion_tokens'\")) ||\n // Generic max_tokens error patterns\n (lowerMsg.includes('max_tokens') &&\n lowerMsg.includes('max_completion_tokens')) ||\n (lowerMsg.includes('max_tokens') &&\n lowerMsg.includes('not supported')) ||\n (lowerMsg.includes('max_tokens') &&\n lowerMsg.includes('use max_completion_tokens')) ||\n // Additional patterns for various providers\n (lowerMsg.includes('invalid parameter') &&\n lowerMsg.includes('max_tokens')) ||\n (lowerMsg.includes('parameter error') &&\n lowerMsg.includes('max_tokens'))\n )\n },\n fix: async opts => {\n debugLogger.info('GPT5_FIX', {\n action: 'convert_max_tokens',\n from: opts.max_tokens,\n })\n if ('max_tokens' in opts) {\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n }\n },\n },\n {\n type: ModelErrorType.TemperatureRestriction,\n detect: errMsg => {\n const lowerMsg = errMsg.toLowerCase()\n return (\n lowerMsg.includes('temperature') 
&&\n (lowerMsg.includes('only supports') ||\n lowerMsg.includes('must be 1') ||\n lowerMsg.includes('invalid temperature'))\n )\n },\n fix: async opts => {\n debugLogger.info('GPT5_FIX', {\n action: 'adjust_temperature',\n from: opts.temperature,\n to: 1,\n })\n opts.temperature = 1\n },\n },\n // Add more GPT-5 specific handlers as needed\n]\n\n// Standard error handlers\nconst ERROR_HANDLERS: ErrorHandler[] = [\n {\n type: ModelErrorType.MaxLength,\n detect: errMsg =>\n errMsg.includes('Expected a string with maximum length 1024'),\n fix: async opts => {\n const toolDescriptions = {}\n for (const tool of opts.tools || []) {\n if (tool.function.description.length <= 1024) continue\n let str = ''\n let remainder = ''\n for (let line of tool.function.description.split('\\n')) {\n if (str.length + line.length < 1024) {\n str += line + '\\n'\n } else {\n remainder += line + '\\n'\n }\n }\n\n tool.function.description = str\n toolDescriptions[tool.function.name] = remainder\n }\n if (Object.keys(toolDescriptions).length > 0) {\n let content = '<additional-tool-usage-instructions>\\n\\n'\n for (const [name, description] of Object.entries(toolDescriptions)) {\n content += `<${name}>\\n${description}\\n</${name}>\\n\\n`\n }\n content += '</additional-tool-usage-instructions>'\n\n for (let i = opts.messages.length - 1; i >= 0; i--) {\n if (opts.messages[i].role === 'system') {\n opts.messages.splice(i + 1, 0, {\n role: 'system',\n content,\n })\n break\n }\n }\n }\n },\n },\n {\n type: ModelErrorType.MaxCompletionTokens,\n detect: errMsg => errMsg.includes(\"Use 'max_completion_tokens'\"),\n fix: async opts => {\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n },\n },\n {\n type: ModelErrorType.StreamOptions,\n detect: errMsg => errMsg.includes('stream_options'),\n fix: async opts => {\n delete opts.stream_options\n },\n },\n {\n type: ModelErrorType.Citations,\n detect: errMsg =>\n errMsg.includes('Extra inputs are not permitted') &&\n errMsg.includes('citations'),\n fix: async opts => {\n if (!opts.messages) return\n\n for (const message of opts.messages) {\n if (!message) continue\n\n if (Array.isArray(message.content)) {\n for (const item of message.content) {\n // Convert to unknown first to safely access properties\n if (item && typeof item === 'object') {\n const itemObj = item as unknown as Record<string, unknown>\n if ('citations' in itemObj) {\n delete itemObj.citations\n }\n }\n }\n } else if (message.content && typeof message.content === 'object') {\n // Convert to unknown first to safely access properties\n const contentObj = message.content as unknown as Record<\n string,\n unknown\n >\n if ('citations' in contentObj) {\n delete contentObj.citations\n }\n }\n }\n },\n },\n]\n\n// Model-specific feature flags - can be extended with more properties as needed\ninterface ModelFeatures {\n usesMaxCompletionTokens: boolean\n supportsResponsesAPI?: boolean\n requiresTemperatureOne?: boolean\n supportsVerbosityControl?: boolean\n supportsCustomTools?: boolean\n supportsAllowedTools?: boolean\n}\n\n// Map of model identifiers to their specific features\nconst MODEL_FEATURES: Record<string, ModelFeatures> = {\n // OpenAI thinking models\n o1: { usesMaxCompletionTokens: true },\n 'o1-preview': { usesMaxCompletionTokens: true },\n 'o1-mini': { usesMaxCompletionTokens: true },\n 'o1-pro': { usesMaxCompletionTokens: true },\n 'o3-mini': { usesMaxCompletionTokens: true },\n // GPT-5 models\n 'gpt-5': {\n usesMaxCompletionTokens: true,\n supportsResponsesAPI: true,\n 
requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n },\n 'gpt-5-mini': {\n usesMaxCompletionTokens: true,\n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n },\n 'gpt-5-nano': {\n usesMaxCompletionTokens: true,\n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n },\n 'gpt-5-chat-latest': {\n usesMaxCompletionTokens: true,\n supportsResponsesAPI: false, // Uses Chat Completions only\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n },\n}\n\n// Helper to get model features based on model ID/name\nfunction getModelFeatures(modelName: string): ModelFeatures {\n if (!modelName || typeof modelName !== 'string') {\n return { usesMaxCompletionTokens: false }\n }\n\n // Check for exact matches first (highest priority)\n if (MODEL_FEATURES[modelName]) {\n return MODEL_FEATURES[modelName]\n }\n\n // Simple GPT-5 detection: any model name containing 'gpt-5'\n if (modelName.toLowerCase().includes('gpt-5')) {\n return {\n usesMaxCompletionTokens: true,\n supportsResponsesAPI: true,\n requiresTemperatureOne: true,\n supportsVerbosityControl: true,\n supportsCustomTools: true,\n supportsAllowedTools: true,\n }\n }\n\n // Check for partial matches (e.g., other reasoning models)\n for (const [key, features] of Object.entries(MODEL_FEATURES)) {\n if (modelName.includes(key)) {\n return features\n }\n }\n\n // Default features for unknown models\n return { usesMaxCompletionTokens: false }\n}\n\n// Apply model-specific parameter transformations based on model features\nfunction applyModelSpecificTransformations(\n opts: OpenAI.ChatCompletionCreateParams,\n): void {\n if (!opts.model || typeof opts.model !== 'string') {\n return\n }\n\n const features = getModelFeatures(opts.model)\n const isGPT5 = opts.model.toLowerCase().includes('gpt-5')\n\n // \uD83D\uDD25 Enhanced GPT-5 Detection and Transformation\n if (isGPT5 || features.usesMaxCompletionTokens) {\n // Force max_completion_tokens for all GPT-5 models\n if ('max_tokens' in opts && !('max_completion_tokens' in opts)) {\n debugLogger.info('OPENAI_TRANSFORM', {\n action: 'max_tokens_to_completion',\n model: opts.model,\n value: opts.max_tokens,\n })\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n }\n\n // Force temperature = 1 for GPT-5 models\n if (features.requiresTemperatureOne && 'temperature' in opts) {\n if (opts.temperature !== 1 && opts.temperature !== undefined) {\n debugLogger.info('OPENAI_TRANSFORM', {\n action: 'temperature_constraint',\n model: opts.model,\n from: opts.temperature,\n to: 1,\n })\n opts.temperature = 1\n }\n }\n\n // Remove unsupported parameters for GPT-5\n if (isGPT5) {\n // Remove parameters that may not be supported by GPT-5\n delete opts.frequency_penalty\n delete opts.presence_penalty\n delete opts.logit_bias\n delete opts.user\n\n // Add reasoning_effort if not present and model supports it\n if (!opts.reasoning_effort && features.supportsVerbosityControl) {\n opts.reasoning_effort = 'medium' // Default reasoning effort for coding tasks\n }\n }\n }\n\n // Apply transformations for non-GPT-5 models\n else {\n // Standard max_tokens to max_completion_tokens conversion for other reasoning models\n if (\n features.usesMaxCompletionTokens &&\n 'max_tokens' in opts &&\n !('max_completion_tokens' in opts)\n 
) {\n opts.max_completion_tokens = opts.max_tokens\n delete opts.max_tokens\n }\n }\n\n // Add more transformations here as needed\n}\n\nasync function applyModelErrorFixes(\n opts: OpenAI.ChatCompletionCreateParams,\n baseURL: string,\n) {\n const isGPT5 = opts.model.startsWith('gpt-5')\n const handlers = isGPT5\n ? [...GPT5_ERROR_HANDLERS, ...ERROR_HANDLERS]\n : ERROR_HANDLERS\n\n for (const handler of handlers) {\n if (hasModelError(baseURL, opts.model, handler.type)) {\n await handler.fix(opts)\n return\n }\n }\n}\n\n// Helper function to try different endpoints for OpenAI-compatible providers\nasync function tryWithEndpointFallback(\n baseURL: string,\n opts: OpenAI.ChatCompletionCreateParams,\n headers: Record<string, string>,\n provider: string,\n proxy: any,\n signal?: AbortSignal, // \uD83D\uDD27 Add AbortSignal support\n): Promise<{ response: Response; endpoint: string }> {\n const endpointsToTry = []\n\n if (provider === 'minimax') {\n endpointsToTry.push('/text/chatcompletion_v2', '/chat/completions')\n } else {\n endpointsToTry.push('/chat/completions')\n }\n\n let lastError = null\n\n for (const endpoint of endpointsToTry) {\n try {\n const response = await fetch(`${baseURL}${endpoint}`, {\n method: 'POST',\n headers,\n body: JSON.stringify(opts.stream ? { ...opts, stream: true } : opts),\n dispatcher: proxy,\n signal: signal, // \uD83D\uDD27 Connect AbortSignal to fetch call\n })\n\n // If successful, return immediately\n if (response.ok) {\n return { response, endpoint }\n }\n\n // If it's a 404, try the next endpoint\n if (response.status === 404 && endpointsToTry.length > 1) {\n debugLogger.info('OPENAI_ENDPOINT', {\n status: 404,\n endpoint,\n action: 'trying_next',\n })\n continue\n }\n\n // For other error codes, return this response (don't try fallback)\n return { response, endpoint }\n } catch (error) {\n lastError = error\n // Network errors might be temporary, try next endpoint\n if (endpointsToTry.indexOf(endpoint) < endpointsToTry.length - 1) {\n debugLogger.warn('OPENAI_ENDPOINT', {\n error: 'network_error',\n endpoint,\n action: 'trying_next',\n })\n continue\n }\n }\n }\n\n // If we get here, all endpoints failed\n throw lastError || new Error('All endpoints failed')\n}\n\n// Export shared utilities for GPT-5 compatibility\nexport {\n getGPT5CompletionWithProfile,\n getModelFeatures,\n applyModelSpecificTransformations,\n}\n\nexport async function getCompletionWithProfile(\n modelProfile: any,\n opts: OpenAI.ChatCompletionCreateParams,\n attempt: number = 0,\n maxAttempts: number = 10,\n signal?: AbortSignal, // \uD83D\uDD27 CRITICAL FIX: Add AbortSignal support\n): Promise<OpenAI.ChatCompletion | AsyncIterable<OpenAI.ChatCompletionChunk>> {\n if (attempt >= maxAttempts) {\n throw new Error('Max attempts reached')\n }\n\n const provider = modelProfile?.provider || 'anthropic'\n const baseURL = modelProfile?.baseURL\n const apiKey = modelProfile?.apiKey\n const proxy = getGlobalConfig().proxy\n ? 
new ProxyAgent(getGlobalConfig().proxy)\n : undefined\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n }\n\n if (apiKey) {\n if (provider === 'azure') {\n headers['api-key'] = apiKey\n } else {\n headers['Authorization'] = `Bearer ${apiKey}`\n }\n }\n\n applyModelSpecificTransformations(opts)\n await applyModelErrorFixes(opts, baseURL || '')\n\n // \uD83D\uDD25 REAL-TIME API CALL DEBUG - \u4F7F\u7528\u5168\u5C40\u65E5\u5FD7\u7CFB\u7EDF\n debugLogger.api('OPENAI_API_CALL_START', {\n endpoint: baseURL || 'DEFAULT_OPENAI',\n model: opts.model,\n provider,\n apiKeyConfigured: !!apiKey,\n apiKeyPrefix: apiKey ? apiKey.substring(0, 8) : null,\n maxTokens: opts.max_tokens,\n temperature: opts.temperature,\n messageCount: opts.messages?.length || 0,\n streamMode: opts.stream,\n timestamp: new Date().toISOString(),\n modelProfileModelName: modelProfile?.modelName,\n modelProfileName: modelProfile?.name,\n })\n\n // Make sure all tool messages have string content\n opts.messages = opts.messages.map(msg => {\n if (msg.role === 'tool') {\n if (Array.isArray(msg.content)) {\n return {\n ...msg,\n content:\n msg.content\n .map(c => c.text || '')\n .filter(Boolean)\n .join('\\n\\n') || '(empty content)',\n }\n } else if (typeof msg.content !== 'string') {\n return {\n ...msg,\n content:\n typeof msg.content === 'undefined'\n ? '(empty content)'\n : JSON.stringify(msg.content),\n }\n }\n }\n return msg\n })\n\n // Define Azure-specific API endpoint with version\n const azureApiVersion = '2024-06-01'\n let endpoint = '/chat/completions'\n\n if (provider === 'azure') {\n endpoint = `/chat/completions?api-version=${azureApiVersion}`\n } else if (provider === 'minimax') {\n endpoint = '/text/chatcompletion_v2'\n }\n\n try {\n if (opts.stream) {\n const isOpenAICompatible = [\n 'minimax',\n 'kimi',\n 'deepseek',\n 'siliconflow',\n 'qwen',\n 'glm',\n 'baidu-qianfan',\n 'openai',\n 'mistral',\n 'xai',\n 'groq',\n 'custom-openai',\n ].includes(provider)\n\n let response: Response\n\n if (isOpenAICompatible && provider !== 'azure') {\n const result = await tryWithEndpointFallback(\n baseURL,\n opts,\n headers,\n provider,\n proxy,\n signal, // \uD83D\uDD27 Pass AbortSignal to endpoint fallback\n )\n response = result.response\n } else {\n response = await fetch(`${baseURL}${endpoint}`, {\n method: 'POST',\n headers,\n body: JSON.stringify({ ...opts, stream: true }),\n dispatcher: proxy,\n signal: signal, // \uD83D\uDD27 CRITICAL FIX: Connect AbortSignal to fetch call\n })\n }\n\n if (!response.ok) {\n // \uD83D\uDD27 CRITICAL FIX: Check abort signal BEFORE showing retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n\n // \uD83D\uDD25 NEW: Parse error message to detect and handle specific API errors\n try {\n const errorData = await response.json()\n // Type guard for error data structure\n const hasError = (\n data: unknown,\n ): data is { error?: { message?: string }; message?: string } => {\n return typeof data === 'object' && data !== null\n }\n const errorMessage = hasError(errorData)\n ? errorData.error?.message ||\n errorData.message ||\n `HTTP ${response.status}`\n : `HTTP ${response.status}`\n\n // Check if this is a parameter error that we can fix\n const isGPT5 = opts.model.startsWith('gpt-5')\n const handlers = isGPT5\n ? 
[...GPT5_ERROR_HANDLERS, ...ERROR_HANDLERS]\n : ERROR_HANDLERS\n\n for (const handler of handlers) {\n if (handler.detect(errorMessage)) {\n debugLogger.info('OPENAI_ERROR_HANDLER', {\n type: handler.type,\n model: opts.model,\n error: errorMessage,\n })\n\n // Store this error for future requests\n setModelError(\n baseURL || '',\n opts.model,\n handler.type,\n errorMessage,\n )\n\n // Apply the fix and retry immediately\n await handler.fix(opts)\n debugLogger.info('OPENAI_ERROR_HANDLER', {\n action: 'fix_applied',\n type: handler.type,\n })\n\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal,\n )\n }\n }\n\n // If no specific handler found, log the error for debugging\n debugLogger.warn('OPENAI_UNHANDLED_ERROR', {\n status: response.status,\n error: errorMessage,\n })\n\n // Log API error using unified logger\n logAPIError({\n model: opts.model,\n endpoint: `${baseURL}${endpoint}`,\n status: response.status,\n error: errorMessage,\n request: opts,\n response: errorData,\n provider: provider,\n })\n } catch (parseError) {\n // If we can't parse the error, fall back to generic retry\n debugLogger.warn('OPENAI_PARSE_ERROR', { status: response.status })\n\n // Log parse error\n logAPIError({\n model: opts.model,\n endpoint: `${baseURL}${endpoint}`,\n status: response.status,\n error: `Could not parse error response: ${parseError.message}`,\n request: opts,\n response: { parseError: parseError.message },\n provider: provider,\n })\n }\n\n const delayMs = getRetryDelay(attempt)\n console.log(\n ` \u23BF API error (${response.status}), retrying in ${Math.round(delayMs / 1000)}s... (attempt ${attempt + 1}/${maxAttempts})`,\n )\n try {\n await abortableDelay(delayMs, signal)\n } catch (error) {\n // If aborted during delay, throw the error to stop retrying\n if (error.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal, // \uD83D\uDD27 Pass AbortSignal to recursive call\n )\n }\n\n const stream = createStreamProcessor(response.body as any, signal)\n return stream\n }\n\n // Non-streaming request\n const isOpenAICompatible = [\n 'minimax',\n 'kimi',\n 'deepseek',\n 'siliconflow',\n 'qwen',\n 'glm',\n 'baidu-qianfan',\n 'openai',\n 'mistral',\n 'xai',\n 'groq',\n 'custom-openai',\n ].includes(provider)\n\n let response: Response\n\n if (isOpenAICompatible && provider !== 'azure') {\n const result = await tryWithEndpointFallback(\n baseURL,\n opts,\n headers,\n provider,\n proxy,\n signal, // \uD83D\uDD27 Pass AbortSignal to endpoint fallback\n )\n response = result.response\n } else {\n response = await fetch(`${baseURL}${endpoint}`, {\n method: 'POST',\n headers,\n body: JSON.stringify(opts),\n dispatcher: proxy,\n signal: signal, // \uD83D\uDD27 CRITICAL FIX: Connect AbortSignal to non-streaming fetch call\n })\n }\n\n if (!response.ok) {\n // \uD83D\uDD27 CRITICAL FIX: Check abort signal BEFORE showing retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n\n // \uD83D\uDD25 NEW: Parse error message to detect and handle specific API errors\n try {\n const errorData = await response.json()\n // Type guard for error data structure\n const hasError = (\n data: unknown,\n ): data is { error?: { message?: string }; message?: string } => {\n return typeof data === 'object' && data !== null\n }\n const errorMessage = hasError(errorData)\n ? 
errorData.error?.message ||\n errorData.message ||\n `HTTP ${response.status}`\n : `HTTP ${response.status}`\n\n // Check if this is a parameter error that we can fix\n const isGPT5 = opts.model.startsWith('gpt-5')\n const handlers = isGPT5\n ? [...GPT5_ERROR_HANDLERS, ...ERROR_HANDLERS]\n : ERROR_HANDLERS\n\n for (const handler of handlers) {\n if (handler.detect(errorMessage)) {\n debugLogger.info('OPENAI_ERROR_HANDLER', {\n type: handler.type,\n model: opts.model,\n error: errorMessage,\n })\n\n // Store this error for future requests\n setModelError(baseURL || '', opts.model, handler.type, errorMessage)\n\n // Apply the fix and retry immediately\n await handler.fix(opts)\n debugLogger.info('OPENAI_ERROR_HANDLER', {\n action: 'fix_applied',\n type: handler.type,\n })\n\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal,\n )\n }\n }\n\n // If no specific handler found, log the error for debugging\n debugLogger.warn('OPENAI_UNHANDLED_ERROR', {\n status: response.status,\n error: errorMessage,\n })\n } catch (parseError) {\n // If we can't parse the error, fall back to generic retry\n debugLogger.warn('OPENAI_PARSE_ERROR', { status: response.status })\n }\n\n const delayMs = getRetryDelay(attempt)\n console.log(\n ` \u23BF API error (${response.status}), retrying in ${Math.round(delayMs / 1000)}s... (attempt ${attempt + 1}/${maxAttempts})`,\n )\n try {\n await abortableDelay(delayMs, signal)\n } catch (error) {\n // If aborted during delay, throw the error to stop retrying\n if (error.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal, // \uD83D\uDD27 Pass AbortSignal to recursive call\n )\n }\n\n const responseData = (await response.json()) as OpenAI.ChatCompletion\n return responseData\n } catch (error) {\n // \uD83D\uDD27 CRITICAL FIX: Check abort signal BEFORE showing retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n\n if (attempt < maxAttempts) {\n // \uD83D\uDD27 Double-check abort status to avoid showing misleading retry message\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n\n const delayMs = getRetryDelay(attempt)\n console.log(\n ` \u23BF Network error, retrying in ${Math.round(delayMs / 1000)}s... 
(attempt ${attempt + 1}/${maxAttempts})`,\n )\n try {\n await abortableDelay(delayMs, signal)\n } catch (error) {\n // If aborted during delay, throw the error to stop retrying\n if (error.message === 'Request was aborted') {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n return getCompletionWithProfile(\n modelProfile,\n opts,\n attempt + 1,\n maxAttempts,\n signal, // \uD83D\uDD27 Pass AbortSignal to recursive call\n )\n }\n throw error\n }\n}\n\nexport function createStreamProcessor(\n stream: any,\n signal?: AbortSignal,\n): AsyncGenerator<OpenAI.ChatCompletionChunk, void, unknown> {\n if (!stream) {\n throw new Error('Stream is null or undefined')\n }\n\n return (async function* () {\n const reader = stream.getReader()\n const decoder = new TextDecoder('utf-8')\n let buffer = ''\n\n try {\n while (true) {\n // Check for cancellation before attempting to read\n if (signal?.aborted) {\n break\n }\n\n let readResult\n try {\n readResult = await reader.read()\n } catch (e) {\n // If signal is aborted, this is user cancellation - exit silently\n if (signal?.aborted) {\n break\n }\n logError(e)\n break\n }\n\n const { done, value } = readResult\n if (done) {\n break\n }\n\n const chunk = decoder.decode(value, { stream: true })\n buffer += chunk\n\n let lineEnd = buffer.indexOf('\\n')\n while (lineEnd !== -1) {\n const line = buffer.substring(0, lineEnd).trim()\n buffer = buffer.substring(lineEnd + 1)\n\n if (line === 'data: [DONE]') {\n continue\n }\n\n if (line.startsWith('data: ')) {\n const data = line.slice(6).trim()\n if (!data) continue\n\n try {\n const parsed = JSON.parse(data) as OpenAI.ChatCompletionChunk\n yield parsed\n } catch (e) {\n debugLogger.warn('STREAM_PARSE_ERROR', {\n data: data.substring(0, 100),\n error: String(e),\n })\n }\n }\n\n lineEnd = buffer.indexOf('\\n')\n }\n }\n\n // Process any remaining data in the buffer\n if (buffer.trim()) {\n const lines = buffer.trim().split('\\n')\n for (const line of lines) {\n if (line.startsWith('data: ') && line !== 'data: [DONE]') {\n const data = line.slice(6).trim()\n if (!data) continue\n\n try {\n const parsed = JSON.parse(data) as OpenAI.ChatCompletionChunk\n yield parsed\n } catch (e) {\n debugLogger.warn('STREAM_PARSE_ERROR', {\n data: data.substring(0, 100),\n error: String(e),\n final: true,\n })\n }\n }\n }\n }\n } catch (e) {\n logError(e)\n } finally {\n try {\n reader.releaseLock()\n } catch (e) {\n // Silent - reader lock release failure is not critical\n }\n }\n })()\n}\n\nexport function streamCompletion(\n stream: any,\n signal?: AbortSignal,\n): AsyncGenerator<OpenAI.ChatCompletionChunk, void, unknown> {\n return createStreamProcessor(stream, signal)\n}\n\n/**\n * Call GPT-5 Responses API with proper parameter handling\n */\nexport async function callGPT5ResponsesAPI(\n modelProfile: any,\n opts: any, // Using 'any' for Responses API params which differ from ChatCompletionCreateParams\n signal?: AbortSignal,\n): Promise<any> {\n const baseURL = modelProfile?.baseURL || 'https://api.openai.com/v1'\n const apiKey = modelProfile?.apiKey\n const proxy = getGlobalConfig().proxy\n ? 
new ProxyAgent(getGlobalConfig().proxy)\n : undefined\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n }\n\n // \uD83D\uDD25 Enhanced Responses API Parameter Mapping for GPT-5\n const responsesParams: any = {\n model: opts.model,\n input: opts.messages, // Responses API uses 'input' instead of 'messages'\n }\n\n // \uD83D\uDD27 GPT-5 Token Configuration\n if (opts.max_completion_tokens) {\n responsesParams.max_completion_tokens = opts.max_completion_tokens\n } else if (opts.max_tokens) {\n // Fallback conversion if max_tokens is still present\n responsesParams.max_completion_tokens = opts.max_tokens\n }\n\n // \uD83D\uDD27 GPT-5 Temperature Handling (only 1 or undefined)\n if (opts.temperature === 1) {\n responsesParams.temperature = 1\n }\n // Note: Do not pass temperature if it's not 1, GPT-5 will use default\n\n // \uD83D\uDD27 GPT-5 Reasoning Configuration\n const reasoningEffort = opts.reasoning_effort || 'medium'\n responsesParams.reasoning = {\n effort: reasoningEffort,\n // \uD83D\uDE80 Enable reasoning summaries for transparency in coding tasks\n generate_summary: true,\n }\n\n // \uD83D\uDD27 GPT-5 Tools Support\n if (opts.tools && opts.tools.length > 0) {\n responsesParams.tools = opts.tools\n\n // \uD83D\uDE80 GPT-5 Tool Choice Configuration\n if (opts.tool_choice) {\n responsesParams.tool_choice = opts.tool_choice\n }\n }\n\n // \uD83D\uDD27 GPT-5 System Instructions (separate from messages)\n const systemMessages = opts.messages.filter(msg => msg.role === 'system')\n const nonSystemMessages = opts.messages.filter(msg => msg.role !== 'system')\n\n if (systemMessages.length > 0) {\n responsesParams.instructions = systemMessages\n .map(msg => msg.content)\n .join('\\n\\n')\n responsesParams.input = nonSystemMessages\n }\n\n // Handle verbosity (if supported) - optimized for coding tasks\n const features = getModelFeatures(opts.model)\n if (features.supportsVerbosityControl) {\n // High verbosity for coding tasks to get detailed explanations and structured code\n // Based on GPT-5 best practices for agent-like coding environments\n responsesParams.text = {\n verbosity: 'high',\n }\n }\n\n // Apply GPT-5 coding optimizations\n if (opts.model.startsWith('gpt-5')) {\n // Set reasoning effort based on task complexity\n if (!responsesParams.reasoning) {\n responsesParams.reasoning = {\n effort: 'medium', // Balanced for most coding tasks\n }\n }\n\n // Add instructions parameter for coding-specific guidance\n if (!responsesParams.instructions) {\n responsesParams.instructions = `You are an expert programmer working in a terminal-based coding environment. 
Follow these guidelines:\n- Provide clear, concise code solutions\n- Use proper error handling and validation\n- Follow coding best practices and patterns\n- Explain complex logic when necessary\n- Focus on maintainable, readable code`\n }\n }\n\n try {\n const response = await fetch(`${baseURL}/responses`, {\n method: 'POST',\n headers,\n body: JSON.stringify(responsesParams),\n dispatcher: proxy,\n signal: signal,\n })\n\n if (!response.ok) {\n throw new Error(\n `GPT-5 Responses API error: ${response.status} ${response.statusText}`,\n )\n }\n\n const responseData = await response.json()\n\n // Convert Responses API response back to Chat Completion format for compatibility\n return convertResponsesAPIToChatCompletion(responseData)\n } catch (error) {\n if (signal?.aborted) {\n throw new Error('Request cancelled by user')\n }\n throw error\n }\n}\n\n/**\n * Convert Responses API response to Chat Completion format for compatibility\n * \uD83D\uDD25 Enhanced for GPT-5 with reasoning summary support\n */\nfunction convertResponsesAPIToChatCompletion(responsesData: any): any {\n // Extract content from Responses API format\n let outputText = responsesData.output_text || ''\n const usage = responsesData.usage || {}\n\n // \uD83D\uDE80 GPT-5 Reasoning Summary Integration\n // If reasoning summary is available, prepend it to the output for transparency\n if (responsesData.output && Array.isArray(responsesData.output)) {\n const reasoningItems = responsesData.output.filter(\n item => item.type === 'reasoning' && item.summary,\n )\n const messageItems = responsesData.output.filter(\n item => item.type === 'message',\n )\n\n if (reasoningItems.length > 0 && messageItems.length > 0) {\n const reasoningSummary = reasoningItems\n .map(item => item.summary?.map(s => s.text).join('\\n'))\n .filter(Boolean)\n .join('\\n\\n')\n\n const mainContent = messageItems\n .map(item => item.content?.map(c => c.text).join('\\n'))\n .filter(Boolean)\n .join('\\n\\n')\n\n if (reasoningSummary) {\n outputText = `**\uD83E\uDDE0 Reasoning Process:**\\n${reasoningSummary}\\n\\n**\uD83D\uDCDD Response:**\\n${mainContent}`\n } else {\n outputText = mainContent\n }\n }\n }\n\n return {\n id: responsesData.id || `chatcmpl-${Date.now()}`,\n object: 'chat.completion',\n created: Math.floor(Date.now() / 1000),\n model: responsesData.model || '',\n choices: [\n {\n index: 0,\n message: {\n role: 'assistant',\n content: outputText,\n // \uD83D\uDE80 Include reasoning metadata if available\n ...(responsesData.reasoning && {\n reasoning: {\n effort: responsesData.reasoning.effort,\n summary: responsesData.reasoning.summary,\n },\n }),\n },\n finish_reason: responsesData.status === 'completed' ? 
'stop' : 'length',\n },\n ],\n usage: {\n prompt_tokens: usage.input_tokens || 0,\n completion_tokens: usage.output_tokens || 0,\n total_tokens: (usage.input_tokens || 0) + (usage.output_tokens || 0),\n // \uD83D\uDD27 GPT-5 Enhanced Usage Details\n prompt_tokens_details: {\n cached_tokens: usage.input_tokens_details?.cached_tokens || 0,\n },\n completion_tokens_details: {\n reasoning_tokens: usage.output_tokens_details?.reasoning_tokens || 0,\n },\n },\n }\n}\n\n/**\n * Enhanced getCompletionWithProfile that supports GPT-5 Responses API\n * \uD83D\uDD25 Optimized for both official OpenAI and third-party GPT-5 providers\n */\nasync function getGPT5CompletionWithProfile(\n modelProfile: any,\n opts: OpenAI.ChatCompletionCreateParams,\n attempt: number = 0,\n maxAttempts: number = 10,\n signal?: AbortSignal,\n): Promise<OpenAI.ChatCompletion | AsyncIterable<OpenAI.ChatCompletionChunk>> {\n const features = getModelFeatures(opts.model)\n const isOfficialOpenAI =\n !modelProfile.baseURL || modelProfile.baseURL.includes('api.openai.com')\n\n // \uD83D\uDE80 Try Responses API for official OpenAI non-streaming requests\n if (features.supportsResponsesAPI && !opts.stream && isOfficialOpenAI) {\n try {\n debugLogger.api('ATTEMPTING_GPT5_RESPONSES_API', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n provider: modelProfile.provider,\n stream: opts.stream,\n requestId: getCurrentRequest()?.id,\n })\n\n const result = await callGPT5ResponsesAPI(modelProfile, opts, signal)\n\n debugLogger.api('GPT5_RESPONSES_API_SUCCESS', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n requestId: getCurrentRequest()?.id,\n })\n\n return result\n } catch (error) {\n debugLogger.api('GPT5_RESPONSES_API_FALLBACK', {\n model: opts.model,\n error: error.message,\n baseURL: modelProfile.baseURL || 'official',\n requestId: getCurrentRequest()?.id,\n })\n\n debugLogger.warn('GPT5_FALLBACK', {\n reason: 'responses_api_failed',\n error: error.message,\n })\n // Fall through to Chat Completions API\n }\n }\n\n // \uD83C\uDF10 Handle third-party GPT-5 providers with enhanced compatibility\n else if (!isOfficialOpenAI) {\n debugLogger.api('GPT5_THIRD_PARTY_PROVIDER', {\n model: opts.model,\n baseURL: modelProfile.baseURL,\n provider: modelProfile.provider,\n supportsResponsesAPI: features.supportsResponsesAPI,\n requestId: getCurrentRequest()?.id,\n })\n\n // \uD83D\uDD27 Apply enhanced parameter optimization for third-party providers\n debugLogger.info('GPT5_THIRD_PARTY', {\n provider: modelProfile.provider,\n baseURL: modelProfile.baseURL,\n })\n\n // Some third-party providers may need additional parameter adjustments\n if (modelProfile.provider === 'azure') {\n // Azure OpenAI specific adjustments\n delete opts.reasoning_effort // Azure may not support this yet\n } else if (modelProfile.provider === 'custom-openai') {\n // Generic OpenAI-compatible provider optimizations\n debugLogger.info('GPT5_CUSTOM_OPTIMIZATIONS', {\n provider: 'custom-openai',\n })\n }\n }\n\n // \uD83D\uDCE1 Handle streaming requests (Responses API doesn't support streaming yet)\n else if (opts.stream) {\n debugLogger.api('GPT5_STREAMING_MODE', {\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n reason: 'responses_api_no_streaming',\n requestId: getCurrentRequest()?.id,\n })\n\n debugLogger.info('GPT5_STREAMING', { reason: 'responses_api_no_streaming' })\n }\n\n // \uD83D\uDD27 Enhanced Chat Completions fallback with GPT-5 optimizations\n debugLogger.api('USING_CHAT_COMPLETIONS_FOR_GPT5', 
{\n model: opts.model,\n baseURL: modelProfile.baseURL || 'official',\n provider: modelProfile.provider,\n reason: isOfficialOpenAI ? 'streaming_or_fallback' : 'third_party_provider',\n requestId: getCurrentRequest()?.id,\n })\n\n return await getCompletionWithProfile(\n modelProfile,\n opts,\n attempt,\n maxAttempts,\n signal,\n )\n}\n\n/**\n * Fetch available models from custom OpenAI-compatible API\n */\nexport async function fetchCustomModels(\n baseURL: string,\n apiKey: string,\n): Promise<any[]> {\n try {\n // Check if baseURL already contains version number (e.g., v1, v2, etc.)\n const hasVersionNumber = /\\/v\\d+/.test(baseURL)\n const cleanBaseURL = baseURL.replace(/\\/+$/, '')\n const modelsURL = hasVersionNumber\n ? `${cleanBaseURL}/models`\n : `${cleanBaseURL}/v1/models`\n\n const response = await fetch(modelsURL, {\n method: 'GET',\n headers: {\n Authorization: `Bearer ${apiKey}`,\n 'Content-Type': 'application/json',\n },\n })\n\n if (!response.ok) {\n // Provide user-friendly error messages based on status code\n if (response.status === 401) {\n throw new Error(\n 'Invalid API key. Please check your API key and try again.',\n )\n } else if (response.status === 403) {\n throw new Error(\n 'API key does not have permission to access models. Please check your API key permissions.',\n )\n } else if (response.status === 404) {\n throw new Error(\n 'API endpoint not found. Please check if the base URL is correct and supports the /models endpoint.',\n )\n } else if (response.status === 429) {\n throw new Error(\n 'Too many requests. Please wait a moment and try again.',\n )\n } else if (response.status >= 500) {\n throw new Error(\n 'API service is temporarily unavailable. Please try again later.',\n )\n } else {\n throw new Error(\n `Unable to connect to API (${response.status}). Please check your base URL, API key, and internet connection.`,\n )\n }\n }\n\n const data = await response.json()\n\n // Type guards for different API response formats\n const hasDataArray = (obj: unknown): obj is { data: unknown[] } => {\n return (\n typeof obj === 'object' &&\n obj !== null &&\n 'data' in obj &&\n Array.isArray((obj as any).data)\n )\n }\n\n const hasModelsArray = (obj: unknown): obj is { models: unknown[] } => {\n return (\n typeof obj === 'object' &&\n obj !== null &&\n 'models' in obj &&\n Array.isArray((obj as any).models)\n )\n }\n\n // Validate response format and extract models array\n let models = []\n\n if (hasDataArray(data)) {\n // Standard OpenAI format: { data: [...] }\n models = data.data\n } else if (Array.isArray(data)) {\n // Direct array format\n models = data\n } else if (hasModelsArray(data)) {\n // Alternative format: { models: [...] }\n models = data.models\n } else {\n throw new Error(\n 'API returned unexpected response format. 
Expected an array of models or an object with a \"data\" or \"models\" array.',\n )\n }\n\n // Ensure we have an array and validate it contains model objects\n if (!Array.isArray(models)) {\n throw new Error('API response format error: models data is not an array.')\n }\n\n return models\n } catch (error) {\n // If it's already our custom error, pass it through\n if (\n error instanceof Error &&\n (error.message.includes('API key') ||\n error.message.includes('API endpoint') ||\n error.message.includes('API service') ||\n error.message.includes('response format'))\n ) {\n throw error\n }\n\n // For network errors or other issues\n logError(error)\n\n // Check if it's a network error\n if (error instanceof Error && error.message.includes('fetch')) {\n throw new Error(\n 'Unable to connect to the API. Please check the base URL and your internet connection.',\n )\n }\n\n throw new Error(\n 'Failed to fetch models from custom API. Please check your configuration and try again.',\n )\n }\n}\n"],
+ "mappings": "AACA,SAAS,uBAAuB;AAChC,SAAS,YAAY,aAAuB;AAC5C,SAAS,iBAAiB,uBAAuB;AACjD;AAAA,EACE,SAAS;AAAA,EACT;AAAA,EACA;AAAA,OACK;AACP,SAAS,gBAAgB;AACzB,SAAS,gBAAgB,4BAA4B;AAKrD,MAAM,eAAe;AAAA,EACnB,GAAG;AACL;AAKA,SAAS,cAAc,SAAiB,YAAoC;AAE1E,MAAI,YAAY;AACd,UAAM,eAAe,SAAS,UAAU,IAAI;AAC5C,QAAI,CAAC,MAAM,YAAY,KAAK,eAAe,GAAG;AAC5C,aAAO,KAAK,IAAI,cAAc,aAAa,gBAAgB;AAAA,IAC7D;AAAA,EACF;AAGA,QAAM,QAAQ,aAAa,cAAc,KAAK,IAAI,GAAG,UAAU,CAAC;AAChE,QAAM,SAAS,KAAK,OAAO,IAAI,aAAa,eAAe;AAE3D,SAAO,KAAK,IAAI,QAAQ,QAAQ,aAAa,UAAU;AACzD;AAEA,IAAK,iBAAL,kBAAKA,oBAAL;AACE,EAAAA,gBAAA,eAAY;AACZ,EAAAA,gBAAA,yBAAsB;AACtB,EAAAA,gBAAA,4BAAyB;AACzB,EAAAA,gBAAA,mBAAgB;AAChB,EAAAA,gBAAA,eAAY;AACZ,EAAAA,gBAAA,eAAY;AANT,SAAAA;AAAA,GAAA;AASL,SAAS,iBACP,SACA,OACA,MACQ;AACR,SAAO,GAAG,OAAO,IAAI,KAAK,IAAI,IAAI;AACpC;AAEA,SAAS,cACP,SACA,OACA,MACS;AACT,SAAO,CAAC,CAAC,gBAAgB,aAAa,EACpC,iBAAiB,SAAS,OAAO,IAAI,CACvC;AACF;AAEA,SAAS,cACP,SACA,OACA,MACA,OACA;AACA,kBAAgB,eAAe;AAAA,IAC7B,CAAC,iBAAiB,SAAS,OAAO,IAAI,CAAC,GAAG;AAAA,EAC5C,CAAC;AACH;AAcA,MAAM,sBAAsC;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU;AAChB,YAAM,WAAW,OAAO,YAAY;AACpC;AAAA;AAAA,QAEG,SAAS,SAAS,qCAAqC,KACtD,SAAS,SAAS,yBAAyB;AAAA,QAE5C,SAAS,SAAS,YAAY,KAC7B,SAAS,SAAS,uBAAuB,KAC1C,SAAS,SAAS,YAAY,KAC7B,SAAS,SAAS,eAAe,KAClC,SAAS,SAAS,YAAY,KAC7B,SAAS,SAAS,2BAA2B;AAAA,QAE9C,SAAS,SAAS,mBAAmB,KACpC,SAAS,SAAS,YAAY,KAC/B,SAAS,SAAS,iBAAiB,KAClC,SAAS,SAAS,YAAY;AAAA;AAAA,IAEpC;AAAA,IACA,KAAK,OAAM,SAAQ;AACjB,kBAAY,KAAK,YAAY;AAAA,QAC3B,QAAQ;AAAA,QACR,MAAM,KAAK;AAAA,MACb,CAAC;AACD,UAAI,gBAAgB,MAAM;AACxB,aAAK,wBAAwB,KAAK;AAClC,eAAO,KAAK;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU;AAChB,YAAM,WAAW,OAAO,YAAY;AACpC,aACE,SAAS,SAAS,aAAa,MAC9B,SAAS,SAAS,eAAe,KAChC,SAAS,SAAS,WAAW,KAC7B,SAAS,SAAS,qBAAqB;AAAA,IAE7C;AAAA,IACA,KAAK,OAAM,SAAQ;AACjB,kBAAY,KAAK,YAAY;AAAA,QAC3B,QAAQ;AAAA,QACR,MAAM,KAAK;AAAA,QACX,IAAI;AAAA,MACN,CAAC;AACD,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAEF;AAGA,MAAM,iBAAiC;AAAA,EACrC;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YACN,OAAO,SAAS,4CAA4C;AAAA,IAC9D,KAAK,OAAM,SAAQ;AACjB,YAAM,mBAAmB,CAAC;AAC1B,iBAAW,QAAQ,KAAK,SAAS,CAAC,GAAG;AACnC,YAAI,KAAK,SAAS,YAAY,UAAU,KAAM;AAC9C,YAAI,MAAM;AACV,YAAI,YAAY;AAChB,iBAAS,QAAQ,KAAK,SAAS,YAAY,MAAM,IAAI,GAAG;AACtD,cAAI,IAAI,SAAS,KAAK,SAAS,MAAM;AACnC,mBAAO,OAAO;AAAA,UAChB,OAAO;AACL,yBAAa,OAAO;AAAA,UACtB;AAAA,QACF;AAEA,aAAK,SAAS,cAAc;AAC5B,yBAAiB,KAAK,SAAS,IAAI,IAAI;AAAA,MACzC;AACA,UAAI,OAAO,KAAK,gBAAgB,EAAE,SAAS,GAAG;AAC5C,YAAI,UAAU;AACd,mBAAW,CAAC,MAAM,WAAW,KAAK,OAAO,QAAQ,gBAAgB,GAAG;AAClE,qBAAW,IAAI,IAAI;AAAA,EAAM,WAAW;AAAA,IAAO,IAAI;AAAA;AAAA;AAAA,QACjD;AACA,mBAAW;AAEX,iBAAS,IAAI,KAAK,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AAClD,cAAI,KAAK,SAAS,CAAC,EAAE,SAAS,UAAU;AACtC,iBAAK,SAAS,OAAO,IAAI,GAAG,GAAG;AAAA,cAC7B,MAAM;AAAA,cACN;AAAA,YACF,CAAC;AACD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU,OAAO,SAAS,6BAA6B;AAAA,IAC/D,KAAK,OAAM,SAAQ;AACjB,WAAK,wBAAwB,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YAAU,OAAO,SAAS,gBAAgB;AAAA,IAClD,KAAK,OAAM,SAAQ;AACjB,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,QAAQ,YACN,OAAO,SAAS,gCAAgC,KAChD,OAAO,SAAS,WAAW;AAAA,IAC7B,KAAK,OAAM,SAAQ;AACjB,UAAI,CAAC,KAAK,SAAU;AAEpB,iBAAW,WAAW,KAAK,UAAU;AACnC,YAAI,CAAC,QAAS;AAEd,YAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAClC,qBAAW,QAAQ,QAAQ,SAAS;AAElC,gBAAI,QAAQ,OAAO,SAAS,UAAU;AACpC,oBAAM,UAAU;AAChB,kBAAI,eAAe,SAAS;AAC1B,uBAAO,QAAQ;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,QACF,WAAW,QAAQ,WAAW,OAAO,QAAQ,YAAY,UAAU;AAEjE,gBAAM,aAAa,QAAQ;AAI3B,cAAI,eAAe,YAAY;AAC7B,mBAAO,WAAW;AAAA,UACpB;AAAA,QACF
;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAaA,MAAM,iBAAgD;AAAA;AAAA,EAEpD,IAAI,EAAE,yBAAyB,KAAK;AAAA,EACpC,cAAc,EAAE,yBAAyB,KAAK;AAAA,EAC9C,WAAW,EAAE,yBAAyB,KAAK;AAAA,EAC3C,UAAU,EAAE,yBAAyB,KAAK;AAAA,EAC1C,WAAW,EAAE,yBAAyB,KAAK;AAAA;AAAA,EAE3C,SAAS;AAAA,IACP,yBAAyB;AAAA,IACzB,sBAAsB;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,EACxB;AAAA,EACA,cAAc;AAAA,IACZ,yBAAyB;AAAA,IACzB,sBAAsB;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,EACxB;AAAA,EACA,cAAc;AAAA,IACZ,yBAAyB;AAAA,IACzB,sBAAsB;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,EACxB;AAAA,EACA,qBAAqB;AAAA,IACnB,yBAAyB;AAAA,IACzB,sBAAsB;AAAA;AAAA,IACtB,wBAAwB;AAAA,IACxB,0BAA0B;AAAA,EAC5B;AACF;AAGA,SAAS,iBAAiB,WAAkC;AAC1D,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AAC/C,WAAO,EAAE,yBAAyB,MAAM;AAAA,EAC1C;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,WAAO,eAAe,SAAS;AAAA,EACjC;AAGA,MAAI,UAAU,YAAY,EAAE,SAAS,OAAO,GAAG;AAC7C,WAAO;AAAA,MACL,yBAAyB;AAAA,MACzB,sBAAsB;AAAA,MACtB,wBAAwB;AAAA,MACxB,0BAA0B;AAAA,MAC1B,qBAAqB;AAAA,MACrB,sBAAsB;AAAA,IACxB;AAAA,EACF;AAGA,aAAW,CAAC,KAAK,QAAQ,KAAK,OAAO,QAAQ,cAAc,GAAG;AAC5D,QAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AAGA,SAAO,EAAE,yBAAyB,MAAM;AAC1C;AAGA,SAAS,kCACP,MACM;AACN,MAAI,CAAC,KAAK,SAAS,OAAO,KAAK,UAAU,UAAU;AACjD;AAAA,EACF;AAEA,QAAM,WAAW,iBAAiB,KAAK,KAAK;AAC5C,QAAM,SAAS,KAAK,MAAM,YAAY,EAAE,SAAS,OAAO;AAGxD,MAAI,UAAU,SAAS,yBAAyB;AAE9C,QAAI,gBAAgB,QAAQ,EAAE,2BAA2B,OAAO;AAC9D,kBAAY,KAAK,oBAAoB;AAAA,QACnC,QAAQ;AAAA,QACR,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA,MACd,CAAC;AACD,WAAK,wBAAwB,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,SAAS,0BAA0B,iBAAiB,MAAM;AAC5D,UAAI,KAAK,gBAAgB,KAAK,KAAK,gBAAgB,QAAW;AAC5D,oBAAY,KAAK,oBAAoB;AAAA,UACnC,QAAQ;AAAA,UACR,OAAO,KAAK;AAAA,UACZ,MAAM,KAAK;AAAA,UACX,IAAI;AAAA,QACN,CAAC;AACD,aAAK,cAAc;AAAA,MACrB;AAAA,IACF;AAGA,QAAI,QAAQ;AAEV,aAAO,KAAK;AACZ,aAAO,KAAK;AACZ,aAAO,KAAK;AACZ,aAAO,KAAK;AAGZ,UAAI,CAAC,KAAK,oBAAoB,SAAS,0BAA0B;AAC/D,aAAK,mBAAmB;AAAA,MAC1B;AAAA,IACF;AAAA,EACF,OAGK;AAEH,QACE,SAAS,2BACT,gBAAgB,QAChB,EAAE,2BAA2B,OAC7B;AACA,WAAK,wBAAwB,KAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAGF;AAEA,eAAe,qBACb,MACA,SACA;AACA,QAAM,SAAS,KAAK,MAAM,WAAW,OAAO;AAC5C,QAAM,WAAW,SACb,CAAC,GAAG,qBAAqB,GAAG,cAAc,IAC1C;AAEJ,aAAW,WAAW,UAAU;AAC9B,QAAI,cAAc,SAAS,KAAK,OAAO,QAAQ,IAAI,GAAG;AACpD,YAAM,QAAQ,IAAI,IAAI;AACtB;AAAA,IACF;AAAA,EACF;AACF;AAGA,eAAe,wBACb,SACA,MACA,SACA,UACA,OACA,QACmD;AACnD,QAAM,iBAAiB,CAAC;AAExB,MAAI,aAAa,WAAW;AAC1B,mBAAe,KAAK,2BAA2B,mBAAmB;AAAA,EACpE,OAAO;AACL,mBAAe,KAAK,mBAAmB;AAAA,EACzC;AAEA,MAAI,YAAY;AAEhB,aAAW,YAAY,gBAAgB;AACrC,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,GAAG,QAAQ,IAAI;AAAA,QACpD,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU,KAAK,SAAS,EAAE,GAAG,MAAM,QAAQ,KAAK,IAAI,IAAI;AAAA,QACnE,YAAY;AAAA,QACZ;AAAA;AAAA,MACF,CAAC;AAGD,UAAI,SAAS,IAAI;AACf,eAAO,EAAE,UAAU,SAAS;AAAA,MAC9B;AAGA,UAAI,SAAS,WAAW,OAAO,eAAe,SAAS,GAAG;AACxD,oBAAY,KAAK,mBAAmB;AAAA,UAClC,QAAQ;AAAA,UACR;AAAA,UACA,QAAQ;AAAA,QACV,CAAC;AACD;AAAA,MACF;AAGA,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B,SAAS,OAAO;AACd,kBAAY;AAEZ,UAAI,eAAe,QAAQ,QAAQ,IAAI,eAAe,SAAS,GAAG;AAChE,oBAAY,KAAK,mBAAmB;AAAA,UAClC,OAAO;AAAA,UACP;AAAA,UACA,QAAQ;AAAA,QACV,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa,IAAI,MAAM,sBAAsB;AACrD;AASA,eAAsB,yBACpB,cACA,MACA,UAAkB,GAClB,cAAsB,IACtB,QAC4E;AAC5E,MAAI,WAAW,aAAa;AAC1B,UAAM,IAAI,MAAM,sBAAsB;AAAA,EACxC;AAEA,QAAM,WAAW,cAAc,YAAY;AAC3C,QAAM,UAAU,cAAc;AAC9B,QAAM,SAAS,cAAc;AAC7B,QAAM,QAAQ,gBAAgB,EAAE,QAC5B,IAAI,WAAW,gBAAgB,EAAE,KAAK,IACtC;AAEJ,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AAEA,MAAI,QAAQ;AACV,QAAI,aAAa,SAAS;AACxB,cAAQ,SAAS,IAAI;AAAA,IACv
B,OAAO;AACL,cAAQ,eAAe,IAAI,UAAU,MAAM;AAAA,IAC7C;AAAA,EACF;AAEA,oCAAkC,IAAI;AACtC,QAAM,qBAAqB,MAAM,WAAW,EAAE;AAG9C,cAAY,IAAI,yBAAyB;AAAA,IACvC,UAAU,WAAW;AAAA,IACrB,OAAO,KAAK;AAAA,IACZ;AAAA,IACA,kBAAkB,CAAC,CAAC;AAAA,IACpB,cAAc,SAAS,OAAO,UAAU,GAAG,CAAC,IAAI;AAAA,IAChD,WAAW,KAAK;AAAA,IAChB,aAAa,KAAK;AAAA,IAClB,cAAc,KAAK,UAAU,UAAU;AAAA,IACvC,YAAY,KAAK;AAAA,IACjB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,uBAAuB,cAAc;AAAA,IACrC,kBAAkB,cAAc;AAAA,EAClC,CAAC;AAGD,OAAK,WAAW,KAAK,SAAS,IAAI,SAAO;AACvC,QAAI,IAAI,SAAS,QAAQ;AACvB,UAAI,MAAM,QAAQ,IAAI,OAAO,GAAG;AAC9B,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SACE,IAAI,QACD,IAAI,OAAK,EAAE,QAAQ,EAAE,EACrB,OAAO,OAAO,EACd,KAAK,MAAM,KAAK;AAAA,QACvB;AAAA,MACF,WAAW,OAAO,IAAI,YAAY,UAAU;AAC1C,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SACE,OAAO,IAAI,YAAY,cACnB,oBACA,KAAK,UAAU,IAAI,OAAO;AAAA,QAClC;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,kBAAkB;AACxB,MAAI,WAAW;AAEf,MAAI,aAAa,SAAS;AACxB,eAAW,iCAAiC,eAAe;AAAA,EAC7D,WAAW,aAAa,WAAW;AACjC,eAAW;AAAA,EACb;AAEA,MAAI;AACF,QAAI,KAAK,QAAQ;AACf,YAAMC,sBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,EAAE,SAAS,QAAQ;AAEnB,UAAIC;AAEJ,UAAID,uBAAsB,aAAa,SAAS;AAC9C,cAAM,SAAS,MAAM;AAAA,UACnB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,QACF;AACA,QAAAC,YAAW,OAAO;AAAA,MACpB,OAAO;AACL,QAAAA,YAAW,MAAM,MAAM,GAAG,OAAO,GAAG,QAAQ,IAAI;AAAA,UAC9C,QAAQ;AAAA,UACR;AAAA,UACA,MAAM,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AAAA,UAC9C,YAAY;AAAA,UACZ;AAAA;AAAA,QACF,CAAC;AAAA,MACH;AAEA,UAAI,CAACA,UAAS,IAAI;AAEhB,YAAI,QAAQ,SAAS;AACnB,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AAGA,YAAI;AACF,gBAAM,YAAY,MAAMA,UAAS,KAAK;AAEtC,gBAAM,WAAW,CACf,SAC+D;AAC/D,mBAAO,OAAO,SAAS,YAAY,SAAS;AAAA,UAC9C;AACA,gBAAM,eAAe,SAAS,SAAS,IACnC,UAAU,OAAO,WACjB,UAAU,WACV,QAAQA,UAAS,MAAM,KACvB,QAAQA,UAAS,MAAM;AAG3B,gBAAM,SAAS,KAAK,MAAM,WAAW,OAAO;AAC5C,gBAAM,WAAW,SACb,CAAC,GAAG,qBAAqB,GAAG,cAAc,IAC1C;AAEJ,qBAAW,WAAW,UAAU;AAC9B,gBAAI,QAAQ,OAAO,YAAY,GAAG;AAChC,0BAAY,KAAK,wBAAwB;AAAA,gBACvC,MAAM,QAAQ;AAAA,gBACd,OAAO,KAAK;AAAA,gBACZ,OAAO;AAAA,cACT,CAAC;AAGD;AAAA,gBACE,WAAW;AAAA,gBACX,KAAK;AAAA,gBACL,QAAQ;AAAA,gBACR;AAAA,cACF;AAGA,oBAAM,QAAQ,IAAI,IAAI;AACtB,0BAAY,KAAK,wBAAwB;AAAA,gBACvC,QAAQ;AAAA,gBACR,MAAM,QAAQ;AAAA,cAChB,CAAC;AAED,qBAAO;AAAA,gBACL;AAAA,gBACA;AAAA,gBACA,UAAU;AAAA,gBACV;AAAA,gBACA;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAGA,sBAAY,KAAK,0BAA0B;AAAA,YACzC,QAAQA,UAAS;AAAA,YACjB,OAAO;AAAA,UACT,CAAC;AAGD,sBAAY;AAAA,YACV,OAAO,KAAK;AAAA,YACZ,UAAU,GAAG,OAAO,GAAG,QAAQ;AAAA,YAC/B,QAAQA,UAAS;AAAA,YACjB,OAAO;AAAA,YACP,SAAS;AAAA,YACT,UAAU;AAAA,YACV;AAAA,UACF,CAAC;AAAA,QACH,SAAS,YAAY;AAEnB,sBAAY,KAAK,sBAAsB,EAAE,QAAQA,UAAS,OAAO,CAAC;AAGlE,sBAAY;AAAA,YACV,OAAO,KAAK;AAAA,YACZ,UAAU,GAAG,OAAO,GAAG,QAAQ;AAAA,YAC/B,QAAQA,UAAS;AAAA,YACjB,OAAO,mCAAmC,WAAW,OAAO;AAAA,YAC5D,SAAS;AAAA,YACT,UAAU,EAAE,YAAY,WAAW,QAAQ;AAAA,YAC3C;AAAA,UACF,CAAC;AAAA,QACH;AAEA,cAAM,UAAU,cAAc,OAAO;AACrC,gBAAQ;AAAA,UACN,wBAAmBA,UAAS,MAAM,kBAAkB,KAAK,MAAM,UAAU,GAAI,CAAC,iBAAiB,UAAU,CAAC,IAAI,WAAW;AAAA,QAC3H;AACA,YAAI;AACF,gBAAM,eAAe,SAAS,MAAM;AAAA,QACtC,SAAS,OAAO;AAEd,cAAI,MAAM,YAAY,uBAAuB;AAC3C,kBAAM,IAAI,MAAM,2BAA2B;AAAA,UAC7C;AACA,gBAAM;AAAA,QACR;AACA,eAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA;AAAA,QACF;AAAA,MACF;AAEA,YAAM,SAAS,sBAAsBA,UAAS,MAAa,MAAM;AACjE,aAAO;AAAA,IACT;AAGA,UAAM,qBAAqB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,EAAE,SAAS,QAAQ;AAEnB,QAAI;AAEJ,QAAI,sBAAsB,aAAa,SAAS;AAC9C,YAAM,SAAS,MAAM;AAAA,QACnB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA
,QACA;AAAA,QACA;AAAA;AAAA,MACF;AACA,iBAAW,OAAO;AAAA,IACpB,OAAO;AACL,iBAAW,MAAM,MAAM,GAAG,OAAO,GAAG,QAAQ,IAAI;AAAA,QAC9C,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,YAAY;AAAA,QACZ;AAAA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,QAAI,CAAC,SAAS,IAAI;AAEhB,UAAI,QAAQ,SAAS;AACnB,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAGA,UAAI;AACF,cAAM,YAAY,MAAM,SAAS,KAAK;AAEtC,cAAM,WAAW,CACf,SAC+D;AAC/D,iBAAO,OAAO,SAAS,YAAY,SAAS;AAAA,QAC9C;AACA,cAAM,eAAe,SAAS,SAAS,IACnC,UAAU,OAAO,WACjB,UAAU,WACV,QAAQ,SAAS,MAAM,KACvB,QAAQ,SAAS,MAAM;AAG3B,cAAM,SAAS,KAAK,MAAM,WAAW,OAAO;AAC5C,cAAM,WAAW,SACb,CAAC,GAAG,qBAAqB,GAAG,cAAc,IAC1C;AAEJ,mBAAW,WAAW,UAAU;AAC9B,cAAI,QAAQ,OAAO,YAAY,GAAG;AAChC,wBAAY,KAAK,wBAAwB;AAAA,cACvC,MAAM,QAAQ;AAAA,cACd,OAAO,KAAK;AAAA,cACZ,OAAO;AAAA,YACT,CAAC;AAGD,0BAAc,WAAW,IAAI,KAAK,OAAO,QAAQ,MAAM,YAAY;AAGnE,kBAAM,QAAQ,IAAI,IAAI;AACtB,wBAAY,KAAK,wBAAwB;AAAA,cACvC,QAAQ;AAAA,cACR,MAAM,QAAQ;AAAA,YAChB,CAAC;AAED,mBAAO;AAAA,cACL;AAAA,cACA;AAAA,cACA,UAAU;AAAA,cACV;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAGA,oBAAY,KAAK,0BAA0B;AAAA,UACzC,QAAQ,SAAS;AAAA,UACjB,OAAO;AAAA,QACT,CAAC;AAAA,MACH,SAAS,YAAY;AAEnB,oBAAY,KAAK,sBAAsB,EAAE,QAAQ,SAAS,OAAO,CAAC;AAAA,MACpE;AAEA,YAAM,UAAU,cAAc,OAAO;AACrC,cAAQ;AAAA,QACN,wBAAmB,SAAS,MAAM,kBAAkB,KAAK,MAAM,UAAU,GAAI,CAAC,iBAAiB,UAAU,CAAC,IAAI,WAAW;AAAA,MAC3H;AACA,UAAI;AACF,cAAM,eAAe,SAAS,MAAM;AAAA,MACtC,SAAS,OAAO;AAEd,YAAI,MAAM,YAAY,uBAAuB;AAC3C,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AACA,cAAM;AAAA,MACR;AACA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA;AAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAgB,MAAM,SAAS,KAAK;AAC1C,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,QAAI,UAAU,aAAa;AAEzB,UAAI,QAAQ,SAAS;AACnB,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AAEA,YAAM,UAAU,cAAc,OAAO;AACrC,cAAQ;AAAA,QACN,wCAAmC,KAAK,MAAM,UAAU,GAAI,CAAC,iBAAiB,UAAU,CAAC,IAAI,WAAW;AAAA,MAC1G;AACA,UAAI;AACF,cAAM,eAAe,SAAS,MAAM;AAAA,MACtC,SAASC,QAAO;AAEd,YAAIA,OAAM,YAAY,uBAAuB;AAC3C,gBAAM,IAAI,MAAM,2BAA2B;AAAA,QAC7C;AACA,cAAMA;AAAA,MACR;AACA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA;AAAA;AAAA,MACF;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEO,SAAS,sBACd,QACA,QAC2D;AAC3D,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AAEA,UAAQ,mBAAmB;AACzB,UAAM,SAAS,OAAO,UAAU;AAChC,UAAM,UAAU,IAAI,YAAY,OAAO;AACvC,QAAI,SAAS;AAEb,QAAI;AACF,aAAO,MAAM;AAEX,YAAI,QAAQ,SAAS;AACnB;AAAA,QACF;AAEA,YAAI;AACJ,YAAI;AACF,uBAAa,MAAM,OAAO,KAAK;AAAA,QACjC,SAAS,GAAG;AAEV,cAAI,QAAQ,SAAS;AACnB;AAAA,UACF;AACA,mBAAS,CAAC;AACV;AAAA,QACF;AAEA,cAAM,EAAE,MAAM,MAAM,IAAI;AACxB,YAAI,MAAM;AACR;AAAA,QACF;AAEA,cAAM,QAAQ,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AACpD,kBAAU;AAEV,YAAI,UAAU,OAAO,QAAQ,IAAI;AACjC,eAAO,YAAY,IAAI;AACrB,gBAAM,OAAO,OAAO,UAAU,GAAG,OAAO,EAAE,KAAK;AAC/C,mBAAS,OAAO,UAAU,UAAU,CAAC;AAErC,cAAI,SAAS,gBAAgB;AAC3B;AAAA,UACF;AAEA,cAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,kBAAM,OAAO,KAAK,MAAM,CAAC,EAAE,KAAK;AAChC,gBAAI,CAAC,KAAM;AAEX,gBAAI;AACF,oBAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,oBAAM;AAAA,YACR,SAAS,GAAG;AACV,0BAAY,KAAK,sBAAsB;AAAA,gBACrC,MAAM,KAAK,UAAU,GAAG,GAAG;AAAA,gBAC3B,OAAO,OAAO,CAAC;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAEA,oBAAU,OAAO,QAAQ,IAAI;AAAA,QAC/B;AAAA,MACF;AAGA,UAAI,OAAO,KAAK,GAAG;AACjB,cAAM,QAAQ,OAAO,KAAK,EAAE,MAAM,IAAI;AACtC,mBAAW,QAAQ,OAAO;AACxB,cAAI,KAAK,WAAW,QAAQ,KAAK,SAAS,gBAAgB;AACxD,kBAAM,OAAO,KAAK,MAAM,CAAC,EAAE,KAAK;AAChC,gBAAI,CAAC,KAAM;AAEX,gBAAI;AACF,oBAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,oBAAM;AAAA,YACR,SAAS,GAAG;AACV,0BAAY,KAAK,sBAAsB;AAAA,gBACrC,MAAM,KAAK,UAAU,GAAG,GAAG;AAAA,gBAC3B,OAAO,OAAO,CAAC;AAAA,gBACf,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,eAAS,CAAC;AA
AA,IACZ,UAAE;AACA,UAAI;AACF,eAAO,YAAY;AAAA,MACrB,SAAS,GAAG;AAAA,MAEZ;AAAA,IACF;AAAA,EACF,GAAG;AACL;AAEO,SAAS,iBACd,QACA,QAC2D;AAC3D,SAAO,sBAAsB,QAAQ,MAAM;AAC7C;AAKA,eAAsB,qBACpB,cACA,MACA,QACc;AACd,QAAM,UAAU,cAAc,WAAW;AACzC,QAAM,SAAS,cAAc;AAC7B,QAAM,QAAQ,gBAAgB,EAAE,QAC5B,IAAI,WAAW,gBAAgB,EAAE,KAAK,IACtC;AAEJ,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,IAChB,eAAe,UAAU,MAAM;AAAA,EACjC;AAGA,QAAM,kBAAuB;AAAA,IAC3B,OAAO,KAAK;AAAA,IACZ,OAAO,KAAK;AAAA;AAAA,EACd;AAGA,MAAI,KAAK,uBAAuB;AAC9B,oBAAgB,wBAAwB,KAAK;AAAA,EAC/C,WAAW,KAAK,YAAY;AAE1B,oBAAgB,wBAAwB,KAAK;AAAA,EAC/C;AAGA,MAAI,KAAK,gBAAgB,GAAG;AAC1B,oBAAgB,cAAc;AAAA,EAChC;AAIA,QAAM,kBAAkB,KAAK,oBAAoB;AACjD,kBAAgB,YAAY;AAAA,IAC1B,QAAQ;AAAA;AAAA,IAER,kBAAkB;AAAA,EACpB;AAGA,MAAI,KAAK,SAAS,KAAK,MAAM,SAAS,GAAG;AACvC,oBAAgB,QAAQ,KAAK;AAG7B,QAAI,KAAK,aAAa;AACpB,sBAAgB,cAAc,KAAK;AAAA,IACrC;AAAA,EACF;AAGA,QAAM,iBAAiB,KAAK,SAAS,OAAO,SAAO,IAAI,SAAS,QAAQ;AACxE,QAAM,oBAAoB,KAAK,SAAS,OAAO,SAAO,IAAI,SAAS,QAAQ;AAE3E,MAAI,eAAe,SAAS,GAAG;AAC7B,oBAAgB,eAAe,eAC5B,IAAI,SAAO,IAAI,OAAO,EACtB,KAAK,MAAM;AACd,oBAAgB,QAAQ;AAAA,EAC1B;AAGA,QAAM,WAAW,iBAAiB,KAAK,KAAK;AAC5C,MAAI,SAAS,0BAA0B;AAGrC,oBAAgB,OAAO;AAAA,MACrB,WAAW;AAAA,IACb;AAAA,EACF;AAGA,MAAI,KAAK,MAAM,WAAW,OAAO,GAAG;AAElC,QAAI,CAAC,gBAAgB,WAAW;AAC9B,sBAAgB,YAAY;AAAA,QAC1B,QAAQ;AAAA;AAAA,MACV;AAAA,IACF;AAGA,QAAI,CAAC,gBAAgB,cAAc;AACjC,sBAAgB,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMjC;AAAA,EACF;AAEA,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,GAAG,OAAO,cAAc;AAAA,MACnD,QAAQ;AAAA,MACR;AAAA,MACA,MAAM,KAAK,UAAU,eAAe;AAAA,MACpC,YAAY;AAAA,MACZ;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI;AAAA,QACR,8BAA8B,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,MACtE;AAAA,IACF;AAEA,UAAM,eAAe,MAAM,SAAS,KAAK;AAGzC,WAAO,oCAAoC,YAAY;AAAA,EACzD,SAAS,OAAO;AACd,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AACA,UAAM;AAAA,EACR;AACF;AAMA,SAAS,oCAAoC,eAAyB;AAEpE,MAAI,aAAa,cAAc,eAAe;AAC9C,QAAM,QAAQ,cAAc,SAAS,CAAC;AAItC,MAAI,cAAc,UAAU,MAAM,QAAQ,cAAc,MAAM,GAAG;AAC/D,UAAM,iBAAiB,cAAc,OAAO;AAAA,MAC1C,UAAQ,KAAK,SAAS,eAAe,KAAK;AAAA,IAC5C;AACA,UAAM,eAAe,cAAc,OAAO;AAAA,MACxC,UAAQ,KAAK,SAAS;AAAA,IACxB;AAEA,QAAI,eAAe,SAAS,KAAK,aAAa,SAAS,GAAG;AACxD,YAAM,mBAAmB,eACtB,IAAI,UAAQ,KAAK,SAAS,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,EACrD,OAAO,OAAO,EACd,KAAK,MAAM;AAEd,YAAM,cAAc,aACjB,IAAI,UAAQ,KAAK,SAAS,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,EACrD,OAAO,OAAO,EACd,KAAK,MAAM;AAEd,UAAI,kBAAkB;AACpB,qBAAa;AAAA,EAA8B,gBAAgB;AAAA;AAAA;AAAA,EAAyB,WAAW;AAAA,MACjG,OAAO;AACL,qBAAa;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,cAAc,MAAM,YAAY,KAAK,IAAI,CAAC;AAAA,IAC9C,QAAQ;AAAA,IACR,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAAA,IACrC,OAAO,cAAc,SAAS;AAAA,IAC9B,SAAS;AAAA,MACP;AAAA,QACE,OAAO;AAAA,QACP,SAAS;AAAA,UACP,MAAM;AAAA,UACN,SAAS;AAAA;AAAA,UAET,GAAI,cAAc,aAAa;AAAA,YAC7B,WAAW;AAAA,cACT,QAAQ,cAAc,UAAU;AAAA,cAChC,SAAS,cAAc,UAAU;AAAA,YACnC;AAAA,UACF;AAAA,QACF;AAAA,QACA,eAAe,cAAc,WAAW,cAAc,SAAS;AAAA,MACjE;AAAA,IACF;AAAA,IACA,OAAO;AAAA,MACL,eAAe,MAAM,gBAAgB;AAAA,MACrC,mBAAmB,MAAM,iBAAiB;AAAA,MAC1C,eAAe,MAAM,gBAAgB,MAAM,MAAM,iBAAiB;AAAA;AAAA,MAElE,uBAAuB;AAAA,QACrB,eAAe,MAAM,sBAAsB,iBAAiB;AAAA,MAC9D;AAAA,MACA,2BAA2B;AAAA,QACzB,kBAAkB,MAAM,uBAAuB,oBAAoB;AAAA,MACrE;AAAA,IACF;AAAA,EACF;AACF;AAMA,eAAe,6BACb,cACA,MACA,UAAkB,GAClB,cAAsB,IACtB,QAC4E;AAC5E,QAAM,WAAW,iBAAiB,KAAK,KAAK;AAC5C,QAAM,mBACJ,CAAC,aAAa,WAAW,aAAa,QAAQ,SAAS,gBAAgB;AAGzE,MAAI,SAAS,wBAAwB,CAAC,KAAK,UAAU,kBAAkB;AACrE,QAAI;AACF,kBAAY,IAAI,iCAAiC;AAAA,QAC/C,OAAO,KAAK;AAAA,QACZ,SAAS,aAAa,WAAW;AAAA,QACjC,UAAU,aAAa;AAAA,QACvB,QAAQ,KAAK;AAAA,QACb,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAED,YAAM,SAAS,MAAM,qBAAqB,cAAc,MAAM,MAAM;AAEpE,kBAAY,IAAI,8BAA8B
;AAAA,QAC5C,OAAO,KAAK;AAAA,QACZ,SAAS,aAAa,WAAW;AAAA,QACjC,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAO;AACd,kBAAY,IAAI,+BAA+B;AAAA,QAC7C,OAAO,KAAK;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,SAAS,aAAa,WAAW;AAAA,QACjC,WAAW,kBAAkB,GAAG;AAAA,MAClC,CAAC;AAED,kBAAY,KAAK,iBAAiB;AAAA,QAChC,QAAQ;AAAA,QACR,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IAEH;AAAA,EACF,WAGS,CAAC,kBAAkB;AAC1B,gBAAY,IAAI,6BAA6B;AAAA,MAC3C,OAAO,KAAK;AAAA,MACZ,SAAS,aAAa;AAAA,MACtB,UAAU,aAAa;AAAA,MACvB,sBAAsB,SAAS;AAAA,MAC/B,WAAW,kBAAkB,GAAG;AAAA,IAClC,CAAC;AAGD,gBAAY,KAAK,oBAAoB;AAAA,MACnC,UAAU,aAAa;AAAA,MACvB,SAAS,aAAa;AAAA,IACxB,CAAC;AAGD,QAAI,aAAa,aAAa,SAAS;AAErC,aAAO,KAAK;AAAA,IACd,WAAW,aAAa,aAAa,iBAAiB;AAEpD,kBAAY,KAAK,6BAA6B;AAAA,QAC5C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAAA,EACF,WAGS,KAAK,QAAQ;AACpB,gBAAY,IAAI,uBAAuB;AAAA,MACrC,OAAO,KAAK;AAAA,MACZ,SAAS,aAAa,WAAW;AAAA,MACjC,QAAQ;AAAA,MACR,WAAW,kBAAkB,GAAG;AAAA,IAClC,CAAC;AAED,gBAAY,KAAK,kBAAkB,EAAE,QAAQ,6BAA6B,CAAC;AAAA,EAC7E;AAGA,cAAY,IAAI,mCAAmC;AAAA,IACjD,OAAO,KAAK;AAAA,IACZ,SAAS,aAAa,WAAW;AAAA,IACjC,UAAU,aAAa;AAAA,IACvB,QAAQ,mBAAmB,0BAA0B;AAAA,IACrD,WAAW,kBAAkB,GAAG;AAAA,EAClC,CAAC;AAED,SAAO,MAAM;AAAA,IACX;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAsB,kBACpB,SACA,QACgB;AAChB,MAAI;AAEF,UAAM,mBAAmB,SAAS,KAAK,OAAO;AAC9C,UAAM,eAAe,QAAQ,QAAQ,QAAQ,EAAE;AAC/C,UAAM,YAAY,mBACd,GAAG,YAAY,YACf,GAAG,YAAY;AAEnB,UAAM,WAAW,MAAM,MAAM,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,eAAe,UAAU,MAAM;AAAA,QAC/B,gBAAgB;AAAA,MAClB;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAEhB,UAAI,SAAS,WAAW,KAAK;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,WAAW,KAAK;AAClC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,WAAW,SAAS,UAAU,KAAK;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF,OAAO;AACL,cAAM,IAAI;AAAA,UACR,6BAA6B,SAAS,MAAM;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAGjC,UAAM,eAAe,CAAC,QAA6C;AACjE,aACE,OAAO,QAAQ,YACf,QAAQ,QACR,UAAU,OACV,MAAM,QAAS,IAAY,IAAI;AAAA,IAEnC;AAEA,UAAM,iBAAiB,CAAC,QAA+C;AACrE,aACE,OAAO,QAAQ,YACf,QAAQ,QACR,YAAY,OACZ,MAAM,QAAS,IAAY,MAAM;AAAA,IAErC;AAGA,QAAI,SAAS,CAAC;AAEd,QAAI,aAAa,IAAI,GAAG;AAEtB,eAAS,KAAK;AAAA,IAChB,WAAW,MAAM,QAAQ,IAAI,GAAG;AAE9B,eAAS;AAAA,IACX,WAAW,eAAe,IAAI,GAAG;AAE/B,eAAS,KAAK;AAAA,IAChB,OAAO;AACL,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,QAAI,CAAC,MAAM,QAAQ,MAAM,GAAG;AAC1B,YAAM,IAAI,MAAM,yDAAyD;AAAA,IAC3E;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,QACE,iBAAiB,UAChB,MAAM,QAAQ,SAAS,SAAS,KAC/B,MAAM,QAAQ,SAAS,cAAc,KACrC,MAAM,QAAQ,SAAS,aAAa,KACpC,MAAM,QAAQ,SAAS,iBAAiB,IAC1C;AACA,YAAM;AAAA,IACR;AAGA,aAAS,KAAK;AAGd,QAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,OAAO,GAAG;AAC7D,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;",
+ "names": ["ModelErrorType", "isOpenAICompatible", "response", "error"]
  }
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/services/responseStateManager.ts"],
- "sourcesContent": ["/**\n * GPT-5 Responses API state management\n * Manages previous_response_id for conversation continuity and reasoning context reuse\n */\n\ninterface ConversationState {\n previousResponseId?: string\n lastUpdate: number\n}\n\nclass ResponseStateManager {\n private conversationStates = new Map<string, ConversationState>()\n \n // Cache cleanup after 1 hour of inactivity\n private readonly CLEANUP_INTERVAL = 60 * 60 * 1000\n \n constructor() {\n // Periodic cleanup of stale conversations\n setInterval(() => {\n this.cleanup()\n }, this.CLEANUP_INTERVAL)\n }\n \n /**\n * Set the previous response ID for a conversation\n */\n setPreviousResponseId(conversationId: string, responseId: string): void {\n this.conversationStates.set(conversationId, {\n previousResponseId: responseId,\n lastUpdate: Date.now()\n })\n }\n \n /**\n * Get the previous response ID for a conversation\n */\n getPreviousResponseId(conversationId: string): string | undefined {\n const state = this.conversationStates.get(conversationId)\n if (state) {\n // Update last access time\n state.lastUpdate = Date.now()\n return state.previousResponseId\n }\n return undefined\n }\n \n /**\n * Clear state for a conversation\n */\n clearConversation(conversationId: string): void {\n this.conversationStates.delete(conversationId)\n }\n \n /**\n * Clear all conversation states\n */\n clearAll(): void {\n this.conversationStates.clear()\n }\n \n /**\n * Clean up stale conversations\n */\n private cleanup(): void {\n const now = Date.now()\n for (const [conversationId, state] of this.conversationStates.entries()) {\n if (now - state.lastUpdate > this.CLEANUP_INTERVAL) {\n this.conversationStates.delete(conversationId)\n }\n }\n }\n \n /**\n * Get current state size (for debugging/monitoring)\n */\n getStateSize(): number {\n return this.conversationStates.size\n }\n}\n\n// Singleton instance\nexport const responseStateManager = new ResponseStateManager()\n\n/**\n * Helper to generate conversation ID from context\n */\nexport function getConversationId(agentId?: string, messageId?: string): string {\n // Use agentId as primary identifier, fallback to messageId or timestamp\n return agentId || messageId || `conv_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`\n}"],
- "mappings": "AAUA,MAAM,qBAAqB;AAAA,EACjB,qBAAqB,oBAAI,IAA+B;AAAA;AAAA,EAG/C,mBAAmB,KAAK,KAAK;AAAA,EAE9C,cAAc;AAEZ,gBAAY,MAAM;AAChB,WAAK,QAAQ;AAAA,IACf,GAAG,KAAK,gBAAgB;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,sBAAsB,gBAAwB,YAA0B;AACtE,SAAK,mBAAmB,IAAI,gBAAgB;AAAA,MAC1C,oBAAoB;AAAA,MACpB,YAAY,KAAK,IAAI;AAAA,IACvB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,sBAAsB,gBAA4C;AAChE,UAAM,QAAQ,KAAK,mBAAmB,IAAI,cAAc;AACxD,QAAI,OAAO;AAET,YAAM,aAAa,KAAK,IAAI;AAC5B,aAAO,MAAM;AAAA,IACf;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,gBAA8B;AAC9C,SAAK,mBAAmB,OAAO,cAAc;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,WAAiB;AACf,SAAK,mBAAmB,MAAM;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAgB;AACtB,UAAM,MAAM,KAAK,IAAI;AACrB,eAAW,CAAC,gBAAgB,KAAK,KAAK,KAAK,mBAAmB,QAAQ,GAAG;AACvE,UAAI,MAAM,MAAM,aAAa,KAAK,kBAAkB;AAClD,aAAK,mBAAmB,OAAO,cAAc;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,WAAO,KAAK,mBAAmB;AAAA,EACjC;AACF;AAGO,MAAM,uBAAuB,IAAI,qBAAqB;AAKtD,SAAS,kBAAkB,SAAkB,WAA4B;AAE9E,SAAO,WAAW,aAAa,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAC9F;",
+ "sourcesContent": ["/**\n * GPT-5 Responses API state management\n * Manages previous_response_id for conversation continuity and reasoning context reuse\n */\n\ninterface ConversationState {\n previousResponseId?: string\n lastUpdate: number\n}\n\nclass ResponseStateManager {\n private conversationStates = new Map<string, ConversationState>()\n\n // Cache cleanup after 1 hour of inactivity\n private readonly CLEANUP_INTERVAL = 60 * 60 * 1000\n\n constructor() {\n // Periodic cleanup of stale conversations\n setInterval(() => {\n this.cleanup()\n }, this.CLEANUP_INTERVAL)\n }\n\n /**\n * Set the previous response ID for a conversation\n */\n setPreviousResponseId(conversationId: string, responseId: string): void {\n this.conversationStates.set(conversationId, {\n previousResponseId: responseId,\n lastUpdate: Date.now(),\n })\n }\n\n /**\n * Get the previous response ID for a conversation\n */\n getPreviousResponseId(conversationId: string): string | undefined {\n const state = this.conversationStates.get(conversationId)\n if (state) {\n // Update last access time\n state.lastUpdate = Date.now()\n return state.previousResponseId\n }\n return undefined\n }\n\n /**\n * Clear state for a conversation\n */\n clearConversation(conversationId: string): void {\n this.conversationStates.delete(conversationId)\n }\n\n /**\n * Clear all conversation states\n */\n clearAll(): void {\n this.conversationStates.clear()\n }\n\n /**\n * Clean up stale conversations\n */\n private cleanup(): void {\n const now = Date.now()\n for (const [conversationId, state] of this.conversationStates.entries()) {\n if (now - state.lastUpdate > this.CLEANUP_INTERVAL) {\n this.conversationStates.delete(conversationId)\n }\n }\n }\n\n /**\n * Get current state size (for debugging/monitoring)\n */\n getStateSize(): number {\n return this.conversationStates.size\n }\n}\n\n// Singleton instance\nexport const responseStateManager = new ResponseStateManager()\n\n/**\n * Helper to generate conversation ID from context\n */\nexport function getConversationId(\n agentId?: string,\n messageId?: string,\n): string {\n // Use agentId as primary identifier, fallback to messageId or timestamp\n return (\n agentId ||\n messageId ||\n `conv_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`\n )\n}\n"],
+ "mappings": "AAUA,MAAM,qBAAqB;AAAA,EACjB,qBAAqB,oBAAI,IAA+B;AAAA;AAAA,EAG/C,mBAAmB,KAAK,KAAK;AAAA,EAE9C,cAAc;AAEZ,gBAAY,MAAM;AAChB,WAAK,QAAQ;AAAA,IACf,GAAG,KAAK,gBAAgB;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,sBAAsB,gBAAwB,YAA0B;AACtE,SAAK,mBAAmB,IAAI,gBAAgB;AAAA,MAC1C,oBAAoB;AAAA,MACpB,YAAY,KAAK,IAAI;AAAA,IACvB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,sBAAsB,gBAA4C;AAChE,UAAM,QAAQ,KAAK,mBAAmB,IAAI,cAAc;AACxD,QAAI,OAAO;AAET,YAAM,aAAa,KAAK,IAAI;AAC5B,aAAO,MAAM;AAAA,IACf;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,gBAA8B;AAC9C,SAAK,mBAAmB,OAAO,cAAc;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,WAAiB;AACf,SAAK,mBAAmB,MAAM;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAgB;AACtB,UAAM,MAAM,KAAK,IAAI;AACrB,eAAW,CAAC,gBAAgB,KAAK,KAAK,KAAK,mBAAmB,QAAQ,GAAG;AACvE,UAAI,MAAM,MAAM,aAAa,KAAK,kBAAkB;AAClD,aAAK,mBAAmB,OAAO,cAAc;AAAA,MAC/C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,WAAO,KAAK,mBAAmB;AAAA,EACjC;AACF;AAGO,MAAM,uBAAuB,IAAI,qBAAqB;AAKtD,SAAS,kBACd,SACA,WACQ;AAER,SACE,WACA,aACA,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAEjE;",
  "names": []
  }
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/services/systemReminder.ts"],
- "sourcesContent": ["import { getTodos, TodoItem } from '@utils/todoStorage'\n\nexport interface ReminderMessage {\n role: 'system'\n content: string\n isMeta: boolean\n timestamp: number\n type: string\n priority: 'low' | 'medium' | 'high'\n category: 'task' | 'security' | 'performance' | 'general'\n}\n\ninterface ReminderConfig {\n todoEmptyReminder: boolean\n securityReminder: boolean\n performanceReminder: boolean\n maxRemindersPerSession: number\n}\n\ninterface SessionReminderState {\n lastTodoUpdate: number\n lastFileAccess: number\n sessionStartTime: number\n remindersSent: Set<string>\n contextPresent: boolean\n reminderCount: number\n config: ReminderConfig\n}\n\nclass SystemReminderService {\n private sessionState: SessionReminderState = {\n lastTodoUpdate: 0,\n lastFileAccess: 0,\n sessionStartTime: Date.now(),\n remindersSent: new Set(),\n contextPresent: false,\n reminderCount: 0,\n config: {\n todoEmptyReminder: true,\n securityReminder: true,\n performanceReminder: true,\n maxRemindersPerSession: 10,\n },\n }\n\n private eventDispatcher = new Map<string, Array<(context: any) => void>>()\n private reminderCache = new Map<string, ReminderMessage>()\n\n constructor() {\n this.setupEventDispatcher()\n }\n\n /**\n * Conditional reminder injection - only when context is present\n * Enhanced with performance optimizations and priority management\n */\n public generateReminders(\n hasContext: boolean = false,\n agentId?: string,\n ): ReminderMessage[] {\n this.sessionState.contextPresent = hasContext\n\n // Only inject when context is present (matching original behavior)\n if (!hasContext) {\n return []\n }\n\n // Check session reminder limit to prevent overload\n if (\n this.sessionState.reminderCount >=\n this.sessionState.config.maxRemindersPerSession\n ) {\n return []\n }\n\n const reminders: ReminderMessage[] = []\n const currentTime = Date.now()\n\n // Use lazy evaluation for performance with agent context\n const reminderGenerators = [\n () => this.dispatchTodoEvent(agentId),\n () => this.dispatchSecurityEvent(),\n () => this.dispatchPerformanceEvent(),\n () => this.getMentionReminders(), // Add mention reminders\n ]\n\n for (const generator of reminderGenerators) {\n if (reminders.length >= 5) break // Slightly increase limit to accommodate mentions\n\n const result = generator()\n if (result) {\n // Handle both single reminders and arrays\n const remindersToAdd = Array.isArray(result) ? result : [result]\n reminders.push(...remindersToAdd)\n this.sessionState.reminderCount += remindersToAdd.length\n }\n }\n\n // Log aggregated metrics instead of individual events for performance\n \n\n return reminders\n }\n\n private dispatchTodoEvent(agentId?: string): ReminderMessage | null {\n if (!this.sessionState.config.todoEmptyReminder) return null\n\n // Use agent-scoped todo access\n const todos = getTodos(agentId)\n const currentTime = Date.now()\n const agentKey = agentId || 'default'\n\n // Check if this is a fresh session (no todos seen yet)\n if (\n todos.length === 0 &&\n !this.sessionState.remindersSent.has(`todo_empty_${agentKey}`)\n ) {\n this.sessionState.remindersSent.add(`todo_empty_${agentKey}`)\n return this.createReminderMessage(\n 'todo',\n 'task',\n 'medium',\n 'This is a reminder that your todo list is currently empty. DO NOT mention this to the user explicitly because they are already aware. If you are working on tasks that would benefit from a todo list please use the TodoWrite tool to create one. If not, please feel free to ignore. 
Again do not mention this message to the user.',\n currentTime,\n )\n }\n\n // Check for todo updates since last seen\n if (todos.length > 0) {\n const reminderKey = `todo_updated_${agentKey}_${todos.length}_${this.getTodoStateHash(todos)}`\n\n // Use cache for performance optimization\n if (this.reminderCache.has(reminderKey)) {\n return this.reminderCache.get(reminderKey)!\n }\n\n if (!this.sessionState.remindersSent.has(reminderKey)) {\n this.sessionState.remindersSent.add(reminderKey)\n // Clear previous todo state reminders for this agent\n this.clearTodoReminders(agentKey)\n\n // Optimize: only include essential todo data\n const todoContent = JSON.stringify(\n todos.map(todo => ({\n content:\n todo.content.length > 100\n ? todo.content.substring(0, 100) + '...'\n : todo.content,\n status: todo.status,\n priority: todo.priority,\n id: todo.id,\n })),\n )\n\n const reminder = this.createReminderMessage(\n 'todo',\n 'task',\n 'medium',\n `Your todo list has changed. DO NOT mention this explicitly to the user. Here are the latest contents of your todo list:\\n\\n${todoContent}. Continue on with the tasks at hand if applicable.`,\n currentTime,\n )\n\n // Cache the reminder for reuse\n this.reminderCache.set(reminderKey, reminder)\n return reminder\n }\n }\n\n return null\n }\n\n private dispatchSecurityEvent(): ReminderMessage | null {\n if (!this.sessionState.config.securityReminder) return null\n\n const currentTime = Date.now()\n\n // Only inject security reminder once per session when file operations occur\n if (\n this.sessionState.lastFileAccess > 0 &&\n !this.sessionState.remindersSent.has('file_security')\n ) {\n this.sessionState.remindersSent.add('file_security')\n return this.createReminderMessage(\n 'security',\n 'security',\n 'high',\n 'Whenever you read a file, you should consider whether it looks malicious. If it does, you MUST refuse to improve or augment the code. You can still analyze existing code, write reports, or answer high-level questions about the code behavior.',\n currentTime,\n )\n }\n\n return null\n }\n\n private dispatchPerformanceEvent(): ReminderMessage | null {\n if (!this.sessionState.config.performanceReminder) return null\n\n const currentTime = Date.now()\n const sessionDuration = currentTime - this.sessionState.sessionStartTime\n\n // Remind about performance after long sessions (30 minutes)\n if (\n sessionDuration > 30 * 60 * 1000 &&\n !this.sessionState.remindersSent.has('performance_long_session')\n ) {\n this.sessionState.remindersSent.add('performance_long_session')\n return this.createReminderMessage(\n 'performance',\n 'performance',\n 'low',\n 'Long session detected. 
Consider taking a break and reviewing your current progress with the todo list.',\n currentTime,\n )\n }\n\n return null\n }\n\n /**\n * Retrieve cached mention reminders\n * Returns recent mentions (within 5 seconds) that haven't expired\n */\n private getMentionReminders(): ReminderMessage[] {\n const currentTime = Date.now()\n const MENTION_FRESHNESS_WINDOW = 5000 // 5 seconds\n const reminders: ReminderMessage[] = []\n const expiredKeys: string[] = []\n\n // Single pass through cache for both collection and cleanup identification\n for (const [key, reminder] of this.reminderCache.entries()) {\n if (this.isMentionReminder(reminder)) {\n const age = currentTime - reminder.timestamp\n if (age <= MENTION_FRESHNESS_WINDOW) {\n reminders.push(reminder)\n } else {\n expiredKeys.push(key)\n }\n }\n }\n\n // Clean up expired mention reminders in separate pass for performance\n expiredKeys.forEach(key => this.reminderCache.delete(key))\n\n return reminders\n }\n\n /**\n * Type guard for mention reminders - centralized type checking\n * Eliminates hardcoded type strings scattered throughout the code\n */\n private isMentionReminder(reminder: ReminderMessage): boolean {\n const mentionTypes = ['agent_mention', 'file_mention', 'ask_model_mention']\n return mentionTypes.includes(reminder.type)\n }\n\n /**\n * Generate reminders for external file changes\n * Called when todo files are modified externally\n */\n public generateFileChangeReminder(context: any): ReminderMessage | null {\n const { agentId, filePath, reminder } = context\n\n if (!reminder) {\n return null\n }\n\n const currentTime = Date.now()\n const reminderKey = `file_changed_${agentId}_${filePath}_${currentTime}`\n\n // Ensure this specific file change reminder is only shown once\n if (this.sessionState.remindersSent.has(reminderKey)) {\n return null\n }\n\n this.sessionState.remindersSent.add(reminderKey)\n\n return this.createReminderMessage(\n 'file_changed',\n 'general',\n 'medium',\n reminder,\n currentTime,\n )\n }\n\n private createReminderMessage(\n type: string,\n category: ReminderMessage['category'],\n priority: ReminderMessage['priority'],\n content: string,\n timestamp: number,\n ): ReminderMessage {\n return {\n role: 'system',\n content: `<system-reminder>\\n${content}\\n</system-reminder>`,\n isMeta: true,\n timestamp,\n type,\n priority,\n category,\n }\n }\n\n private getTodoStateHash(todos: TodoItem[]): string {\n return todos\n .map(t => `${t.id}:${t.status}`)\n .sort()\n .join('|')\n }\n\n private clearTodoReminders(agentId?: string): void {\n const agentKey = agentId || 'default'\n for (const key of this.sessionState.remindersSent) {\n if (key.startsWith(`todo_updated_${agentKey}_`)) {\n this.sessionState.remindersSent.delete(key)\n }\n }\n }\n\n private setupEventDispatcher(): void {\n // Session startup events\n this.addEventListener('session:startup', context => {\n // Reset session state on startup\n this.resetSession()\n\n // Initialize session tracking\n this.sessionState.sessionStartTime = Date.now()\n this.sessionState.contextPresent =\n Object.keys(context.context || {}).length > 0\n\n \n })\n\n // Todo change events\n this.addEventListener('todo:changed', context => {\n this.sessionState.lastTodoUpdate = Date.now()\n this.clearTodoReminders(context.agentId)\n })\n\n // Todo file changed externally\n this.addEventListener('todo:file_changed', context => {\n // External file change detected, trigger reminder injection\n const agentId = context.agentId || 'default'\n this.clearTodoReminders(agentId)\n 
this.sessionState.lastTodoUpdate = Date.now()\n\n // Generate and inject file change reminder immediately\n const reminder = this.generateFileChangeReminder(context)\n if (reminder) {\n // Inject reminder into the latest user message through event system\n this.emitEvent('reminder:inject', {\n reminder: reminder.content,\n agentId,\n type: 'file_changed',\n timestamp: Date.now(),\n })\n }\n })\n\n // File access events\n this.addEventListener('file:read', context => {\n this.sessionState.lastFileAccess = Date.now()\n })\n\n // File edit events for freshness detection\n this.addEventListener('file:edited', context => {\n // File edit handling\n })\n\n // Unified mention event handlers - eliminates code duplication\n this.addEventListener('agent:mentioned', context => {\n this.createMentionReminder({\n type: 'agent_mention',\n key: `agent_mention_${context.agentType}_${context.timestamp}`,\n category: 'task',\n priority: 'high',\n content: `The user mentioned @${context.originalMention}. You MUST use the Task tool with subagent_type=\"${context.agentType}\" to delegate this task to the specified agent. Provide a detailed, self-contained task description that fully captures the user's intent for the ${context.agentType} agent to execute.`,\n timestamp: context.timestamp\n })\n })\n\n this.addEventListener('file:mentioned', context => {\n this.createMentionReminder({\n type: 'file_mention',\n key: `file_mention_${context.filePath}_${context.timestamp}`,\n category: 'general',\n priority: 'high',\n content: `The user mentioned @${context.originalMention}. You MUST read the entire content of the file at path: ${context.filePath} using the Read tool to understand the full context before proceeding with the user's request.`,\n timestamp: context.timestamp\n })\n })\n\n this.addEventListener('ask-model:mentioned', context => {\n this.createMentionReminder({\n type: 'ask_model_mention',\n key: `ask_model_mention_${context.modelName}_${context.timestamp}`,\n category: 'task',\n priority: 'high',\n content: `The user mentioned @${context.modelName}. You MUST use the AskExpertModelTool to consult this specific model for expert opinions and analysis. 
Provide the user's question or context clearly to get the most relevant response from ${context.modelName}.`,\n timestamp: context.timestamp\n })\n })\n }\n\n public addEventListener(\n event: string,\n callback: (context: any) => void,\n ): void {\n if (!this.eventDispatcher.has(event)) {\n this.eventDispatcher.set(event, [])\n }\n this.eventDispatcher.get(event)!.push(callback)\n }\n\n public emitEvent(event: string, context: any): void {\n const listeners = this.eventDispatcher.get(event) || []\n listeners.forEach(callback => {\n try {\n callback(context)\n } catch (error) {\n console.error(`Error in event listener for ${event}:`, error)\n }\n })\n }\n\n /**\n * Unified mention reminder creation - eliminates duplicate logic\n * Centralizes reminder creation with consistent deduplication\n */\n private createMentionReminder(params: {\n type: string\n key: string\n category: ReminderMessage['category']\n priority: ReminderMessage['priority']\n content: string\n timestamp: number\n }): void {\n if (!this.sessionState.remindersSent.has(params.key)) {\n this.sessionState.remindersSent.add(params.key)\n \n const reminder = this.createReminderMessage(\n params.type,\n params.category,\n params.priority,\n params.content,\n params.timestamp\n )\n \n this.reminderCache.set(params.key, reminder)\n }\n }\n\n public resetSession(): void {\n this.sessionState = {\n lastTodoUpdate: 0,\n lastFileAccess: 0,\n sessionStartTime: Date.now(),\n remindersSent: new Set(),\n contextPresent: false,\n reminderCount: 0,\n config: { ...this.sessionState.config }, // Preserve config across resets\n }\n this.reminderCache.clear() // Clear cache on session reset\n }\n\n public updateConfig(config: Partial<ReminderConfig>): void {\n this.sessionState.config = { ...this.sessionState.config, ...config }\n }\n\n public getSessionState(): SessionReminderState {\n return { ...this.sessionState }\n }\n}\n\nexport const systemReminderService = new SystemReminderService()\n\nexport const generateSystemReminders = (\n hasContext: boolean = false,\n agentId?: string,\n) => systemReminderService.generateReminders(hasContext, agentId)\n\nexport const generateFileChangeReminder = (context: any) =>\n systemReminderService.generateFileChangeReminder(context)\n\nexport const emitReminderEvent = (event: string, context: any) =>\n systemReminderService.emitEvent(event, context)\n\nexport const resetReminderSession = () => systemReminderService.resetSession()\nexport const getReminderSessionState = () =>\n systemReminderService.getSessionState()\n"],
- "mappings": "AAAA,SAAS,gBAA0B;AA6BnC,MAAM,sBAAsB;AAAA,EAClB,eAAqC;AAAA,IAC3C,gBAAgB;AAAA,IAChB,gBAAgB;AAAA,IAChB,kBAAkB,KAAK,IAAI;AAAA,IAC3B,eAAe,oBAAI,IAAI;AAAA,IACvB,gBAAgB;AAAA,IAChB,eAAe;AAAA,IACf,QAAQ;AAAA,MACN,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,qBAAqB;AAAA,MACrB,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEQ,kBAAkB,oBAAI,IAA2C;AAAA,EACjE,gBAAgB,oBAAI,IAA6B;AAAA,EAEzD,cAAc;AACZ,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,kBACL,aAAsB,OACtB,SACmB;AACnB,SAAK,aAAa,iBAAiB;AAGnC,QAAI,CAAC,YAAY;AACf,aAAO,CAAC;AAAA,IACV;AAGA,QACE,KAAK,aAAa,iBAClB,KAAK,aAAa,OAAO,wBACzB;AACA,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,YAA+B,CAAC;AACtC,UAAM,cAAc,KAAK,IAAI;AAG7B,UAAM,qBAAqB;AAAA,MACzB,MAAM,KAAK,kBAAkB,OAAO;AAAA,MACpC,MAAM,KAAK,sBAAsB;AAAA,MACjC,MAAM,KAAK,yBAAyB;AAAA,MACpC,MAAM,KAAK,oBAAoB;AAAA;AAAA,IACjC;AAEA,eAAW,aAAa,oBAAoB;AAC1C,UAAI,UAAU,UAAU,EAAG;AAE3B,YAAM,SAAS,UAAU;AACzB,UAAI,QAAQ;AAEV,cAAM,iBAAiB,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAC/D,kBAAU,KAAK,GAAG,cAAc;AAChC,aAAK,aAAa,iBAAiB,eAAe;AAAA,MACpD;AAAA,IACF;AAKA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkB,SAA0C;AAClE,QAAI,CAAC,KAAK,aAAa,OAAO,kBAAmB,QAAO;AAGxD,UAAM,QAAQ,SAAS,OAAO;AAC9B,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,WAAW,WAAW;AAG5B,QACE,MAAM,WAAW,KACjB,CAAC,KAAK,aAAa,cAAc,IAAI,cAAc,QAAQ,EAAE,GAC7D;AACA,WAAK,aAAa,cAAc,IAAI,cAAc,QAAQ,EAAE;AAC5D,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,cAAc,gBAAgB,QAAQ,IAAI,MAAM,MAAM,IAAI,KAAK,iBAAiB,KAAK,CAAC;AAG5F,UAAI,KAAK,cAAc,IAAI,WAAW,GAAG;AACvC,eAAO,KAAK,cAAc,IAAI,WAAW;AAAA,MAC3C;AAEA,UAAI,CAAC,KAAK,aAAa,cAAc,IAAI,WAAW,GAAG;AACrD,aAAK,aAAa,cAAc,IAAI,WAAW;AAE/C,aAAK,mBAAmB,QAAQ;AAGhC,cAAM,cAAc,KAAK;AAAA,UACvB,MAAM,IAAI,WAAS;AAAA,YACjB,SACE,KAAK,QAAQ,SAAS,MAClB,KAAK,QAAQ,UAAU,GAAG,GAAG,IAAI,QACjC,KAAK;AAAA,YACX,QAAQ,KAAK;AAAA,YACb,UAAU,KAAK;AAAA,YACf,IAAI,KAAK;AAAA,UACX,EAAE;AAAA,QACJ;AAEA,cAAM,WAAW,KAAK;AAAA,UACpB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,EAA8H,WAAW;AAAA,UACzI;AAAA,QACF;AAGA,aAAK,cAAc,IAAI,aAAa,QAAQ;AAC5C,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,wBAAgD;AACtD,QAAI,CAAC,KAAK,aAAa,OAAO,iBAAkB,QAAO;AAEvD,UAAM,cAAc,KAAK,IAAI;AAG7B,QACE,KAAK,aAAa,iBAAiB,KACnC,CAAC,KAAK,aAAa,cAAc,IAAI,eAAe,GACpD;AACA,WAAK,aAAa,cAAc,IAAI,eAAe;AACnD,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,2BAAmD;AACzD,QAAI,CAAC,KAAK,aAAa,OAAO,oBAAqB,QAAO;AAE1D,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,kBAAkB,cAAc,KAAK,aAAa;AAGxD,QACE,kBAAkB,KAAK,KAAK,OAC5B,CAAC,KAAK,aAAa,cAAc,IAAI,0BAA0B,GAC/D;AACA,WAAK,aAAa,cAAc,IAAI,0BAA0B;AAC9D,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,sBAAyC;AAC/C,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,2BAA2B;AACjC,UAAM,YAA+B,CAAC;AACtC,UAAM,cAAwB,CAAC;AAG/B,eAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG;AAC1D,UAAI,KAAK,kBAAkB,QAAQ,GAAG;AACpC,cAAM,MAAM,cAAc,SAAS;AACnC,YAAI,OAAO,0BAA0B;AACnC,oBAAU,KAAK,QAAQ;AAAA,QACzB,OAAO;AACL,sBAAY,KAAK,GAAG;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAGA,gBAAY,QAAQ,SAAO,KAAK,cAAc,OAAO,GAAG,CAAC;AAEzD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB,UAAoC;AAC5D,UAAM,eAAe,CAAC,iBAAiB,gBAAgB,mBAAmB;AAC1E,WAAO,aAAa,SAAS,SAAS,IAAI;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,2BAA2B,SAAsC;AACtE,UAAM,EAAE,SAAS,UAAU,SAAS,IAAI;AAExC,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,IACT;AAEA,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,cAAc,gBAAgB,OAAO,IAAI,QAAQ,IAAI,WAAW;AAGtE,QAAI,KAAK,aAAa,cAAc,IAAI,WAAW,GAAG;AACpD,aAAO;AAAA,IACT;AAEA,SAAK,aAAa,cAAc,IAAI,WAAW;AAE/C,WAAO,KAAK;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,I
ACF;AAAA,EACF;AAAA,EAEQ,sBACN,MACA,UACA,UACA,SACA,WACiB;AACjB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,EAAsB,OAAO;AAAA;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,iBAAiB,OAA2B;AAClD,WAAO,MACJ,IAAI,OAAK,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAC9B,KAAK,EACL,KAAK,GAAG;AAAA,EACb;AAAA,EAEQ,mBAAmB,SAAwB;AACjD,UAAM,WAAW,WAAW;AAC5B,eAAW,OAAO,KAAK,aAAa,eAAe;AACjD,UAAI,IAAI,WAAW,gBAAgB,QAAQ,GAAG,GAAG;AAC/C,aAAK,aAAa,cAAc,OAAO,GAAG;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,uBAA6B;AAEnC,SAAK,iBAAiB,mBAAmB,aAAW;AAElD,WAAK,aAAa;AAGlB,WAAK,aAAa,mBAAmB,KAAK,IAAI;AAC9C,WAAK,aAAa,iBAChB,OAAO,KAAK,QAAQ,WAAW,CAAC,CAAC,EAAE,SAAS;AAAA,IAGhD,CAAC;AAGD,SAAK,iBAAiB,gBAAgB,aAAW;AAC/C,WAAK,aAAa,iBAAiB,KAAK,IAAI;AAC5C,WAAK,mBAAmB,QAAQ,OAAO;AAAA,IACzC,CAAC;AAGD,SAAK,iBAAiB,qBAAqB,aAAW;AAEpD,YAAM,UAAU,QAAQ,WAAW;AACnC,WAAK,mBAAmB,OAAO;AAC/B,WAAK,aAAa,iBAAiB,KAAK,IAAI;AAG5C,YAAM,WAAW,KAAK,2BAA2B,OAAO;AACxD,UAAI,UAAU;AAEZ,aAAK,UAAU,mBAAmB;AAAA,UAChC,UAAU,SAAS;AAAA,UACnB;AAAA,UACA,MAAM;AAAA,UACN,WAAW,KAAK,IAAI;AAAA,QACtB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAGD,SAAK,iBAAiB,aAAa,aAAW;AAC5C,WAAK,aAAa,iBAAiB,KAAK,IAAI;AAAA,IAC9C,CAAC;AAGD,SAAK,iBAAiB,eAAe,aAAW;AAAA,IAEhD,CAAC;AAGD,SAAK,iBAAiB,mBAAmB,aAAW;AAClD,WAAK,sBAAsB;AAAA,QACzB,MAAM;AAAA,QACN,KAAK,iBAAiB,QAAQ,SAAS,IAAI,QAAQ,SAAS;AAAA,QAC5D,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,uBAAuB,QAAQ,eAAe,oDAAoD,QAAQ,SAAS,qJAAqJ,QAAQ,SAAS;AAAA,QAClS,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAED,SAAK,iBAAiB,kBAAkB,aAAW;AACjD,WAAK,sBAAsB;AAAA,QACzB,MAAM;AAAA,QACN,KAAK,gBAAgB,QAAQ,QAAQ,IAAI,QAAQ,SAAS;AAAA,QAC1D,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,uBAAuB,QAAQ,eAAe,2DAA2D,QAAQ,QAAQ;AAAA,QAClI,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAED,SAAK,iBAAiB,uBAAuB,aAAW;AACtD,WAAK,sBAAsB;AAAA,QACzB,MAAM;AAAA,QACN,KAAK,qBAAqB,QAAQ,SAAS,IAAI,QAAQ,SAAS;AAAA,QAChE,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,uBAAuB,QAAQ,SAAS,gMAAgM,QAAQ,SAAS;AAAA,QAClQ,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEO,iBACL,OACA,UACM;AACN,QAAI,CAAC,KAAK,gBAAgB,IAAI,KAAK,GAAG;AACpC,WAAK,gBAAgB,IAAI,OAAO,CAAC,CAAC;AAAA,IACpC;AACA,SAAK,gBAAgB,IAAI,KAAK,EAAG,KAAK,QAAQ;AAAA,EAChD;AAAA,EAEO,UAAU,OAAe,SAAoB;AAClD,UAAM,YAAY,KAAK,gBAAgB,IAAI,KAAK,KAAK,CAAC;AACtD,cAAU,QAAQ,cAAY;AAC5B,UAAI;AACF,iBAAS,OAAO;AAAA,MAClB,SAAS,OAAO;AACd,gBAAQ,MAAM,+BAA+B,KAAK,KAAK,KAAK;AAAA,MAC9D;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,sBAAsB,QAOrB;AACP,QAAI,CAAC,KAAK,aAAa,cAAc,IAAI,OAAO,GAAG,GAAG;AACpD,WAAK,aAAa,cAAc,IAAI,OAAO,GAAG;AAE9C,YAAM,WAAW,KAAK;AAAA,QACpB,OAAO;AAAA,QACP,OAAO;AAAA,QACP,OAAO;AAAA,QACP,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAEA,WAAK,cAAc,IAAI,OAAO,KAAK,QAAQ;AAAA,IAC7C;AAAA,EACF;AAAA,EAEO,eAAqB;AAC1B,SAAK,eAAe;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,kBAAkB,KAAK,IAAI;AAAA,MAC3B,eAAe,oBAAI,IAAI;AAAA,MACvB,gBAAgB;AAAA,MAChB,eAAe;AAAA,MACf,QAAQ,EAAE,GAAG,KAAK,aAAa,OAAO;AAAA;AAAA,IACxC;AACA,SAAK,cAAc,MAAM;AAAA,EAC3B;AAAA,EAEO,aAAa,QAAuC;AACzD,SAAK,aAAa,SAAS,EAAE,GAAG,KAAK,aAAa,QAAQ,GAAG,OAAO;AAAA,EACtE;AAAA,EAEO,kBAAwC;AAC7C,WAAO,EAAE,GAAG,KAAK,aAAa;AAAA,EAChC;AACF;AAEO,MAAM,wBAAwB,IAAI,sBAAsB;AAExD,MAAM,0BAA0B,CACrC,aAAsB,OACtB,YACG,sBAAsB,kBAAkB,YAAY,OAAO;AAEzD,MAAM,6BAA6B,CAAC,YACzC,sBAAsB,2BAA2B,OAAO;AAEnD,MAAM,oBAAoB,CAAC,OAAe,YAC/C,sBAAsB,UAAU,OAAO,OAAO;AAEzC,MAAM,uBAAuB,MAAM,sBAAsB,aAAa;AACtE,MAAM,0BAA0B,MACrC,sBAAsB,gBAAgB;",
+ "sourcesContent": ["import { getTodos, TodoItem } from '@utils/todoStorage'\n\nexport interface ReminderMessage {\n role: 'system'\n content: string\n isMeta: boolean\n timestamp: number\n type: string\n priority: 'low' | 'medium' | 'high'\n category: 'task' | 'security' | 'performance' | 'general'\n}\n\ninterface ReminderConfig {\n todoEmptyReminder: boolean\n securityReminder: boolean\n performanceReminder: boolean\n maxRemindersPerSession: number\n}\n\ninterface SessionReminderState {\n lastTodoUpdate: number\n lastFileAccess: number\n sessionStartTime: number\n remindersSent: Set<string>\n contextPresent: boolean\n reminderCount: number\n config: ReminderConfig\n}\n\nclass SystemReminderService {\n private sessionState: SessionReminderState = {\n lastTodoUpdate: 0,\n lastFileAccess: 0,\n sessionStartTime: Date.now(),\n remindersSent: new Set(),\n contextPresent: false,\n reminderCount: 0,\n config: {\n todoEmptyReminder: true,\n securityReminder: true,\n performanceReminder: true,\n maxRemindersPerSession: 10,\n },\n }\n\n private eventDispatcher = new Map<string, Array<(context: any) => void>>()\n private reminderCache = new Map<string, ReminderMessage>()\n\n constructor() {\n this.setupEventDispatcher()\n }\n\n /**\n * Conditional reminder injection - only when context is present\n * Enhanced with performance optimizations and priority management\n */\n public generateReminders(\n hasContext: boolean = false,\n agentId?: string,\n ): ReminderMessage[] {\n this.sessionState.contextPresent = hasContext\n\n // Only inject when context is present (matching original behavior)\n if (!hasContext) {\n return []\n }\n\n // Check session reminder limit to prevent overload\n if (\n this.sessionState.reminderCount >=\n this.sessionState.config.maxRemindersPerSession\n ) {\n return []\n }\n\n const reminders: ReminderMessage[] = []\n const currentTime = Date.now()\n\n // Use lazy evaluation for performance with agent context\n const reminderGenerators = [\n () => this.dispatchTodoEvent(agentId),\n () => this.dispatchSecurityEvent(),\n () => this.dispatchPerformanceEvent(),\n () => this.getMentionReminders(), // Add mention reminders\n ]\n\n for (const generator of reminderGenerators) {\n if (reminders.length >= 5) break // Slightly increase limit to accommodate mentions\n\n const result = generator()\n if (result) {\n // Handle both single reminders and arrays\n const remindersToAdd = Array.isArray(result) ? result : [result]\n reminders.push(...remindersToAdd)\n this.sessionState.reminderCount += remindersToAdd.length\n }\n }\n\n // Log aggregated metrics instead of individual events for performance\n\n return reminders\n }\n\n private dispatchTodoEvent(agentId?: string): ReminderMessage | null {\n if (!this.sessionState.config.todoEmptyReminder) return null\n\n // Use agent-scoped todo access\n const todos = getTodos(agentId)\n const currentTime = Date.now()\n const agentKey = agentId || 'default'\n\n // Check if this is a fresh session (no todos seen yet)\n if (\n todos.length === 0 &&\n !this.sessionState.remindersSent.has(`todo_empty_${agentKey}`)\n ) {\n this.sessionState.remindersSent.add(`todo_empty_${agentKey}`)\n return this.createReminderMessage(\n 'todo',\n 'task',\n 'medium',\n 'This is a reminder that your todo list is currently empty. DO NOT mention this to the user explicitly because they are already aware. If you are working on tasks that would benefit from a todo list please use the TodoWrite tool to create one. If not, please feel free to ignore. 
Again do not mention this message to the user.',\n currentTime,\n )\n }\n\n // Check for todo updates since last seen\n if (todos.length > 0) {\n const reminderKey = `todo_updated_${agentKey}_${todos.length}_${this.getTodoStateHash(todos)}`\n\n // Use cache for performance optimization\n if (this.reminderCache.has(reminderKey)) {\n return this.reminderCache.get(reminderKey)!\n }\n\n if (!this.sessionState.remindersSent.has(reminderKey)) {\n this.sessionState.remindersSent.add(reminderKey)\n // Clear previous todo state reminders for this agent\n this.clearTodoReminders(agentKey)\n\n // Optimize: only include essential todo data\n const todoContent = JSON.stringify(\n todos.map(todo => ({\n content:\n todo.content.length > 100\n ? todo.content.substring(0, 100) + '...'\n : todo.content,\n status: todo.status,\n priority: todo.priority,\n id: todo.id,\n })),\n )\n\n const reminder = this.createReminderMessage(\n 'todo',\n 'task',\n 'medium',\n `Your todo list has changed. DO NOT mention this explicitly to the user. Here are the latest contents of your todo list:\\n\\n${todoContent}. Continue on with the tasks at hand if applicable.`,\n currentTime,\n )\n\n // Cache the reminder for reuse\n this.reminderCache.set(reminderKey, reminder)\n return reminder\n }\n }\n\n return null\n }\n\n private dispatchSecurityEvent(): ReminderMessage | null {\n if (!this.sessionState.config.securityReminder) return null\n\n const currentTime = Date.now()\n\n // Only inject security reminder once per session when file operations occur\n if (\n this.sessionState.lastFileAccess > 0 &&\n !this.sessionState.remindersSent.has('file_security')\n ) {\n this.sessionState.remindersSent.add('file_security')\n return this.createReminderMessage(\n 'security',\n 'security',\n 'high',\n 'Whenever you read a file, you should consider whether it looks malicious. If it does, you MUST refuse to improve or augment the code. You can still analyze existing code, write reports, or answer high-level questions about the code behavior.',\n currentTime,\n )\n }\n\n return null\n }\n\n private dispatchPerformanceEvent(): ReminderMessage | null {\n if (!this.sessionState.config.performanceReminder) return null\n\n const currentTime = Date.now()\n const sessionDuration = currentTime - this.sessionState.sessionStartTime\n\n // Remind about performance after long sessions (30 minutes)\n if (\n sessionDuration > 30 * 60 * 1000 &&\n !this.sessionState.remindersSent.has('performance_long_session')\n ) {\n this.sessionState.remindersSent.add('performance_long_session')\n return this.createReminderMessage(\n 'performance',\n 'performance',\n 'low',\n 'Long session detected. 
Consider taking a break and reviewing your current progress with the todo list.',\n currentTime,\n )\n }\n\n return null\n }\n\n /**\n * Retrieve cached mention reminders\n * Returns recent mentions (within 5 seconds) that haven't expired\n */\n private getMentionReminders(): ReminderMessage[] {\n const currentTime = Date.now()\n const MENTION_FRESHNESS_WINDOW = 5000 // 5 seconds\n const reminders: ReminderMessage[] = []\n const expiredKeys: string[] = []\n\n // Single pass through cache for both collection and cleanup identification\n for (const [key, reminder] of this.reminderCache.entries()) {\n if (this.isMentionReminder(reminder)) {\n const age = currentTime - reminder.timestamp\n if (age <= MENTION_FRESHNESS_WINDOW) {\n reminders.push(reminder)\n } else {\n expiredKeys.push(key)\n }\n }\n }\n\n // Clean up expired mention reminders in separate pass for performance\n expiredKeys.forEach(key => this.reminderCache.delete(key))\n\n return reminders\n }\n\n /**\n * Type guard for mention reminders - centralized type checking\n * Eliminates hardcoded type strings scattered throughout the code\n */\n private isMentionReminder(reminder: ReminderMessage): boolean {\n const mentionTypes = ['agent_mention', 'file_mention', 'ask_model_mention']\n return mentionTypes.includes(reminder.type)\n }\n\n /**\n * Generate reminders for external file changes\n * Called when todo files are modified externally\n */\n public generateFileChangeReminder(context: any): ReminderMessage | null {\n const { agentId, filePath, reminder } = context\n\n if (!reminder) {\n return null\n }\n\n const currentTime = Date.now()\n const reminderKey = `file_changed_${agentId}_${filePath}_${currentTime}`\n\n // Ensure this specific file change reminder is only shown once\n if (this.sessionState.remindersSent.has(reminderKey)) {\n return null\n }\n\n this.sessionState.remindersSent.add(reminderKey)\n\n return this.createReminderMessage(\n 'file_changed',\n 'general',\n 'medium',\n reminder,\n currentTime,\n )\n }\n\n private createReminderMessage(\n type: string,\n category: ReminderMessage['category'],\n priority: ReminderMessage['priority'],\n content: string,\n timestamp: number,\n ): ReminderMessage {\n return {\n role: 'system',\n content: `<system-reminder>\\n${content}\\n</system-reminder>`,\n isMeta: true,\n timestamp,\n type,\n priority,\n category,\n }\n }\n\n private getTodoStateHash(todos: TodoItem[]): string {\n return todos\n .map(t => `${t.id}:${t.status}`)\n .sort()\n .join('|')\n }\n\n private clearTodoReminders(agentId?: string): void {\n const agentKey = agentId || 'default'\n for (const key of this.sessionState.remindersSent) {\n if (key.startsWith(`todo_updated_${agentKey}_`)) {\n this.sessionState.remindersSent.delete(key)\n }\n }\n }\n\n private setupEventDispatcher(): void {\n // Session startup events\n this.addEventListener('session:startup', context => {\n // Reset session state on startup\n this.resetSession()\n\n // Initialize session tracking\n this.sessionState.sessionStartTime = Date.now()\n this.sessionState.contextPresent =\n Object.keys(context.context || {}).length > 0\n })\n\n // Todo change events\n this.addEventListener('todo:changed', context => {\n this.sessionState.lastTodoUpdate = Date.now()\n this.clearTodoReminders(context.agentId)\n })\n\n // Todo file changed externally\n this.addEventListener('todo:file_changed', context => {\n // External file change detected, trigger reminder injection\n const agentId = context.agentId || 'default'\n this.clearTodoReminders(agentId)\n 
this.sessionState.lastTodoUpdate = Date.now()\n\n // Generate and inject file change reminder immediately\n const reminder = this.generateFileChangeReminder(context)\n if (reminder) {\n // Inject reminder into the latest user message through event system\n this.emitEvent('reminder:inject', {\n reminder: reminder.content,\n agentId,\n type: 'file_changed',\n timestamp: Date.now(),\n })\n }\n })\n\n // File access events\n this.addEventListener('file:read', context => {\n this.sessionState.lastFileAccess = Date.now()\n })\n\n // File edit events for freshness detection\n this.addEventListener('file:edited', context => {\n // File edit handling\n })\n\n // Unified mention event handlers - eliminates code duplication\n this.addEventListener('agent:mentioned', context => {\n this.createMentionReminder({\n type: 'agent_mention',\n key: `agent_mention_${context.agentType}_${context.timestamp}`,\n category: 'task',\n priority: 'high',\n content: `The user mentioned @${context.originalMention}. You MUST use the Task tool with subagent_type=\"${context.agentType}\" to delegate this task to the specified agent. Provide a detailed, self-contained task description that fully captures the user's intent for the ${context.agentType} agent to execute.`,\n timestamp: context.timestamp,\n })\n })\n\n this.addEventListener('file:mentioned', context => {\n this.createMentionReminder({\n type: 'file_mention',\n key: `file_mention_${context.filePath}_${context.timestamp}`,\n category: 'general',\n priority: 'high',\n content: `The user mentioned @${context.originalMention}. You MUST read the entire content of the file at path: ${context.filePath} using the Read tool to understand the full context before proceeding with the user's request.`,\n timestamp: context.timestamp,\n })\n })\n\n this.addEventListener('ask-model:mentioned', context => {\n this.createMentionReminder({\n type: 'ask_model_mention',\n key: `ask_model_mention_${context.modelName}_${context.timestamp}`,\n category: 'task',\n priority: 'high',\n content: `The user mentioned @${context.modelName}. You MUST use the AskExpertModelTool to consult this specific model for expert opinions and analysis. 
Provide the user's question or context clearly to get the most relevant response from ${context.modelName}.`,\n timestamp: context.timestamp,\n })\n })\n }\n\n public addEventListener(\n event: string,\n callback: (context: any) => void,\n ): void {\n if (!this.eventDispatcher.has(event)) {\n this.eventDispatcher.set(event, [])\n }\n this.eventDispatcher.get(event)!.push(callback)\n }\n\n public emitEvent(event: string, context: any): void {\n const listeners = this.eventDispatcher.get(event) || []\n listeners.forEach(callback => {\n try {\n callback(context)\n } catch (error) {\n console.error(`Error in event listener for ${event}:`, error)\n }\n })\n }\n\n /**\n * Unified mention reminder creation - eliminates duplicate logic\n * Centralizes reminder creation with consistent deduplication\n */\n private createMentionReminder(params: {\n type: string\n key: string\n category: ReminderMessage['category']\n priority: ReminderMessage['priority']\n content: string\n timestamp: number\n }): void {\n if (!this.sessionState.remindersSent.has(params.key)) {\n this.sessionState.remindersSent.add(params.key)\n\n const reminder = this.createReminderMessage(\n params.type,\n params.category,\n params.priority,\n params.content,\n params.timestamp,\n )\n\n this.reminderCache.set(params.key, reminder)\n }\n }\n\n public resetSession(): void {\n this.sessionState = {\n lastTodoUpdate: 0,\n lastFileAccess: 0,\n sessionStartTime: Date.now(),\n remindersSent: new Set(),\n contextPresent: false,\n reminderCount: 0,\n config: { ...this.sessionState.config }, // Preserve config across resets\n }\n this.reminderCache.clear() // Clear cache on session reset\n }\n\n public updateConfig(config: Partial<ReminderConfig>): void {\n this.sessionState.config = { ...this.sessionState.config, ...config }\n }\n\n public getSessionState(): SessionReminderState {\n return { ...this.sessionState }\n }\n}\n\nexport const systemReminderService = new SystemReminderService()\n\nexport const generateSystemReminders = (\n hasContext: boolean = false,\n agentId?: string,\n) => systemReminderService.generateReminders(hasContext, agentId)\n\nexport const generateFileChangeReminder = (context: any) =>\n systemReminderService.generateFileChangeReminder(context)\n\nexport const emitReminderEvent = (event: string, context: any) =>\n systemReminderService.emitEvent(event, context)\n\nexport const resetReminderSession = () => systemReminderService.resetSession()\nexport const getReminderSessionState = () =>\n systemReminderService.getSessionState()\n"],
+ "mappings": "AAAA,SAAS,gBAA0B;AA6BnC,MAAM,sBAAsB;AAAA,EAClB,eAAqC;AAAA,IAC3C,gBAAgB;AAAA,IAChB,gBAAgB;AAAA,IAChB,kBAAkB,KAAK,IAAI;AAAA,IAC3B,eAAe,oBAAI,IAAI;AAAA,IACvB,gBAAgB;AAAA,IAChB,eAAe;AAAA,IACf,QAAQ;AAAA,MACN,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,qBAAqB;AAAA,MACrB,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEQ,kBAAkB,oBAAI,IAA2C;AAAA,EACjE,gBAAgB,oBAAI,IAA6B;AAAA,EAEzD,cAAc;AACZ,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,kBACL,aAAsB,OACtB,SACmB;AACnB,SAAK,aAAa,iBAAiB;AAGnC,QAAI,CAAC,YAAY;AACf,aAAO,CAAC;AAAA,IACV;AAGA,QACE,KAAK,aAAa,iBAClB,KAAK,aAAa,OAAO,wBACzB;AACA,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,YAA+B,CAAC;AACtC,UAAM,cAAc,KAAK,IAAI;AAG7B,UAAM,qBAAqB;AAAA,MACzB,MAAM,KAAK,kBAAkB,OAAO;AAAA,MACpC,MAAM,KAAK,sBAAsB;AAAA,MACjC,MAAM,KAAK,yBAAyB;AAAA,MACpC,MAAM,KAAK,oBAAoB;AAAA;AAAA,IACjC;AAEA,eAAW,aAAa,oBAAoB;AAC1C,UAAI,UAAU,UAAU,EAAG;AAE3B,YAAM,SAAS,UAAU;AACzB,UAAI,QAAQ;AAEV,cAAM,iBAAiB,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAC/D,kBAAU,KAAK,GAAG,cAAc;AAChC,aAAK,aAAa,iBAAiB,eAAe;AAAA,MACpD;AAAA,IACF;AAIA,WAAO;AAAA,EACT;AAAA,EAEQ,kBAAkB,SAA0C;AAClE,QAAI,CAAC,KAAK,aAAa,OAAO,kBAAmB,QAAO;AAGxD,UAAM,QAAQ,SAAS,OAAO;AAC9B,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,WAAW,WAAW;AAG5B,QACE,MAAM,WAAW,KACjB,CAAC,KAAK,aAAa,cAAc,IAAI,cAAc,QAAQ,EAAE,GAC7D;AACA,WAAK,aAAa,cAAc,IAAI,cAAc,QAAQ,EAAE;AAC5D,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,cAAc,gBAAgB,QAAQ,IAAI,MAAM,MAAM,IAAI,KAAK,iBAAiB,KAAK,CAAC;AAG5F,UAAI,KAAK,cAAc,IAAI,WAAW,GAAG;AACvC,eAAO,KAAK,cAAc,IAAI,WAAW;AAAA,MAC3C;AAEA,UAAI,CAAC,KAAK,aAAa,cAAc,IAAI,WAAW,GAAG;AACrD,aAAK,aAAa,cAAc,IAAI,WAAW;AAE/C,aAAK,mBAAmB,QAAQ;AAGhC,cAAM,cAAc,KAAK;AAAA,UACvB,MAAM,IAAI,WAAS;AAAA,YACjB,SACE,KAAK,QAAQ,SAAS,MAClB,KAAK,QAAQ,UAAU,GAAG,GAAG,IAAI,QACjC,KAAK;AAAA,YACX,QAAQ,KAAK;AAAA,YACb,UAAU,KAAK;AAAA,YACf,IAAI,KAAK;AAAA,UACX,EAAE;AAAA,QACJ;AAEA,cAAM,WAAW,KAAK;AAAA,UACpB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,EAA8H,WAAW;AAAA,UACzI;AAAA,QACF;AAGA,aAAK,cAAc,IAAI,aAAa,QAAQ;AAC5C,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,wBAAgD;AACtD,QAAI,CAAC,KAAK,aAAa,OAAO,iBAAkB,QAAO;AAEvD,UAAM,cAAc,KAAK,IAAI;AAG7B,QACE,KAAK,aAAa,iBAAiB,KACnC,CAAC,KAAK,aAAa,cAAc,IAAI,eAAe,GACpD;AACA,WAAK,aAAa,cAAc,IAAI,eAAe;AACnD,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,2BAAmD;AACzD,QAAI,CAAC,KAAK,aAAa,OAAO,oBAAqB,QAAO;AAE1D,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,kBAAkB,cAAc,KAAK,aAAa;AAGxD,QACE,kBAAkB,KAAK,KAAK,OAC5B,CAAC,KAAK,aAAa,cAAc,IAAI,0BAA0B,GAC/D;AACA,WAAK,aAAa,cAAc,IAAI,0BAA0B;AAC9D,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,sBAAyC;AAC/C,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,2BAA2B;AACjC,UAAM,YAA+B,CAAC;AACtC,UAAM,cAAwB,CAAC;AAG/B,eAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG;AAC1D,UAAI,KAAK,kBAAkB,QAAQ,GAAG;AACpC,cAAM,MAAM,cAAc,SAAS;AACnC,YAAI,OAAO,0BAA0B;AACnC,oBAAU,KAAK,QAAQ;AAAA,QACzB,OAAO;AACL,sBAAY,KAAK,GAAG;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAGA,gBAAY,QAAQ,SAAO,KAAK,cAAc,OAAO,GAAG,CAAC;AAEzD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB,UAAoC;AAC5D,UAAM,eAAe,CAAC,iBAAiB,gBAAgB,mBAAmB;AAC1E,WAAO,aAAa,SAAS,SAAS,IAAI;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,2BAA2B,SAAsC;AACtE,UAAM,EAAE,SAAS,UAAU,SAAS,IAAI;AAExC,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,IACT;AAEA,UAAM,cAAc,KAAK,IAAI;AAC7B,UAAM,cAAc,gBAAgB,OAAO,IAAI,QAAQ,IAAI,WAAW;AAGtE,QAAI,KAAK,aAAa,cAAc,IAAI,WAAW,GAAG;AACpD,aAAO;AAAA,IACT;AAEA,SAAK,aAAa,cAAc,IAAI,WAAW;AAE/C,WAAO,KAAK;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,I
ACF;AAAA,EACF;AAAA,EAEQ,sBACN,MACA,UACA,UACA,SACA,WACiB;AACjB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,EAAsB,OAAO;AAAA;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,iBAAiB,OAA2B;AAClD,WAAO,MACJ,IAAI,OAAK,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAC9B,KAAK,EACL,KAAK,GAAG;AAAA,EACb;AAAA,EAEQ,mBAAmB,SAAwB;AACjD,UAAM,WAAW,WAAW;AAC5B,eAAW,OAAO,KAAK,aAAa,eAAe;AACjD,UAAI,IAAI,WAAW,gBAAgB,QAAQ,GAAG,GAAG;AAC/C,aAAK,aAAa,cAAc,OAAO,GAAG;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,uBAA6B;AAEnC,SAAK,iBAAiB,mBAAmB,aAAW;AAElD,WAAK,aAAa;AAGlB,WAAK,aAAa,mBAAmB,KAAK,IAAI;AAC9C,WAAK,aAAa,iBAChB,OAAO,KAAK,QAAQ,WAAW,CAAC,CAAC,EAAE,SAAS;AAAA,IAChD,CAAC;AAGD,SAAK,iBAAiB,gBAAgB,aAAW;AAC/C,WAAK,aAAa,iBAAiB,KAAK,IAAI;AAC5C,WAAK,mBAAmB,QAAQ,OAAO;AAAA,IACzC,CAAC;AAGD,SAAK,iBAAiB,qBAAqB,aAAW;AAEpD,YAAM,UAAU,QAAQ,WAAW;AACnC,WAAK,mBAAmB,OAAO;AAC/B,WAAK,aAAa,iBAAiB,KAAK,IAAI;AAG5C,YAAM,WAAW,KAAK,2BAA2B,OAAO;AACxD,UAAI,UAAU;AAEZ,aAAK,UAAU,mBAAmB;AAAA,UAChC,UAAU,SAAS;AAAA,UACnB;AAAA,UACA,MAAM;AAAA,UACN,WAAW,KAAK,IAAI;AAAA,QACtB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAGD,SAAK,iBAAiB,aAAa,aAAW;AAC5C,WAAK,aAAa,iBAAiB,KAAK,IAAI;AAAA,IAC9C,CAAC;AAGD,SAAK,iBAAiB,eAAe,aAAW;AAAA,IAEhD,CAAC;AAGD,SAAK,iBAAiB,mBAAmB,aAAW;AAClD,WAAK,sBAAsB;AAAA,QACzB,MAAM;AAAA,QACN,KAAK,iBAAiB,QAAQ,SAAS,IAAI,QAAQ,SAAS;AAAA,QAC5D,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,uBAAuB,QAAQ,eAAe,oDAAoD,QAAQ,SAAS,qJAAqJ,QAAQ,SAAS;AAAA,QAClS,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAED,SAAK,iBAAiB,kBAAkB,aAAW;AACjD,WAAK,sBAAsB;AAAA,QACzB,MAAM;AAAA,QACN,KAAK,gBAAgB,QAAQ,QAAQ,IAAI,QAAQ,SAAS;AAAA,QAC1D,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,uBAAuB,QAAQ,eAAe,2DAA2D,QAAQ,QAAQ;AAAA,QAClI,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAED,SAAK,iBAAiB,uBAAuB,aAAW;AACtD,WAAK,sBAAsB;AAAA,QACzB,MAAM;AAAA,QACN,KAAK,qBAAqB,QAAQ,SAAS,IAAI,QAAQ,SAAS;AAAA,QAChE,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,uBAAuB,QAAQ,SAAS,gMAAgM,QAAQ,SAAS;AAAA,QAClQ,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEO,iBACL,OACA,UACM;AACN,QAAI,CAAC,KAAK,gBAAgB,IAAI,KAAK,GAAG;AACpC,WAAK,gBAAgB,IAAI,OAAO,CAAC,CAAC;AAAA,IACpC;AACA,SAAK,gBAAgB,IAAI,KAAK,EAAG,KAAK,QAAQ;AAAA,EAChD;AAAA,EAEO,UAAU,OAAe,SAAoB;AAClD,UAAM,YAAY,KAAK,gBAAgB,IAAI,KAAK,KAAK,CAAC;AACtD,cAAU,QAAQ,cAAY;AAC5B,UAAI;AACF,iBAAS,OAAO;AAAA,MAClB,SAAS,OAAO;AACd,gBAAQ,MAAM,+BAA+B,KAAK,KAAK,KAAK;AAAA,MAC9D;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,sBAAsB,QAOrB;AACP,QAAI,CAAC,KAAK,aAAa,cAAc,IAAI,OAAO,GAAG,GAAG;AACpD,WAAK,aAAa,cAAc,IAAI,OAAO,GAAG;AAE9C,YAAM,WAAW,KAAK;AAAA,QACpB,OAAO;AAAA,QACP,OAAO;AAAA,QACP,OAAO;AAAA,QACP,OAAO;AAAA,QACP,OAAO;AAAA,MACT;AAEA,WAAK,cAAc,IAAI,OAAO,KAAK,QAAQ;AAAA,IAC7C;AAAA,EACF;AAAA,EAEO,eAAqB;AAC1B,SAAK,eAAe;AAAA,MAClB,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,kBAAkB,KAAK,IAAI;AAAA,MAC3B,eAAe,oBAAI,IAAI;AAAA,MACvB,gBAAgB;AAAA,MAChB,eAAe;AAAA,MACf,QAAQ,EAAE,GAAG,KAAK,aAAa,OAAO;AAAA;AAAA,IACxC;AACA,SAAK,cAAc,MAAM;AAAA,EAC3B;AAAA,EAEO,aAAa,QAAuC;AACzD,SAAK,aAAa,SAAS,EAAE,GAAG,KAAK,aAAa,QAAQ,GAAG,OAAO;AAAA,EACtE;AAAA,EAEO,kBAAwC;AAC7C,WAAO,EAAE,GAAG,KAAK,aAAa;AAAA,EAChC;AACF;AAEO,MAAM,wBAAwB,IAAI,sBAAsB;AAExD,MAAM,0BAA0B,CACrC,aAAsB,OACtB,YACG,sBAAsB,kBAAkB,YAAY,OAAO;AAEzD,MAAM,6BAA6B,CAAC,YACzC,sBAAsB,2BAA2B,OAAO;AAEnD,MAAM,oBAAoB,CAAC,OAAe,YAC/C,sBAAsB,UAAU,OAAO,OAAO;AAEzC,MAAM,uBAAuB,MAAM,sBAAsB,aAAa;AACtE,MAAM,0BAA0B,MACrC,sBAAsB,gBAAgB;",
  "names": []
  }
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/tools/ArchitectTool/ArchitectTool.tsx"],
- "sourcesContent": ["import type { TextBlock } from '@anthropic-ai/sdk/resources/index.mjs'\nimport { Box } from 'ink'\nimport * as React from 'react'\nimport { z } from 'zod'\nimport type { Tool } from '@tool'\nimport { FallbackToolUseRejectedMessage } from '@components/FallbackToolUseRejectedMessage'\nimport { HighlightedCode } from '@components/HighlightedCode'\nimport { getContext } from '@context'\nimport { Message, query } from '@query'\nimport { lastX } from '@utils/generators'\nimport { createUserMessage } from '@utils/messages'\nimport { BashTool } from '@tools/BashTool/BashTool'\nimport { FileReadTool } from '@tools/FileReadTool/FileReadTool'\nimport { FileWriteTool } from '@tools/FileWriteTool/FileWriteTool'\nimport { GlobTool } from '@tools/GlobTool/GlobTool'\nimport { GrepTool } from '@tools/GrepTool/GrepTool'\nimport { LSTool } from '@tools/lsTool/lsTool'\nimport { ARCHITECT_SYSTEM_PROMPT, DESCRIPTION } from './prompt'\n\nconst FS_EXPLORATION_TOOLS: Tool[] = [\n BashTool,\n LSTool,\n FileReadTool,\n FileWriteTool,\n GlobTool,\n GrepTool,\n]\n\nconst inputSchema = z.strictObject({\n prompt: z\n .string()\n .describe('The technical request or coding task to analyze'),\n context: z\n .string()\n .describe('Optional context from previous conversation or system state')\n .optional(),\n})\n\nexport const ArchitectTool = {\n name: 'Architect',\n async description() {\n return DESCRIPTION\n },\n inputSchema,\n isReadOnly() {\n return true\n },\n isConcurrencySafe() {\n return true // ArchitectTool is read-only, safe for concurrent execution\n },\n userFacingName() {\n return 'Architect'\n },\n async isEnabled() {\n return false\n },\n needsPermissions() {\n return false\n },\n async *call({ prompt, context }, toolUseContext) {\n const content = context\n ? `<context>${context}</context>\\n\\n${prompt}`\n : prompt\n\n const userMessage = createUserMessage(content)\n\n const messages: Message[] = [userMessage]\n\n // We only allow the file exploration tools to be used in the architect tool\n const allowedTools = (toolUseContext.options?.tools ?? 
[]).filter(_ =>\n FS_EXPLORATION_TOOLS.map(_ => _.name).includes(_.name),\n )\n\n // Create a dummy canUseTool function since this tool controls its own tool usage\n const canUseTool = async () => ({ result: true as const })\n\n const lastResponse = await lastX(\n query(\n messages,\n [ARCHITECT_SYSTEM_PROMPT],\n await getContext(),\n canUseTool,\n {\n ...toolUseContext,\n setToolJSX: () => {}, // Dummy function since ArchitectTool doesn't use UI\n options: { \n commands: toolUseContext.options?.commands || [],\n forkNumber: toolUseContext.options?.forkNumber || 0,\n messageLogName: toolUseContext.options?.messageLogName || 'default',\n verbose: toolUseContext.options?.verbose || false,\n safeMode: toolUseContext.options?.safeMode || false,\n maxThinkingTokens: toolUseContext.options?.maxThinkingTokens || 0,\n ...toolUseContext.options, \n tools: allowedTools \n },\n },\n ),\n )\n\n if (lastResponse.type !== 'assistant') {\n throw new Error(`Invalid response from API`)\n }\n\n const data = lastResponse.message.content.filter(_ => _.type === 'text')\n yield {\n type: 'result',\n data,\n resultForAssistant: this.renderResultForAssistant(data),\n }\n },\n async prompt() {\n return DESCRIPTION\n },\n renderResultForAssistant(data: TextBlock[]): string {\n return data.map(block => block.text).join('\\n')\n },\n renderToolUseMessage(input) {\n return Object.entries(input)\n .map(([key, value]) => `${key}: ${JSON.stringify(value)}`)\n .join(', ')\n },\n renderToolResultMessage(content) {\n return (\n <Box flexDirection=\"column\" gap={1}>\n <HighlightedCode\n code={content.map(_ => _.text).join('\\n')}\n language=\"markdown\"\n />\n </Box>\n )\n },\n renderToolUseRejectedMessage() {\n return <FallbackToolUseRejectedMessage />\n },\n} satisfies Tool<typeof inputSchema, TextBlock[]>\n"],
+ "sourcesContent": ["import type { TextBlock } from '@anthropic-ai/sdk/resources/index.mjs'\nimport { Box } from 'ink'\nimport * as React from 'react'\nimport { z } from 'zod'\nimport type { Tool } from '@tool'\nimport { FallbackToolUseRejectedMessage } from '@components/FallbackToolUseRejectedMessage'\nimport { HighlightedCode } from '@components/HighlightedCode'\nimport { getContext } from '@context'\nimport { Message, query } from '@query'\nimport { lastX } from '@utils/generators'\nimport { createUserMessage } from '@utils/messages'\nimport { BashTool } from '@tools/BashTool/BashTool'\nimport { FileReadTool } from '@tools/FileReadTool/FileReadTool'\nimport { FileWriteTool } from '@tools/FileWriteTool/FileWriteTool'\nimport { GlobTool } from '@tools/GlobTool/GlobTool'\nimport { GrepTool } from '@tools/GrepTool/GrepTool'\nimport { LSTool } from '@tools/lsTool/lsTool'\nimport { ARCHITECT_SYSTEM_PROMPT, DESCRIPTION } from './prompt'\n\nconst FS_EXPLORATION_TOOLS: Tool[] = [\n BashTool,\n LSTool,\n FileReadTool,\n FileWriteTool,\n GlobTool,\n GrepTool,\n]\n\nconst inputSchema = z.strictObject({\n prompt: z\n .string()\n .describe('The technical request or coding task to analyze'),\n context: z\n .string()\n .describe('Optional context from previous conversation or system state')\n .optional(),\n})\n\nexport const ArchitectTool = {\n name: 'Architect',\n async description() {\n return DESCRIPTION\n },\n inputSchema,\n isReadOnly() {\n return true\n },\n isConcurrencySafe() {\n return true // ArchitectTool is read-only, safe for concurrent execution\n },\n userFacingName() {\n return 'Architect'\n },\n async isEnabled() {\n return false\n },\n needsPermissions() {\n return false\n },\n async *call({ prompt, context }, toolUseContext) {\n const content = context\n ? `<context>${context}</context>\\n\\n${prompt}`\n : prompt\n\n const userMessage = createUserMessage(content)\n\n const messages: Message[] = [userMessage]\n\n // We only allow the file exploration tools to be used in the architect tool\n const allowedTools = (toolUseContext.options?.tools ?? 
[]).filter(_ =>\n FS_EXPLORATION_TOOLS.map(_ => _.name).includes(_.name),\n )\n\n // Create a dummy canUseTool function since this tool controls its own tool usage\n const canUseTool = async () => ({ result: true as const })\n\n const lastResponse = await lastX(\n query(\n messages,\n [ARCHITECT_SYSTEM_PROMPT],\n await getContext(),\n canUseTool,\n {\n ...toolUseContext,\n setToolJSX: () => {}, // Dummy function since ArchitectTool doesn't use UI\n options: {\n commands: toolUseContext.options?.commands || [],\n forkNumber: toolUseContext.options?.forkNumber || 0,\n messageLogName: toolUseContext.options?.messageLogName || 'default',\n verbose: toolUseContext.options?.verbose || false,\n safeMode: toolUseContext.options?.safeMode || false,\n maxThinkingTokens: toolUseContext.options?.maxThinkingTokens || 0,\n ...toolUseContext.options,\n tools: allowedTools,\n },\n },\n ),\n )\n\n if (lastResponse.type !== 'assistant') {\n throw new Error(`Invalid response from API`)\n }\n\n const data = lastResponse.message.content.filter(_ => _.type === 'text')\n yield {\n type: 'result',\n data,\n resultForAssistant: this.renderResultForAssistant(data),\n }\n },\n async prompt() {\n return DESCRIPTION\n },\n renderResultForAssistant(data: TextBlock[]): string {\n return data.map(block => block.text).join('\\n')\n },\n renderToolUseMessage(input) {\n return Object.entries(input)\n .map(([key, value]) => `${key}: ${JSON.stringify(value)}`)\n .join(', ')\n },\n renderToolResultMessage(content) {\n return (\n <Box flexDirection=\"column\" gap={1}>\n <HighlightedCode\n code={content.map(_ => _.text).join('\\n')}\n language=\"markdown\"\n />\n </Box>\n )\n },\n renderToolUseRejectedMessage() {\n return <FallbackToolUseRejectedMessage />\n },\n} satisfies Tool<typeof inputSchema, TextBlock[]>\n"],
  "mappings": "AACA,SAAS,WAAW;AACpB,YAAY,WAAW;AACvB,SAAS,SAAS;AAElB,SAAS,sCAAsC;AAC/C,SAAS,uBAAuB;AAChC,SAAS,kBAAkB;AAC3B,SAAkB,aAAa;AAC/B,SAAS,aAAa;AACtB,SAAS,yBAAyB;AAClC,SAAS,gBAAgB;AACzB,SAAS,oBAAoB;AAC7B,SAAS,qBAAqB;AAC9B,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AACzB,SAAS,cAAc;AACvB,SAAS,yBAAyB,mBAAmB;AAErD,MAAM,uBAA+B;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,cAAc,EAAE,aAAa;AAAA,EACjC,QAAQ,EACL,OAAO,EACP,SAAS,iDAAiD;AAAA,EAC7D,SAAS,EACN,OAAO,EACP,SAAS,6DAA6D,EACtE,SAAS;AACd,CAAC;AAEM,MAAM,gBAAgB;AAAA,EAC3B,MAAM;AAAA,EACN,MAAM,cAAc;AAClB,WAAO;AAAA,EACT;AAAA,EACA;AAAA,EACA,aAAa;AACX,WAAO;AAAA,EACT;AAAA,EACA,oBAAoB;AAClB,WAAO;AAAA,EACT;AAAA,EACA,iBAAiB;AACf,WAAO;AAAA,EACT;AAAA,EACA,MAAM,YAAY;AAChB,WAAO;AAAA,EACT;AAAA,EACA,mBAAmB;AACjB,WAAO;AAAA,EACT;AAAA,EACA,OAAO,KAAK,EAAE,QAAQ,QAAQ,GAAG,gBAAgB;AAC/C,UAAM,UAAU,UACZ,YAAY,OAAO;AAAA;AAAA,EAAiB,MAAM,KAC1C;AAEJ,UAAM,cAAc,kBAAkB,OAAO;AAE7C,UAAM,WAAsB,CAAC,WAAW;AAGxC,UAAM,gBAAgB,eAAe,SAAS,SAAS,CAAC,GAAG;AAAA,MAAO,OAChE,qBAAqB,IAAI,CAAAA,OAAKA,GAAE,IAAI,EAAE,SAAS,EAAE,IAAI;AAAA,IACvD;AAGA,UAAM,aAAa,aAAa,EAAE,QAAQ,KAAc;AAExD,UAAM,eAAe,MAAM;AAAA,MACzB;AAAA,QACE;AAAA,QACA,CAAC,uBAAuB;AAAA,QACxB,MAAM,WAAW;AAAA,QACjB;AAAA,QACA;AAAA,UACE,GAAG;AAAA,UACH,YAAY,MAAM;AAAA,UAAC;AAAA;AAAA,UACnB,SAAS;AAAA,YACP,UAAU,eAAe,SAAS,YAAY,CAAC;AAAA,YAC/C,YAAY,eAAe,SAAS,cAAc;AAAA,YAClD,gBAAgB,eAAe,SAAS,kBAAkB;AAAA,YAC1D,SAAS,eAAe,SAAS,WAAW;AAAA,YAC5C,UAAU,eAAe,SAAS,YAAY;AAAA,YAC9C,mBAAmB,eAAe,SAAS,qBAAqB;AAAA,YAChE,GAAG,eAAe;AAAA,YAClB,OAAO;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,aAAa,SAAS,aAAa;AACrC,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,UAAM,OAAO,aAAa,QAAQ,QAAQ,OAAO,OAAK,EAAE,SAAS,MAAM;AACvE,UAAM;AAAA,MACJ,MAAM;AAAA,MACN;AAAA,MACA,oBAAoB,KAAK,yBAAyB,IAAI;AAAA,IACxD;AAAA,EACF;AAAA,EACA,MAAM,SAAS;AACb,WAAO;AAAA,EACT;AAAA,EACA,yBAAyB,MAA2B;AAClD,WAAO,KAAK,IAAI,WAAS,MAAM,IAAI,EAAE,KAAK,IAAI;AAAA,EAChD;AAAA,EACA,qBAAqB,OAAO;AAC1B,WAAO,OAAO,QAAQ,KAAK,EACxB,IAAI,CAAC,CAAC,KAAK,KAAK,MAAM,GAAG,GAAG,KAAK,KAAK,UAAU,KAAK,CAAC,EAAE,EACxD,KAAK,IAAI;AAAA,EACd;AAAA,EACA,wBAAwB,SAAS;AAC/B,WACE,oCAAC,OAAI,eAAc,UAAS,KAAK,KAC/B;AAAA,MAAC;AAAA;AAAA,QACC,MAAM,QAAQ,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,IAAI;AAAA,QACxC,UAAS;AAAA;AAAA,IACX,CACF;AAAA,EAEJ;AAAA,EACA,+BAA+B;AAC7B,WAAO,oCAAC,oCAA+B;AAAA,EACzC;AACF;",
  "names": ["_"]
  }
@@ -84,11 +84,7 @@ Question: What are the most effective React optimization techniques for handling
  needsPermissions() {
  return false;
  },
- async validateInput({
- question,
- expert_model,
- chat_session_id
- }, context) {
+ async validateInput({ question, expert_model, chat_session_id }, context) {
  if (!question.trim()) {
  return { result: false, message: "Question cannot be empty" };
  }
@@ -118,7 +114,10 @@ Question: What are the most effective React optimization techniques for handling
  };
  }
  } catch (e) {
- debugLogger.error("AskExpertModel", { message: "Could not determine current model", error: e });
+ debugLogger.error("AskExpertModel", {
+ message: "Could not determine current model",
+ error: e
+ });
  }
  try {
  const modelManager = getModelManager();
@@ -222,7 +221,10 @@ ${output.expertAnswer}`;
  const newSession = createExpertChatSession(expertModel);
  sessionId = newSession.sessionId;
  } catch (createError) {
- console.error("Failed to create fallback expert chat session:", createError);
+ console.error(
+ "Failed to create fallback expert chat session:",
+ createError
+ );
  logError(createError);
  throw new Error("Unable to create or load chat session");
  }
@@ -275,7 +277,11 @@ ${output.expertAnswer}`;
  const timeoutMs = 3e5;
  const timeoutPromise = new Promise((_, reject) => {
  setTimeout(() => {
- reject(new Error(`Expert model query timed out after ${timeoutMs / 1e3}s`));
+ reject(
+ new Error(
+ `Expert model query timed out after ${timeoutMs / 1e3}s`
+ )
+ );
  }, timeoutMs);
  });
  response = await Promise.race([