@donggui/core 1.5.4-donggui.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (269)
  1. package/LICENSE +21 -0
  2. package/README.md +9 -0
  3. package/dist/es/agent/agent.mjs +709 -0
  4. package/dist/es/agent/agent.mjs.map +1 -0
  5. package/dist/es/agent/common.mjs +0 -0
  6. package/dist/es/agent/execution-session.mjs +41 -0
  7. package/dist/es/agent/execution-session.mjs.map +1 -0
  8. package/dist/es/agent/index.mjs +6 -0
  9. package/dist/es/agent/task-builder.mjs +330 -0
  10. package/dist/es/agent/task-builder.mjs.map +1 -0
  11. package/dist/es/agent/task-cache.mjs +186 -0
  12. package/dist/es/agent/task-cache.mjs.map +1 -0
  13. package/dist/es/agent/tasks.mjs +422 -0
  14. package/dist/es/agent/tasks.mjs.map +1 -0
  15. package/dist/es/agent/ui-utils.mjs +91 -0
  16. package/dist/es/agent/ui-utils.mjs.map +1 -0
  17. package/dist/es/agent/utils.mjs +198 -0
  18. package/dist/es/agent/utils.mjs.map +1 -0
  19. package/dist/es/ai-model/auto-glm/actions.mjs +224 -0
  20. package/dist/es/ai-model/auto-glm/actions.mjs.map +1 -0
  21. package/dist/es/ai-model/auto-glm/index.mjs +6 -0
  22. package/dist/es/ai-model/auto-glm/parser.mjs +239 -0
  23. package/dist/es/ai-model/auto-glm/parser.mjs.map +1 -0
  24. package/dist/es/ai-model/auto-glm/planning.mjs +71 -0
  25. package/dist/es/ai-model/auto-glm/planning.mjs.map +1 -0
  26. package/dist/es/ai-model/auto-glm/prompt.mjs +222 -0
  27. package/dist/es/ai-model/auto-glm/prompt.mjs.map +1 -0
  28. package/dist/es/ai-model/auto-glm/util.mjs +9 -0
  29. package/dist/es/ai-model/auto-glm/util.mjs.map +1 -0
  30. package/dist/es/ai-model/conversation-history.mjs +195 -0
  31. package/dist/es/ai-model/conversation-history.mjs.map +1 -0
  32. package/dist/es/ai-model/index.mjs +11 -0
  33. package/dist/es/ai-model/inspect.mjs +386 -0
  34. package/dist/es/ai-model/inspect.mjs.map +1 -0
  35. package/dist/es/ai-model/llm-planning.mjs +233 -0
  36. package/dist/es/ai-model/llm-planning.mjs.map +1 -0
  37. package/dist/es/ai-model/prompt/common.mjs +7 -0
  38. package/dist/es/ai-model/prompt/common.mjs.map +1 -0
  39. package/dist/es/ai-model/prompt/describe.mjs +66 -0
  40. package/dist/es/ai-model/prompt/describe.mjs.map +1 -0
  41. package/dist/es/ai-model/prompt/extraction.mjs +129 -0
  42. package/dist/es/ai-model/prompt/extraction.mjs.map +1 -0
  43. package/dist/es/ai-model/prompt/llm-locator.mjs +51 -0
  44. package/dist/es/ai-model/prompt/llm-locator.mjs.map +1 -0
  45. package/dist/es/ai-model/prompt/llm-planning.mjs +364 -0
  46. package/dist/es/ai-model/prompt/llm-planning.mjs.map +1 -0
  47. package/dist/es/ai-model/prompt/llm-section-locator.mjs +44 -0
  48. package/dist/es/ai-model/prompt/llm-section-locator.mjs.map +1 -0
  49. package/dist/es/ai-model/prompt/order-sensitive-judge.mjs +35 -0
  50. package/dist/es/ai-model/prompt/order-sensitive-judge.mjs.map +1 -0
  51. package/dist/es/ai-model/prompt/playwright-generator.mjs +117 -0
  52. package/dist/es/ai-model/prompt/playwright-generator.mjs.map +1 -0
  53. package/dist/es/ai-model/prompt/ui-tars-planning.mjs +36 -0
  54. package/dist/es/ai-model/prompt/ui-tars-planning.mjs.map +1 -0
  55. package/dist/es/ai-model/prompt/util.mjs +59 -0
  56. package/dist/es/ai-model/prompt/util.mjs.map +1 -0
  57. package/dist/es/ai-model/prompt/yaml-generator.mjs +219 -0
  58. package/dist/es/ai-model/prompt/yaml-generator.mjs.map +1 -0
  59. package/dist/es/ai-model/service-caller/index.mjs +466 -0
  60. package/dist/es/ai-model/service-caller/index.mjs.map +1 -0
  61. package/dist/es/ai-model/ui-tars-planning.mjs +249 -0
  62. package/dist/es/ai-model/ui-tars-planning.mjs.map +1 -0
  63. package/dist/es/common.mjs +371 -0
  64. package/dist/es/common.mjs.map +1 -0
  65. package/dist/es/device/device-options.mjs +0 -0
  66. package/dist/es/device/index.mjs +300 -0
  67. package/dist/es/device/index.mjs.map +1 -0
  68. package/dist/es/dump/html-utils.mjs +211 -0
  69. package/dist/es/dump/html-utils.mjs.map +1 -0
  70. package/dist/es/dump/image-restoration.mjs +43 -0
  71. package/dist/es/dump/image-restoration.mjs.map +1 -0
  72. package/dist/es/dump/index.mjs +3 -0
  73. package/dist/es/index.mjs +15 -0
  74. package/dist/es/index.mjs.map +1 -0
  75. package/dist/es/report-generator.mjs +134 -0
  76. package/dist/es/report-generator.mjs.map +1 -0
  77. package/dist/es/report.mjs +111 -0
  78. package/dist/es/report.mjs.map +1 -0
  79. package/dist/es/screenshot-item.mjs +105 -0
  80. package/dist/es/screenshot-item.mjs.map +1 -0
  81. package/dist/es/service/index.mjs +256 -0
  82. package/dist/es/service/index.mjs.map +1 -0
  83. package/dist/es/service/utils.mjs +15 -0
  84. package/dist/es/service/utils.mjs.map +1 -0
  85. package/dist/es/skill/index.mjs +38 -0
  86. package/dist/es/skill/index.mjs.map +1 -0
  87. package/dist/es/task-runner.mjs +258 -0
  88. package/dist/es/task-runner.mjs.map +1 -0
  89. package/dist/es/task-timing.mjs +12 -0
  90. package/dist/es/task-timing.mjs.map +1 -0
  91. package/dist/es/tree.mjs +13 -0
  92. package/dist/es/tree.mjs.map +1 -0
  93. package/dist/es/types.mjs +196 -0
  94. package/dist/es/types.mjs.map +1 -0
  95. package/dist/es/utils.mjs +218 -0
  96. package/dist/es/utils.mjs.map +1 -0
  97. package/dist/es/yaml/builder.mjs +13 -0
  98. package/dist/es/yaml/builder.mjs.map +1 -0
  99. package/dist/es/yaml/index.mjs +4 -0
  100. package/dist/es/yaml/player.mjs +418 -0
  101. package/dist/es/yaml/player.mjs.map +1 -0
  102. package/dist/es/yaml/utils.mjs +73 -0
  103. package/dist/es/yaml/utils.mjs.map +1 -0
  104. package/dist/es/yaml.mjs +0 -0
  105. package/dist/lib/agent/agent.js +757 -0
  106. package/dist/lib/agent/agent.js.map +1 -0
  107. package/dist/lib/agent/common.js +5 -0
  108. package/dist/lib/agent/execution-session.js +75 -0
  109. package/dist/lib/agent/execution-session.js.map +1 -0
  110. package/dist/lib/agent/index.js +81 -0
  111. package/dist/lib/agent/index.js.map +1 -0
  112. package/dist/lib/agent/task-builder.js +367 -0
  113. package/dist/lib/agent/task-builder.js.map +1 -0
  114. package/dist/lib/agent/task-cache.js +238 -0
  115. package/dist/lib/agent/task-cache.js.map +1 -0
  116. package/dist/lib/agent/tasks.js +465 -0
  117. package/dist/lib/agent/tasks.js.map +1 -0
  118. package/dist/lib/agent/ui-utils.js +143 -0
  119. package/dist/lib/agent/ui-utils.js.map +1 -0
  120. package/dist/lib/agent/utils.js +275 -0
  121. package/dist/lib/agent/utils.js.map +1 -0
  122. package/dist/lib/ai-model/auto-glm/actions.js +258 -0
  123. package/dist/lib/ai-model/auto-glm/actions.js.map +1 -0
  124. package/dist/lib/ai-model/auto-glm/index.js +66 -0
  125. package/dist/lib/ai-model/auto-glm/index.js.map +1 -0
  126. package/dist/lib/ai-model/auto-glm/parser.js +282 -0
  127. package/dist/lib/ai-model/auto-glm/parser.js.map +1 -0
  128. package/dist/lib/ai-model/auto-glm/planning.js +105 -0
  129. package/dist/lib/ai-model/auto-glm/planning.js.map +1 -0
  130. package/dist/lib/ai-model/auto-glm/prompt.js +259 -0
  131. package/dist/lib/ai-model/auto-glm/prompt.js.map +1 -0
  132. package/dist/lib/ai-model/auto-glm/util.js +46 -0
  133. package/dist/lib/ai-model/auto-glm/util.js.map +1 -0
  134. package/dist/lib/ai-model/conversation-history.js +229 -0
  135. package/dist/lib/ai-model/conversation-history.js.map +1 -0
  136. package/dist/lib/ai-model/index.js +125 -0
  137. package/dist/lib/ai-model/index.js.map +1 -0
  138. package/dist/lib/ai-model/inspect.js +429 -0
  139. package/dist/lib/ai-model/inspect.js.map +1 -0
  140. package/dist/lib/ai-model/llm-planning.js +270 -0
  141. package/dist/lib/ai-model/llm-planning.js.map +1 -0
  142. package/dist/lib/ai-model/prompt/common.js +41 -0
  143. package/dist/lib/ai-model/prompt/common.js.map +1 -0
  144. package/dist/lib/ai-model/prompt/describe.js +100 -0
  145. package/dist/lib/ai-model/prompt/describe.js.map +1 -0
  146. package/dist/lib/ai-model/prompt/extraction.js +169 -0
  147. package/dist/lib/ai-model/prompt/extraction.js.map +1 -0
  148. package/dist/lib/ai-model/prompt/llm-locator.js +88 -0
  149. package/dist/lib/ai-model/prompt/llm-locator.js.map +1 -0
  150. package/dist/lib/ai-model/prompt/llm-planning.js +401 -0
  151. package/dist/lib/ai-model/prompt/llm-planning.js.map +1 -0
  152. package/dist/lib/ai-model/prompt/llm-section-locator.js +81 -0
  153. package/dist/lib/ai-model/prompt/llm-section-locator.js.map +1 -0
  154. package/dist/lib/ai-model/prompt/order-sensitive-judge.js +72 -0
  155. package/dist/lib/ai-model/prompt/order-sensitive-judge.js.map +1 -0
  156. package/dist/lib/ai-model/prompt/playwright-generator.js +178 -0
  157. package/dist/lib/ai-model/prompt/playwright-generator.js.map +1 -0
  158. package/dist/lib/ai-model/prompt/ui-tars-planning.js +73 -0
  159. package/dist/lib/ai-model/prompt/ui-tars-planning.js.map +1 -0
  160. package/dist/lib/ai-model/prompt/util.js +105 -0
  161. package/dist/lib/ai-model/prompt/util.js.map +1 -0
  162. package/dist/lib/ai-model/prompt/yaml-generator.js +280 -0
  163. package/dist/lib/ai-model/prompt/yaml-generator.js.map +1 -0
  164. package/dist/lib/ai-model/service-caller/index.js +531 -0
  165. package/dist/lib/ai-model/service-caller/index.js.map +1 -0
  166. package/dist/lib/ai-model/ui-tars-planning.js +283 -0
  167. package/dist/lib/ai-model/ui-tars-planning.js.map +1 -0
  168. package/dist/lib/common.js +480 -0
  169. package/dist/lib/common.js.map +1 -0
  170. package/dist/lib/device/device-options.js +20 -0
  171. package/dist/lib/device/device-options.js.map +1 -0
  172. package/dist/lib/device/index.js +418 -0
  173. package/dist/lib/device/index.js.map +1 -0
  174. package/dist/lib/dump/html-utils.js +281 -0
  175. package/dist/lib/dump/html-utils.js.map +1 -0
  176. package/dist/lib/dump/image-restoration.js +77 -0
  177. package/dist/lib/dump/image-restoration.js.map +1 -0
  178. package/dist/lib/dump/index.js +60 -0
  179. package/dist/lib/dump/index.js.map +1 -0
  180. package/dist/lib/index.js +146 -0
  181. package/dist/lib/index.js.map +1 -0
  182. package/dist/lib/report-generator.js +172 -0
  183. package/dist/lib/report-generator.js.map +1 -0
  184. package/dist/lib/report.js +145 -0
  185. package/dist/lib/report.js.map +1 -0
  186. package/dist/lib/screenshot-item.js +139 -0
  187. package/dist/lib/screenshot-item.js.map +1 -0
  188. package/dist/lib/service/index.js +290 -0
  189. package/dist/lib/service/index.js.map +1 -0
  190. package/dist/lib/service/utils.js +49 -0
  191. package/dist/lib/service/utils.js.map +1 -0
  192. package/dist/lib/skill/index.js +72 -0
  193. package/dist/lib/skill/index.js.map +1 -0
  194. package/dist/lib/task-runner.js +295 -0
  195. package/dist/lib/task-runner.js.map +1 -0
  196. package/dist/lib/task-timing.js +46 -0
  197. package/dist/lib/task-timing.js.map +1 -0
  198. package/dist/lib/tree.js +53 -0
  199. package/dist/lib/tree.js.map +1 -0
  200. package/dist/lib/types.js +285 -0
  201. package/dist/lib/types.js.map +1 -0
  202. package/dist/lib/utils.js +297 -0
  203. package/dist/lib/utils.js.map +1 -0
  204. package/dist/lib/yaml/builder.js +57 -0
  205. package/dist/lib/yaml/builder.js.map +1 -0
  206. package/dist/lib/yaml/index.js +81 -0
  207. package/dist/lib/yaml/index.js.map +1 -0
  208. package/dist/lib/yaml/player.js +452 -0
  209. package/dist/lib/yaml/player.js.map +1 -0
  210. package/dist/lib/yaml/utils.js +126 -0
  211. package/dist/lib/yaml/utils.js.map +1 -0
  212. package/dist/lib/yaml.js +20 -0
  213. package/dist/lib/yaml.js.map +1 -0
  214. package/dist/types/agent/agent.d.ts +190 -0
  215. package/dist/types/agent/common.d.ts +0 -0
  216. package/dist/types/agent/execution-session.d.ts +36 -0
  217. package/dist/types/agent/index.d.ts +10 -0
  218. package/dist/types/agent/task-builder.d.ts +34 -0
  219. package/dist/types/agent/task-cache.d.ts +48 -0
  220. package/dist/types/agent/tasks.d.ts +70 -0
  221. package/dist/types/agent/ui-utils.d.ts +14 -0
  222. package/dist/types/agent/utils.d.ts +29 -0
  223. package/dist/types/ai-model/auto-glm/actions.d.ts +77 -0
  224. package/dist/types/ai-model/auto-glm/index.d.ts +6 -0
  225. package/dist/types/ai-model/auto-glm/parser.d.ts +18 -0
  226. package/dist/types/ai-model/auto-glm/planning.d.ts +10 -0
  227. package/dist/types/ai-model/auto-glm/prompt.d.ts +27 -0
  228. package/dist/types/ai-model/auto-glm/util.d.ts +13 -0
  229. package/dist/types/ai-model/conversation-history.d.ts +105 -0
  230. package/dist/types/ai-model/index.d.ts +14 -0
  231. package/dist/types/ai-model/inspect.d.ts +58 -0
  232. package/dist/types/ai-model/llm-planning.d.ts +19 -0
  233. package/dist/types/ai-model/prompt/common.d.ts +2 -0
  234. package/dist/types/ai-model/prompt/describe.d.ts +1 -0
  235. package/dist/types/ai-model/prompt/extraction.d.ts +7 -0
  236. package/dist/types/ai-model/prompt/llm-locator.d.ts +3 -0
  237. package/dist/types/ai-model/prompt/llm-planning.d.ts +10 -0
  238. package/dist/types/ai-model/prompt/llm-section-locator.d.ts +3 -0
  239. package/dist/types/ai-model/prompt/order-sensitive-judge.d.ts +2 -0
  240. package/dist/types/ai-model/prompt/playwright-generator.d.ts +26 -0
  241. package/dist/types/ai-model/prompt/ui-tars-planning.d.ts +2 -0
  242. package/dist/types/ai-model/prompt/util.d.ts +33 -0
  243. package/dist/types/ai-model/prompt/yaml-generator.d.ts +100 -0
  244. package/dist/types/ai-model/service-caller/index.d.ts +49 -0
  245. package/dist/types/ai-model/ui-tars-planning.d.ts +72 -0
  246. package/dist/types/common.d.ts +288 -0
  247. package/dist/types/device/device-options.d.ts +142 -0
  248. package/dist/types/device/index.d.ts +2315 -0
  249. package/dist/types/dump/html-utils.d.ts +52 -0
  250. package/dist/types/dump/image-restoration.d.ts +6 -0
  251. package/dist/types/dump/index.d.ts +5 -0
  252. package/dist/types/index.d.ts +17 -0
  253. package/dist/types/report-generator.d.ts +48 -0
  254. package/dist/types/report.d.ts +15 -0
  255. package/dist/types/screenshot-item.d.ts +66 -0
  256. package/dist/types/service/index.d.ts +23 -0
  257. package/dist/types/service/utils.d.ts +2 -0
  258. package/dist/types/skill/index.d.ts +25 -0
  259. package/dist/types/task-runner.d.ts +48 -0
  260. package/dist/types/task-timing.d.ts +8 -0
  261. package/dist/types/tree.d.ts +4 -0
  262. package/dist/types/types.d.ts +645 -0
  263. package/dist/types/utils.d.ts +40 -0
  264. package/dist/types/yaml/builder.d.ts +2 -0
  265. package/dist/types/yaml/index.d.ts +4 -0
  266. package/dist/types/yaml/player.d.ts +34 -0
  267. package/dist/types/yaml/utils.d.ts +9 -0
  268. package/dist/types/yaml.d.ts +203 -0
  269. package/package.json +111 -0
package/dist/es/ai-model/service-caller/index.mjs
@@ -0,0 +1,466 @@
+ import { MIDSCENE_LANGFUSE_DEBUG, MIDSCENE_LANGSMITH_DEBUG, MIDSCENE_MODEL_MAX_TOKENS, OPENAI_MAX_TOKENS, globalConfigManager } from "@midscene/shared/env";
+ import { getDebug } from "@midscene/shared/logger";
+ import { assert, ifInBrowser } from "@midscene/shared/utils";
+ import { jsonrepair } from "jsonrepair";
+ import openai_0 from "openai";
+ import { isAutoGLM, isUITars } from "../auto-glm/util.mjs";
+ function _define_property(obj, key, value) {
+   if (key in obj) Object.defineProperty(obj, key, {
+     value: value,
+     enumerable: true,
+     configurable: true,
+     writable: true
+   });
+   else obj[key] = value;
+   return obj;
+ }
+ // Error class that preserves usage and rawResponse when AI call parsing fails
+ class AIResponseParseError extends Error {
+   constructor(message, rawResponse, usage){
+     super(message), _define_property(this, "usage", void 0), _define_property(this, "rawResponse", void 0);
+     this.name = 'AIResponseParseError';
+     this.rawResponse = rawResponse;
+     this.usage = usage;
+   }
+ }
+ async function createChatClient({ modelConfig }) {
+   const { socksProxy, httpProxy, modelName, openaiBaseURL, openaiApiKey, openaiExtraConfig, modelDescription, uiTarsModelVersion, modelFamily, createOpenAIClient, timeout } = modelConfig;
+   let proxyAgent;
+   const warnClient = getDebug('ai:call', {
+     console: true
+   });
+   const debugProxy = getDebug('ai:call:proxy');
+   const warnProxy = getDebug('ai:call:proxy', {
+     console: true
+   });
+   // Helper to sanitize proxy URL for logging (hide password); uses the URL API instead of regex to avoid ReDoS vulnerabilities
+   const sanitizeProxyUrl = (url)=>{
+     try {
+       const parsed = new URL(url);
+       if (parsed.username) {
+         parsed.password = '****';
+         return parsed.href;
+       }
+       return url;
+     } catch {
+       return url;
+     }
+   };
+   if (httpProxy) {
+     debugProxy('using http proxy', sanitizeProxyUrl(httpProxy));
+     if (ifInBrowser) warnProxy('HTTP proxy is configured but not supported in browser environment');
+     else {
+       // Dynamic import with variable to avoid bundler static analysis
+       const moduleName = 'undici';
+       const { ProxyAgent } = await import(moduleName);
+       proxyAgent = new ProxyAgent({
+         uri: httpProxy
+       });
+     }
+   } else if (socksProxy) {
+     debugProxy('using socks proxy', sanitizeProxyUrl(socksProxy));
+     if (ifInBrowser) warnProxy('SOCKS proxy is configured but not supported in browser environment');
+     else try {
+       // Dynamic import with variable to avoid bundler static analysis
+       const moduleName = 'fetch-socks';
+       const { socksDispatcher } = await import(moduleName);
+       // Parse SOCKS proxy URL (e.g., socks5://127.0.0.1:1080) and validate host/port
+       const proxyUrl = new URL(socksProxy);
+       if (!proxyUrl.hostname) throw new Error('SOCKS proxy URL must include a valid hostname');
+       const port = Number.parseInt(proxyUrl.port, 10);
+       if (!proxyUrl.port || Number.isNaN(port)) throw new Error('SOCKS proxy URL must include a valid port');
+       const protocol = proxyUrl.protocol.replace(':', '');
+       const socksType = 'socks4' === protocol ? 4 : 'socks5' === protocol ? 5 : 5;
+       proxyAgent = socksDispatcher({
+         type: socksType,
+         host: proxyUrl.hostname,
+         port,
+         ...proxyUrl.username ? {
+           userId: decodeURIComponent(proxyUrl.username),
+           password: decodeURIComponent(proxyUrl.password || '')
+         } : {}
+       });
+       debugProxy('socks proxy configured successfully', {
+         type: socksType,
+         host: proxyUrl.hostname,
+         port: port
+       });
+     } catch (error) {
+       warnProxy('Failed to configure SOCKS proxy:', error);
+       throw new Error(`Invalid SOCKS proxy URL: ${socksProxy}. Expected format: socks4://host:port, socks5://host:port, or with authentication: socks5://user:pass@host:port`);
+     }
+   }
+   const openAIOptions = {
+     baseURL: openaiBaseURL,
+     apiKey: openaiApiKey,
+     // Use fetchOptions.dispatcher for the fetch-based SDK instead of httpAgent
+     ...proxyAgent ? {
+       fetchOptions: {
+         dispatcher: proxyAgent
+       }
+     } : {},
+     ...openaiExtraConfig,
+     ...'number' == typeof timeout ? {
+       timeout
+     } : {},
+     dangerouslyAllowBrowser: true
+   };
+   const baseOpenAI = new openai_0(openAIOptions);
+   let openai = baseOpenAI;
+   // LangSmith wrapper
+   if (openai && globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGSMITH_DEBUG)) {
+     if (ifInBrowser) throw new Error('langsmith is not supported in browser');
+     warnClient('DEBUGGING MODE: langsmith wrapper enabled');
+     // Use variable to prevent static analysis by bundlers
+     const langsmithModule = 'langsmith/wrappers';
+     const { wrapOpenAI } = await import(langsmithModule);
+     openai = wrapOpenAI(openai);
+   }
+   // Langfuse wrapper
+   if (openai && globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGFUSE_DEBUG)) {
+     if (ifInBrowser) throw new Error('langfuse is not supported in browser');
+     warnClient('DEBUGGING MODE: langfuse wrapper enabled');
+     // Use variable to prevent static analysis by bundlers
+     const langfuseModule = '@langfuse/openai';
+     const { observeOpenAI } = await import(langfuseModule);
+     openai = observeOpenAI(openai);
+   }
+   if (createOpenAIClient) {
+     const wrappedClient = await createOpenAIClient(baseOpenAI, openAIOptions);
+     if (wrappedClient) openai = wrappedClient;
+   }
+   return {
+     completion: openai.chat.completions,
+     modelName,
+     modelDescription,
+     uiTarsModelVersion,
+     modelFamily
+   };
+ }
+ async function callAI(messages, modelConfig, options) {
+   const { completion, modelName, modelDescription, uiTarsModelVersion, modelFamily } = await createChatClient({
+     modelConfig
+   });
+   const maxTokens = globalConfigManager.getEnvConfigValueAsNumber(MIDSCENE_MODEL_MAX_TOKENS) ?? globalConfigManager.getEnvConfigValueAsNumber(OPENAI_MAX_TOKENS);
+   const debugCall = getDebug('ai:call');
+   const warnCall = getDebug('ai:call', {
+     console: true
+   });
+   const debugProfileStats = getDebug('ai:profile:stats');
+   const debugProfileDetail = getDebug('ai:profile:detail');
+   const startTime = Date.now();
+   const temperature = (()=>{
+     if ('gpt-5' === modelFamily) return void debugCall('temperature is ignored for gpt-5');
+     return modelConfig.temperature ?? 0;
+   })();
+   const isStreaming = options?.stream && options?.onChunk;
+   let content;
+   let accumulated = '';
+   let accumulatedReasoning = '';
+   let usage;
+   let timeCost;
+   let requestId;
+   const buildUsageInfo = (usageData, requestId)=>{
+     if (!usageData) return;
+     const cachedInputTokens = usageData?.prompt_tokens_details?.cached_tokens;
+     return {
+       prompt_tokens: usageData.prompt_tokens ?? 0,
+       completion_tokens: usageData.completion_tokens ?? 0,
+       total_tokens: usageData.total_tokens ?? 0,
+       cached_input: cachedInputTokens ?? 0,
+       time_cost: timeCost ?? 0,
+       model_name: modelName,
+       model_description: modelDescription,
+       intent: modelConfig.intent,
+       request_id: requestId ?? void 0
+     };
+   };
+   const commonConfig = {
+     temperature,
+     stream: !!isStreaming,
+     max_tokens: maxTokens,
+     // qwen vl v2 specific config
+     ...'qwen2.5-vl' === modelFamily ? {
+       vl_high_resolution_images: true
+     } : {}
+   };
+   if (isAutoGLM(modelFamily)) {
+     commonConfig.top_p = 0.85;
+     commonConfig.frequency_penalty = 0.2;
+   }
+   // Merge deepThink (per-request boolean) with the model-level reasoning config; deepThink takes priority as a per-request override for reasoningEnabled
+   const mergedEnableReasoning = (()=>{
+     const normalizedDeepThink = options?.deepThink === 'unset' ? void 0 : options?.deepThink;
+     if (true === normalizedDeepThink) return true;
+     if (false === normalizedDeepThink) return false;
+     return modelConfig.reasoningEnabled;
+   })();
+   const { config: reasoningEffortConfig, debugMessage: reasoningEffortDebugMessage, warningMessage } = resolveReasoningConfig({
+     reasoningEnabled: mergedEnableReasoning,
+     reasoningEffort: modelConfig.reasoningEffort,
+     reasoningBudget: modelConfig.reasoningBudget,
+     modelFamily
+   });
+   if (reasoningEffortDebugMessage) debugCall(reasoningEffortDebugMessage);
+   if (warningMessage) warnCall(warningMessage);
+   // For GPT-5, add "detail": "original" to image inputs to get original-resolution images in reasoning content
+   const messagesWithImageDetail = (()=>{
+     if ('gpt-5' !== modelFamily) return messages;
+     return messages.map((msg)=>{
+       if (!Array.isArray(msg.content)) return msg;
+       const content = msg.content.map((part)=>{
+         if (part && 'image_url' === part.type && part.image_url?.url) return {
+           ...part,
+           image_url: {
+             ...part.image_url,
+             detail: 'original'
+           }
+         };
+         return part;
+       });
+       return {
+         ...msg,
+         content
+       };
+     });
+   })();
+   try {
+     debugCall(`sending ${isStreaming ? 'streaming ' : ''}request to ${modelName}`);
+     if (isStreaming) {
+       const stream = await completion.create({
+         model: modelName,
+         messages: messagesWithImageDetail,
+         ...commonConfig,
+         ...reasoningEffortConfig
+       }, {
+         stream: true,
+         ...options?.abortSignal ? {
+           signal: options.abortSignal
+         } : {}
+       });
+       requestId = stream._request_id;
+       for await (const chunk of stream){
+         const content = chunk.choices?.[0]?.delta?.content || '';
+         const reasoning_content = chunk.choices?.[0]?.delta?.reasoning_content || '';
+         // OpenAI may deliver usage info in a separate chunk
+         if (chunk.usage) usage = chunk.usage;
+         if (content || reasoning_content) {
+           accumulated += content;
+           accumulatedReasoning += reasoning_content;
+           const chunkData = {
+             content,
+             reasoning_content,
+             accumulated,
+             isComplete: false,
+             usage: void 0
+           };
+           options.onChunk(chunkData);
+         }
+         // Stream complete: estimate usage if the provider did not report it, then send the final chunk
+         if (chunk.choices?.[0]?.finish_reason) {
+           timeCost = Date.now() - startTime;
+           if (!usage) {
+             const estimatedTokens = Math.max(1, Math.floor(accumulated.length / 4));
+             usage = {
+               prompt_tokens: estimatedTokens,
+               completion_tokens: estimatedTokens,
+               total_tokens: 2 * estimatedTokens
+             };
+           }
+           const finalChunk = {
+             content: '',
+             accumulated,
+             reasoning_content: '',
+             isComplete: true,
+             usage: buildUsageInfo(usage, requestId)
+           };
+           options.onChunk(finalChunk);
+           break;
+         }
+       }
+       content = accumulated;
+       debugProfileStats(`streaming model, ${modelName}, mode, ${modelFamily || 'default'}, cost-ms, ${timeCost}, temperature, ${temperature ?? ''}`);
+     } else {
+       // Non-streaming with retry logic; retryCount=1 means 2 total attempts (1 initial + 1 retry)
+       const retryCount = modelConfig.retryCount ?? 1;
+       const retryInterval = modelConfig.retryInterval ?? 2000;
+       const maxAttempts = retryCount + 1;
+       let lastError;
+       for(let attempt = 1; attempt <= maxAttempts; attempt++)try {
+         const result = await completion.create({
+           model: modelName,
+           messages: messagesWithImageDetail,
+           ...commonConfig,
+           ...reasoningEffortConfig
+         }, options?.abortSignal ? {
+           signal: options.abortSignal
+         } : void 0);
+         timeCost = Date.now() - startTime;
+         debugProfileStats(`model, ${modelName}, mode, ${modelFamily || 'default'}, ui-tars-version, ${uiTarsModelVersion}, prompt-tokens, ${result.usage?.prompt_tokens || ''}, completion-tokens, ${result.usage?.completion_tokens || ''}, total-tokens, ${result.usage?.total_tokens || ''}, cost-ms, ${timeCost}, requestId, ${result._request_id || ''}, temperature, ${temperature ?? ''}`);
+         debugProfileDetail(`model usage detail: ${JSON.stringify(result.usage)}`);
+         if (!result.choices) throw new Error(`invalid response from LLM service: ${JSON.stringify(result)}`);
+         content = result.choices[0].message.content;
+         accumulatedReasoning = result.choices[0].message?.reasoning_content || '';
+         usage = result.usage;
+         requestId = result._request_id;
+         if (!content && accumulatedReasoning && ('doubao-vision' === modelFamily || 'doubao-seed' === modelFamily)) {
+           warnCall('empty content from AI model, using reasoning content');
+           content = accumulatedReasoning;
+         }
+         if (!content) throw new Error('empty content from AI model');
+         break;
+       } catch (error) {
+         lastError = error;
+         // Do not retry if the request was aborted by the caller
+         if (options?.abortSignal?.aborted) break;
+         if (attempt < maxAttempts) {
+           warnCall(`AI call failed (attempt ${attempt}/${maxAttempts}), retrying in ${retryInterval}ms... Error: ${lastError.message}`);
+           await new Promise((resolve)=>setTimeout(resolve, retryInterval));
+         }
+       }
+       if (!content) throw lastError;
+     }
+     debugCall(`response reasoning content: ${accumulatedReasoning}`);
+     debugCall(`response content: ${content}`);
+     // Ensure streaming responses always carry usage info, estimating token counts from content length if needed
+     if (isStreaming && !usage) {
+       const estimatedTokens = Math.max(1, Math.floor((content || '').length / 4));
+       usage = {
+         prompt_tokens: estimatedTokens,
+         completion_tokens: estimatedTokens,
+         total_tokens: 2 * estimatedTokens
+       };
+     }
+     return {
+       content: content || '',
+       reasoning_content: accumulatedReasoning || void 0,
+       usage: buildUsageInfo(usage, requestId),
+       isStreamed: !!isStreaming
+     };
+   } catch (e) {
+     warnCall('call AI error', e);
+     const newError = new Error(`failed to call ${isStreaming ? 'streaming ' : ''}AI model service (${modelName}): ${e.message}\nTrouble shooting: https://midscenejs.com/model-provider.html`, {
+       cause: e
+     });
+     throw newError;
+   }
+ }
+ async function callAIWithObjectResponse(messages, modelConfig, options) {
+   const response = await callAI(messages, modelConfig, {
+     deepThink: options?.deepThink,
+     abortSignal: options?.abortSignal
+   });
+   assert(response, 'empty response');
+   const modelFamily = modelConfig.modelFamily;
+   const jsonContent = safeParseJson(response.content, modelFamily);
+   if ('object' != typeof jsonContent) throw new AIResponseParseError(`failed to parse json response from model (${modelConfig.modelName}): ${response.content}`, response.content, response.usage);
+   return {
+     content: jsonContent,
+     contentString: response.content,
+     usage: response.usage,
+     reasoning_content: response.reasoning_content
+   };
+ }
+ async function callAIWithStringResponse(msgs, modelConfig, options) {
+   const { content, usage } = await callAI(msgs, modelConfig, {
+     abortSignal: options?.abortSignal
+   });
+   return {
+     content,
+     usage
+   };
+ }
+ function extractJSONFromCodeBlock(response) {
+   try {
+     // First, try to match a JSON object directly in the response
+     const jsonMatch = response.match(/^\s*(\{[\s\S]*\})\s*$/);
+     if (jsonMatch) return jsonMatch[1];
+     // If no direct JSON object is found, try to extract JSON from a code block
+     const codeBlockMatch = response.match(/```(?:json)?\s*(\{[\s\S]*?\})\s*```/);
+     if (codeBlockMatch) return codeBlockMatch[1];
+     // If no code block is found, try to find a JSON-like structure in the text
+     const jsonLikeMatch = response.match(/\{[\s\S]*\}/);
+     if (jsonLikeMatch) return jsonLikeMatch[0];
+   } catch {}
+   return response;
+ }
+ function preprocessDoubaoBboxJson(input) {
+   // For bbox values like "940 445 969 490", replace every space separator between digits with a comma
+   if (input.includes('bbox')) while(/\d+\s+\d+/.test(input))input = input.replace(/(\d+)\s+(\d+)/g, '$1,$2');
+   return input;
+ }
+ function resolveReasoningConfig({ reasoningEnabled, reasoningEffort, reasoningBudget, modelFamily }) {
+   // No reasoning params set at all
+   if (void 0 === reasoningEnabled && !reasoningEffort && void 0 === reasoningBudget) return {
+     config: {}
+   };
+   const debugMessages = [];
+   const config = {};
+   if ('qwen3-vl' === modelFamily || 'qwen3.5' === modelFamily) {
+     // reasoningEnabled → enable_thinking, reasoningBudget → thinking_budget; reasoningEffort is ignored for qwen
+     if (void 0 !== reasoningEnabled) {
+       config.enable_thinking = reasoningEnabled;
+       debugMessages.push(`enable_thinking=${reasoningEnabled}`);
+     }
+     if (void 0 !== reasoningBudget) {
+       config.thinking_budget = reasoningBudget;
+       debugMessages.push(`thinking_budget=${reasoningBudget}`);
+     }
+   } else if ('doubao-vision' === modelFamily || 'doubao-seed' === modelFamily) {
+     // reasoningEnabled → thinking.type, reasoningEffort → reasoning_effort; reasoningBudget is ignored for doubao
+     if (void 0 !== reasoningEnabled) {
+       config.thinking = {
+         type: reasoningEnabled ? 'enabled' : 'disabled'
+       };
+       debugMessages.push(`thinking.type=${reasoningEnabled ? 'enabled' : 'disabled'}`);
+     }
+     if (reasoningEffort) {
+       config.reasoning_effort = reasoningEffort;
+       debugMessages.push(`reasoning_effort="${reasoningEffort}"`);
+     }
+   } else if ('glm-v' === modelFamily) {
+     // reasoningEnabled → thinking.type; reasoningEffort and reasoningBudget are ignored for glm-v
+     if (void 0 !== reasoningEnabled) {
+       config.thinking = {
+         type: reasoningEnabled ? 'enabled' : 'disabled'
+       };
+       debugMessages.push(`thinking.type=${reasoningEnabled ? 'enabled' : 'disabled'}`);
+     }
+   } else if ('gpt-5' === modelFamily) {
+     config.reasoning = void 0;
+     debugMessages.push('reasoning config is ignored for gpt-5');
+   } else if (!modelFamily) return {
+     config: {},
+     debugMessage: 'reasoning config ignored: no model_family configured',
+     warningMessage: 'Reasoning config is set but no model_family is configured. Set MIDSCENE_MODEL_FAMILY to enable reasoning config pass-through.'
+   };
+   else if (reasoningEffort) {
+     // For unknown model families, pass reasoning_effort directly as a best-effort default
+     config.reasoning_effort = reasoningEffort;
+     debugMessages.push(`reasoning_effort="${reasoningEffort}"`);
+   }
+   return {
+     config,
+     debugMessage: debugMessages.length ? `reasoning config for ${modelFamily}: ${debugMessages.join(', ')}` : void 0
+   };
+ }
+ // Normalize a parsed JSON object by trimming whitespace from all object keys and string values
+ // (handles LLM output such as " prompt " -> "prompt" or " Tap " -> "Tap")
+ function normalizeJsonObject(obj) {
+   if (null == obj) return obj;
+   if (Array.isArray(obj)) return obj.map((item)=>normalizeJsonObject(item));
+   if ('object' == typeof obj) {
+     const normalized = {};
+     for (const [key, value] of Object.entries(obj)){
+       const trimmedKey = key.trim();
+       let normalizedValue = normalizeJsonObject(value);
+       if ('string' == typeof normalizedValue) normalizedValue = normalizedValue.trim();
+       normalized[trimmedKey] = normalizedValue;
+     }
+     return normalized;
+   }
+   if ('string' == typeof obj) return obj.trim();
+   return obj;
+ }
+ function safeParseJson(input, modelFamily) {
+   const cleanJsonString = extractJSONFromCodeBlock(input);
+   // Match a bare "(x,y)" point and return it as a number pair
+   if (cleanJsonString?.match(/\((\d+),(\d+)\)/)) return cleanJsonString.match(/\((\d+),(\d+)\)/)?.slice(1).map(Number);
+   let parsed;
+   let lastError;
+   try {
+     parsed = JSON.parse(cleanJsonString);
+     return normalizeJsonObject(parsed);
+   } catch (error) {
+     lastError = error;
+   }
+   try {
+     parsed = JSON.parse(jsonrepair(cleanJsonString));
+     return normalizeJsonObject(parsed);
+   } catch (error) {
+     lastError = error;
+   }
+   if ('doubao-vision' === modelFamily || 'doubao-seed' === modelFamily || isUITars(modelFamily)) {
+     const jsonString = preprocessDoubaoBboxJson(cleanJsonString);
+     try {
+       parsed = JSON.parse(jsonrepair(jsonString));
+       return normalizeJsonObject(parsed);
+     } catch (error) {
+       lastError = error;
+     }
+   }
+   throw Error(`failed to parse LLM response into JSON. Error - ${String(lastError ?? 'unknown error')}. Response - \n ${input}`);
+ }
+ export { AIResponseParseError, callAI, callAIWithObjectResponse, callAIWithStringResponse, extractJSONFromCodeBlock, preprocessDoubaoBboxJson, resolveReasoningConfig, safeParseJson };
+
+ //# sourceMappingURL=index.mjs.map
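For orientation, here is a short usage sketch of the JSON helpers this file exports. It is illustrative only: the import specifier `@donggui/core` is an assumption (the package's actual entry points live in package.json, whose contents are not shown in this diff), and the results are hand-traced from the function bodies above rather than produced by running the package.

// Hypothetical usage sketch (TypeScript); the import path is an assumption.
import { extractJSONFromCodeBlock, preprocessDoubaoBboxJson, safeParseJson } from '@donggui/core';

// Unwraps a fenced ```json code block before parsing:
extractJSONFromCodeBlock('```json\n{"action": "Tap"}\n```'); // -> '{"action": "Tap"}'

// Rewrites doubao-style space-separated bbox numbers into commas:
preprocessDoubaoBboxJson('{"bbox": [940 445 969 490]}'); // -> '{"bbox": [940,445,969,490]}'

// Trims stray whitespace in keys/values, falling back to jsonrepair for near-JSON:
safeParseJson('{ " action ": " Tap " }', undefined); // -> { action: 'Tap' }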
package/dist/es/ai-model/service-caller/index.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"ai-model/service-caller/index.mjs","sources":["../../../../src/ai-model/service-caller/index.ts"],"sourcesContent":["import type { AIUsageInfo, DeepThinkOption } from '@/types';\nimport type { CodeGenerationChunk, StreamingCallback } from '@/types';\n\n// Error class that preserves usage and rawResponse when AI call parsing fails\nexport class AIResponseParseError extends Error {\n usage?: AIUsageInfo;\n rawResponse: string;\n\n constructor(message: string, rawResponse: string, usage?: AIUsageInfo) {\n super(message);\n this.name = 'AIResponseParseError';\n this.rawResponse = rawResponse;\n this.usage = usage;\n }\n}\nimport {\n type IModelConfig,\n MIDSCENE_LANGFUSE_DEBUG,\n MIDSCENE_LANGSMITH_DEBUG,\n MIDSCENE_MODEL_MAX_TOKENS,\n OPENAI_MAX_TOKENS,\n type TModelFamily,\n type UITarsModelVersion,\n globalConfigManager,\n} from '@midscene/shared/env';\n\nimport { getDebug } from '@midscene/shared/logger';\nimport { assert, ifInBrowser } from '@midscene/shared/utils';\nimport { jsonrepair } from 'jsonrepair';\nimport OpenAI from 'openai';\nimport type { ChatCompletionMessageParam } from 'openai/resources/index';\nimport type { Stream } from 'openai/streaming';\nimport type { AIArgs } from '../../common';\nimport { isAutoGLM, isUITars } from '../auto-glm/util';\n\nasync function createChatClient({\n modelConfig,\n}: {\n modelConfig: IModelConfig;\n}): Promise<{\n completion: OpenAI.Chat.Completions;\n modelName: string;\n modelDescription: string;\n uiTarsModelVersion?: UITarsModelVersion;\n modelFamily: TModelFamily | undefined;\n}> {\n const {\n socksProxy,\n httpProxy,\n modelName,\n openaiBaseURL,\n openaiApiKey,\n openaiExtraConfig,\n modelDescription,\n uiTarsModelVersion,\n modelFamily,\n createOpenAIClient,\n timeout,\n } = modelConfig;\n\n let proxyAgent: any = undefined;\n const warnClient = getDebug('ai:call', { console: true });\n const debugProxy = getDebug('ai:call:proxy');\n const warnProxy = getDebug('ai:call:proxy', { console: true });\n\n // Helper function to sanitize proxy URL for logging (remove credentials)\n // Uses URL API instead of regex to avoid ReDoS vulnerabilities\n const sanitizeProxyUrl = (url: string): string => {\n try {\n const parsed = new URL(url);\n if (parsed.username) {\n // Keep username for debugging, hide password for security\n parsed.password = '****';\n return parsed.href;\n }\n return url;\n } catch {\n // If URL parsing fails, return original URL (will be caught later)\n return url;\n }\n };\n\n if (httpProxy) {\n debugProxy('using http proxy', sanitizeProxyUrl(httpProxy));\n if (ifInBrowser) {\n warnProxy(\n 'HTTP proxy is configured but not supported in browser environment',\n );\n } else {\n // Dynamic import with variable to avoid bundler static analysis\n const moduleName = 'undici';\n const { ProxyAgent } = await import(moduleName);\n proxyAgent = new ProxyAgent({\n uri: httpProxy,\n // Note: authentication is handled via the URI (e.g., http://user:pass@proxy.com:8080)\n });\n }\n } else if (socksProxy) {\n debugProxy('using socks proxy', sanitizeProxyUrl(socksProxy));\n if (ifInBrowser) {\n warnProxy(\n 'SOCKS proxy is configured but not supported in browser environment',\n );\n } else {\n try {\n // Dynamic import with variable to avoid bundler static analysis\n const moduleName = 'fetch-socks';\n const { socksDispatcher } = await import(moduleName);\n // Parse SOCKS proxy URL (e.g., socks5://127.0.0.1:1080)\n const proxyUrl = new URL(socksProxy);\n\n // Validate hostname\n if (!proxyUrl.hostname) {\n throw new 
Error('SOCKS proxy URL must include a valid hostname');\n }\n\n // Validate and parse port\n const port = Number.parseInt(proxyUrl.port, 10);\n if (!proxyUrl.port || Number.isNaN(port)) {\n throw new Error('SOCKS proxy URL must include a valid port');\n }\n\n // Parse SOCKS version from protocol\n const protocol = proxyUrl.protocol.replace(':', '');\n const socksType =\n protocol === 'socks4' ? 4 : protocol === 'socks5' ? 5 : 5;\n\n proxyAgent = socksDispatcher({\n type: socksType,\n host: proxyUrl.hostname,\n port,\n ...(proxyUrl.username\n ? {\n userId: decodeURIComponent(proxyUrl.username),\n password: decodeURIComponent(proxyUrl.password || ''),\n }\n : {}),\n });\n debugProxy('socks proxy configured successfully', {\n type: socksType,\n host: proxyUrl.hostname,\n port: port,\n });\n } catch (error) {\n warnProxy('Failed to configure SOCKS proxy:', error);\n throw new Error(\n `Invalid SOCKS proxy URL: ${socksProxy}. Expected format: socks4://host:port, socks5://host:port, or with authentication: socks5://user:pass@host:port`,\n );\n }\n }\n }\n\n const openAIOptions = {\n baseURL: openaiBaseURL,\n apiKey: openaiApiKey,\n // Use fetchOptions.dispatcher for fetch-based SDK instead of httpAgent\n // Note: Type assertion needed due to undici version mismatch between dependencies\n ...(proxyAgent ? { fetchOptions: { dispatcher: proxyAgent as any } } : {}),\n ...openaiExtraConfig,\n ...(typeof timeout === 'number' ? { timeout } : {}),\n dangerouslyAllowBrowser: true,\n };\n\n const baseOpenAI = new OpenAI(openAIOptions);\n\n let openai: OpenAI = baseOpenAI;\n\n // LangSmith wrapper\n if (\n openai &&\n globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGSMITH_DEBUG)\n ) {\n if (ifInBrowser) {\n throw new Error('langsmith is not supported in browser');\n }\n warnClient('DEBUGGING MODE: langsmith wrapper enabled');\n // Use variable to prevent static analysis by bundlers\n const langsmithModule = 'langsmith/wrappers';\n const { wrapOpenAI } = await import(langsmithModule);\n openai = wrapOpenAI(openai);\n }\n\n // Langfuse wrapper\n if (\n openai &&\n globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGFUSE_DEBUG)\n ) {\n if (ifInBrowser) {\n throw new Error('langfuse is not supported in browser');\n }\n warnClient('DEBUGGING MODE: langfuse wrapper enabled');\n // Use variable to prevent static analysis by bundlers\n const langfuseModule = '@langfuse/openai';\n const { observeOpenAI } = await import(langfuseModule);\n openai = observeOpenAI(openai);\n }\n\n if (createOpenAIClient) {\n const wrappedClient = await createOpenAIClient(baseOpenAI, openAIOptions);\n\n if (wrappedClient) {\n openai = wrappedClient as OpenAI;\n }\n }\n\n return {\n completion: openai.chat.completions,\n modelName,\n modelDescription,\n uiTarsModelVersion,\n modelFamily,\n };\n}\n\nexport async function callAI(\n messages: ChatCompletionMessageParam[],\n modelConfig: IModelConfig,\n options?: {\n stream?: boolean;\n onChunk?: StreamingCallback;\n deepThink?: DeepThinkOption;\n abortSignal?: AbortSignal;\n },\n): Promise<{\n content: string;\n reasoning_content?: string;\n usage?: AIUsageInfo;\n isStreamed: boolean;\n}> {\n const {\n completion,\n modelName,\n modelDescription,\n uiTarsModelVersion,\n modelFamily,\n } = await createChatClient({\n modelConfig,\n });\n\n const maxTokens =\n globalConfigManager.getEnvConfigValueAsNumber(MIDSCENE_MODEL_MAX_TOKENS) ??\n globalConfigManager.getEnvConfigValueAsNumber(OPENAI_MAX_TOKENS);\n const debugCall = getDebug('ai:call');\n const warnCall = getDebug('ai:call', { 
console: true });\n const debugProfileStats = getDebug('ai:profile:stats');\n const debugProfileDetail = getDebug('ai:profile:detail');\n\n const startTime = Date.now();\n\n const temperature = (() => {\n if (modelFamily === 'gpt-5') {\n debugCall('temperature is ignored for gpt-5');\n return undefined;\n }\n return modelConfig.temperature ?? 0;\n })();\n\n const isStreaming = options?.stream && options?.onChunk;\n let content: string | undefined;\n let accumulated = '';\n let accumulatedReasoning = '';\n let usage: OpenAI.CompletionUsage | undefined;\n let timeCost: number | undefined;\n let requestId: string | null | undefined;\n\n const buildUsageInfo = (\n usageData?: OpenAI.CompletionUsage,\n requestId?: string | null,\n ) => {\n if (!usageData) return undefined;\n\n const cachedInputTokens = (\n usageData as { prompt_tokens_details?: { cached_tokens?: number } }\n )?.prompt_tokens_details?.cached_tokens;\n\n return {\n prompt_tokens: usageData.prompt_tokens ?? 0,\n completion_tokens: usageData.completion_tokens ?? 0,\n total_tokens: usageData.total_tokens ?? 0,\n cached_input: cachedInputTokens ?? 0,\n time_cost: timeCost ?? 0,\n model_name: modelName,\n model_description: modelDescription,\n intent: modelConfig.intent,\n request_id: requestId ?? undefined,\n } satisfies AIUsageInfo;\n };\n\n const commonConfig = {\n temperature,\n stream: !!isStreaming,\n max_tokens: maxTokens,\n ...(modelFamily === 'qwen2.5-vl' // qwen vl v2 specific config\n ? {\n vl_high_resolution_images: true,\n }\n : {}),\n };\n\n if (isAutoGLM(modelFamily)) {\n (commonConfig as unknown as Record<string, number>).top_p = 0.85;\n (commonConfig as unknown as Record<string, number>).frequency_penalty = 0.2;\n }\n\n // Merge deepThink (per-request boolean) with reasoning config (model-level)\n // deepThink takes priority as a per-request override for reasoningEnabled\n const mergedEnableReasoning = (() => {\n const normalizedDeepThink =\n options?.deepThink === 'unset' ? undefined : options?.deepThink;\n if (normalizedDeepThink === true) return true;\n if (normalizedDeepThink === false) return false;\n return modelConfig.reasoningEnabled;\n })();\n\n const {\n config: reasoningEffortConfig,\n debugMessage: reasoningEffortDebugMessage,\n warningMessage,\n } = resolveReasoningConfig({\n reasoningEnabled: mergedEnableReasoning,\n reasoningEffort: modelConfig.reasoningEffort,\n reasoningBudget: modelConfig.reasoningBudget,\n modelFamily,\n });\n if (reasoningEffortDebugMessage) {\n debugCall(reasoningEffortDebugMessage);\n }\n if (warningMessage) {\n warnCall(warningMessage);\n }\n\n // For GPT-5, add \"detail\": \"original\" to image inputs to get original resolution images in reasoning content\n const messagesWithImageDetail: ChatCompletionMessageParam[] = (() => {\n if (modelFamily !== 'gpt-5') {\n return messages;\n }\n\n return messages.map((msg) => {\n if (!Array.isArray(msg.content)) {\n return msg;\n }\n\n const content = msg.content.map((part) => {\n if (part && part.type === 'image_url' && part.image_url?.url) {\n return {\n ...part,\n image_url: {\n ...part.image_url,\n detail: 'original',\n },\n };\n }\n return part;\n });\n\n return {\n ...msg,\n content,\n } as ChatCompletionMessageParam;\n });\n })();\n\n try {\n debugCall(\n `sending ${isStreaming ? 
'streaming ' : ''}request to ${modelName}`,\n );\n\n if (isStreaming) {\n const stream = (await completion.create(\n {\n model: modelName,\n messages: messagesWithImageDetail,\n ...commonConfig,\n ...reasoningEffortConfig,\n },\n {\n stream: true,\n ...(options?.abortSignal ? { signal: options.abortSignal } : {}),\n },\n )) as Stream<OpenAI.Chat.Completions.ChatCompletionChunk> & {\n _request_id?: string | null;\n };\n\n requestId = stream._request_id;\n\n for await (const chunk of stream) {\n const content = chunk.choices?.[0]?.delta?.content || '';\n const reasoning_content =\n (chunk.choices?.[0]?.delta as any)?.reasoning_content || '';\n\n // Check for usage info in any chunk (OpenAI provides usage in separate chunks)\n if (chunk.usage) {\n usage = chunk.usage;\n }\n\n if (content || reasoning_content) {\n accumulated += content;\n accumulatedReasoning += reasoning_content;\n const chunkData: CodeGenerationChunk = {\n content,\n reasoning_content,\n accumulated,\n isComplete: false,\n usage: undefined,\n };\n options.onChunk!(chunkData);\n }\n\n // Check if stream is complete\n if (chunk.choices?.[0]?.finish_reason) {\n timeCost = Date.now() - startTime;\n\n // If usage is not available from the stream, provide a basic usage info\n if (!usage) {\n // Estimate token counts based on content length (rough approximation)\n const estimatedTokens = Math.max(\n 1,\n Math.floor(accumulated.length / 4),\n );\n usage = {\n prompt_tokens: estimatedTokens,\n completion_tokens: estimatedTokens,\n total_tokens: estimatedTokens * 2,\n };\n }\n\n // Send final chunk\n const finalChunk: CodeGenerationChunk = {\n content: '',\n accumulated,\n reasoning_content: '',\n isComplete: true,\n usage: buildUsageInfo(usage, requestId),\n };\n options.onChunk!(finalChunk);\n break;\n }\n }\n content = accumulated;\n debugProfileStats(\n `streaming model, ${modelName}, mode, ${modelFamily || 'default'}, cost-ms, ${timeCost}, temperature, ${temperature ?? ''}`,\n );\n } else {\n // Non-streaming with retry logic\n const retryCount = modelConfig.retryCount ?? 1;\n const retryInterval = modelConfig.retryInterval ?? 2000;\n const maxAttempts = retryCount + 1; // retryCount=1 means 2 total attempts (1 initial + 1 retry)\n\n let lastError: Error | undefined;\n\n for (let attempt = 1; attempt <= maxAttempts; attempt++) {\n try {\n const result = await completion.create(\n {\n model: modelName,\n messages: messagesWithImageDetail,\n ...commonConfig,\n ...reasoningEffortConfig,\n } as any,\n options?.abortSignal ? { signal: options.abortSignal } : undefined,\n );\n\n timeCost = Date.now() - startTime;\n\n debugProfileStats(\n `model, ${modelName}, mode, ${modelFamily || 'default'}, ui-tars-version, ${uiTarsModelVersion}, prompt-tokens, ${result.usage?.prompt_tokens || ''}, completion-tokens, ${result.usage?.completion_tokens || ''}, total-tokens, ${result.usage?.total_tokens || ''}, cost-ms, ${timeCost}, requestId, ${result._request_id || ''}, temperature, ${temperature ?? 
''}`,\n );\n\n debugProfileDetail(\n `model usage detail: ${JSON.stringify(result.usage)}`,\n );\n\n if (!result.choices) {\n throw new Error(\n `invalid response from LLM service: ${JSON.stringify(result)}`,\n );\n }\n\n content = result.choices[0].message.content!;\n accumulatedReasoning =\n (result.choices[0].message as any)?.reasoning_content || '';\n usage = result.usage;\n requestId = result._request_id;\n\n if (\n !content &&\n accumulatedReasoning &&\n (modelFamily === 'doubao-vision' || modelFamily === 'doubao-seed')\n ) {\n warnCall('empty content from AI model, using reasoning content');\n content = accumulatedReasoning;\n }\n\n if (!content) {\n throw new Error('empty content from AI model');\n }\n\n break; // Success, exit retry loop\n } catch (error) {\n lastError = error as Error;\n // Do not retry if the request was aborted by the caller\n if (options?.abortSignal?.aborted) {\n break;\n }\n if (attempt < maxAttempts) {\n warnCall(\n `AI call failed (attempt ${attempt}/${maxAttempts}), retrying in ${retryInterval}ms... Error: ${lastError.message}`,\n );\n await new Promise((resolve) => setTimeout(resolve, retryInterval));\n }\n }\n }\n\n if (!content) {\n throw lastError;\n }\n }\n\n debugCall(`response reasoning content: ${accumulatedReasoning}`);\n debugCall(`response content: ${content}`);\n\n // Ensure we always have usage info for streaming responses\n if (isStreaming && !usage) {\n // Estimate token counts based on content length (rough approximation)\n const estimatedTokens = Math.max(\n 1,\n Math.floor((content || '').length / 4),\n );\n usage = {\n prompt_tokens: estimatedTokens,\n completion_tokens: estimatedTokens,\n total_tokens: estimatedTokens * 2,\n } as OpenAI.CompletionUsage;\n }\n\n return {\n content: content || '',\n reasoning_content: accumulatedReasoning || undefined,\n usage: buildUsageInfo(usage, requestId),\n isStreamed: !!isStreaming,\n };\n } catch (e: any) {\n warnCall('call AI error', e);\n const newError = new Error(\n `failed to call ${isStreaming ? 
'streaming ' : ''}AI model service (${modelName}): ${e.message}\\nTrouble shooting: https://midscenejs.com/model-provider.html`,\n {\n cause: e,\n },\n );\n throw newError;\n }\n}\n\nexport async function callAIWithObjectResponse<T>(\n messages: ChatCompletionMessageParam[],\n modelConfig: IModelConfig,\n options?: {\n deepThink?: DeepThinkOption;\n abortSignal?: AbortSignal;\n },\n): Promise<{\n content: T;\n contentString: string;\n usage?: AIUsageInfo;\n reasoning_content?: string;\n}> {\n const response = await callAI(messages, modelConfig, {\n deepThink: options?.deepThink,\n abortSignal: options?.abortSignal,\n });\n assert(response, 'empty response');\n const modelFamily = modelConfig.modelFamily;\n const jsonContent = safeParseJson(response.content, modelFamily);\n if (typeof jsonContent !== 'object') {\n throw new AIResponseParseError(\n `failed to parse json response from model (${modelConfig.modelName}): ${response.content}`,\n response.content,\n response.usage,\n );\n }\n return {\n content: jsonContent,\n contentString: response.content,\n usage: response.usage,\n reasoning_content: response.reasoning_content,\n };\n}\n\nexport async function callAIWithStringResponse(\n msgs: AIArgs,\n modelConfig: IModelConfig,\n options?: {\n abortSignal?: AbortSignal;\n },\n): Promise<{ content: string; usage?: AIUsageInfo }> {\n const { content, usage } = await callAI(msgs, modelConfig, {\n abortSignal: options?.abortSignal,\n });\n return { content, usage };\n}\n\nexport function extractJSONFromCodeBlock(response: string) {\n try {\n // First, try to match a JSON object directly in the response\n const jsonMatch = response.match(/^\\s*(\\{[\\s\\S]*\\})\\s*$/);\n if (jsonMatch) {\n return jsonMatch[1];\n }\n\n // If no direct JSON object is found, try to extract JSON from a code block\n const codeBlockMatch = response.match(\n /```(?:json)?\\s*(\\{[\\s\\S]*?\\})\\s*```/,\n );\n if (codeBlockMatch) {\n return codeBlockMatch[1];\n }\n\n // If no code block is found, try to find a JSON-like structure in the text\n const jsonLikeMatch = response.match(/\\{[\\s\\S]*\\}/);\n if (jsonLikeMatch) {\n return jsonLikeMatch[0];\n }\n } catch {}\n // If no JSON-like structure is found, return the original response\n return response;\n}\n\nexport function preprocessDoubaoBboxJson(input: string) {\n if (input.includes('bbox')) {\n // when its values like 940 445 969 490, replace all /\\d+\\s+\\d+/g with /$1,$2/g\n while (/\\d+\\s+\\d+/.test(input)) {\n input = input.replace(/(\\d+)\\s+(\\d+)/g, '$1,$2');\n }\n }\n return input;\n}\n\nexport function resolveReasoningConfig({\n reasoningEnabled,\n reasoningEffort,\n reasoningBudget,\n modelFamily,\n}: {\n reasoningEnabled?: boolean;\n reasoningEffort?: string;\n reasoningBudget?: number;\n modelFamily?: TModelFamily;\n}): {\n config: Record<string, unknown>;\n debugMessage?: string;\n warningMessage?: string;\n} {\n // No reasoning params set at all\n if (\n reasoningEnabled === undefined &&\n !reasoningEffort &&\n reasoningBudget === undefined\n ) {\n return { config: {} };\n }\n\n const debugMessages: string[] = [];\n const config: Record<string, unknown> = {};\n\n if (modelFamily === 'qwen3-vl' || modelFamily === 'qwen3.5') {\n // reasoningEnabled → enable_thinking\n if (reasoningEnabled !== undefined) {\n config.enable_thinking = reasoningEnabled;\n debugMessages.push(`enable_thinking=${reasoningEnabled}`);\n }\n // reasoningBudget → thinking_budget\n if (reasoningBudget !== undefined) {\n config.thinking_budget = reasoningBudget;\n 
debugMessages.push(`thinking_budget=${reasoningBudget}`);\n }\n // reasoningEffort is ignored for qwen\n } else if (modelFamily === 'doubao-vision' || modelFamily === 'doubao-seed') {\n // reasoningEnabled → thinking.type\n if (reasoningEnabled !== undefined) {\n config.thinking = {\n type: reasoningEnabled ? 'enabled' : 'disabled',\n };\n debugMessages.push(\n `thinking.type=${reasoningEnabled ? 'enabled' : 'disabled'}`,\n );\n }\n // reasoningEffort → reasoning_effort\n if (reasoningEffort) {\n config.reasoning_effort = reasoningEffort;\n debugMessages.push(`reasoning_effort=\"${reasoningEffort}\"`);\n }\n // reasoningBudget is ignored for doubao\n } else if (modelFamily === 'glm-v') {\n // reasoningEnabled → thinking.type\n if (reasoningEnabled !== undefined) {\n config.thinking = {\n type: reasoningEnabled ? 'enabled' : 'disabled',\n };\n debugMessages.push(\n `thinking.type=${reasoningEnabled ? 'enabled' : 'disabled'}`,\n );\n }\n // reasoningEffort and reasoningBudget are ignored for glm-v\n } else if (modelFamily === 'gpt-5') {\n // reasoningEffort → reasoning.effort\n config.reasoning = undefined;\n debugMessages.push('reasoning config is ignored for gpt-5');\n // if (reasoningEffort) {\n // config.reasoning = { effort: reasoningEffort };\n // debugMessages.push(`reasoning.effort=\"${reasoningEffort}\"`);\n // } else if (reasoningEnabled === true) {\n // config.reasoning = { effort: 'high' };\n // debugMessages.push('reasoning.effort=\"high\" (from reasoningEnabled)');\n // } else if (reasoningEnabled === false) {\n // config.reasoning = { effort: 'low' };\n // debugMessages.push('reasoning.effort=\"low\" (from reasoningEnabled)');\n // }\n // reasoningBudget is ignored for gpt-5\n } else if (!modelFamily) {\n return {\n config: {},\n debugMessage: 'reasoning config ignored: no model_family configured',\n warningMessage:\n 'Reasoning config is set but no model_family is configured. Set MIDSCENE_MODEL_FAMILY to enable reasoning config pass-through.',\n };\n } else {\n // For unknown model families, pass reasoning_effort directly as a best-effort default\n if (reasoningEffort) {\n config.reasoning_effort = reasoningEffort;\n debugMessages.push(`reasoning_effort=\"${reasoningEffort}\"`);\n }\n }\n\n return {\n config,\n debugMessage: debugMessages.length\n ? `reasoning config for ${modelFamily}: ${debugMessages.join(', ')}`\n : undefined,\n };\n}\n\n/**\n * Normalize a parsed JSON object by trimming whitespace from:\n * 1. All object keys (e.g., \" prompt \" -> \"prompt\")\n * 2. 
All string values (e.g., \" Tap \" -> \"Tap\")\n * This handles LLM output that may include leading/trailing spaces.\n */\nfunction normalizeJsonObject(obj: any): any {\n // Handle null and undefined\n if (obj === null || obj === undefined) {\n return obj;\n }\n\n // Handle arrays - recursively normalize each element\n if (Array.isArray(obj)) {\n return obj.map((item) => normalizeJsonObject(item));\n }\n\n // Handle objects\n if (typeof obj === 'object') {\n const normalized: any = {};\n\n for (const [key, value] of Object.entries(obj)) {\n // Trim the key to remove leading/trailing spaces\n const trimmedKey = key.trim();\n\n // Recursively normalize the value\n let normalizedValue = normalizeJsonObject(value);\n\n // Trim all string values\n if (typeof normalizedValue === 'string') {\n normalizedValue = normalizedValue.trim();\n }\n\n normalized[trimmedKey] = normalizedValue;\n }\n\n return normalized;\n }\n\n // Handle primitive strings\n if (typeof obj === 'string') {\n return obj.trim();\n }\n\n // Return other primitives as-is\n return obj;\n}\n\nexport function safeParseJson(\n input: string,\n modelFamily: TModelFamily | undefined,\n) {\n const cleanJsonString = extractJSONFromCodeBlock(input);\n // match the point\n if (cleanJsonString?.match(/\\((\\d+),(\\d+)\\)/)) {\n return cleanJsonString\n .match(/\\((\\d+),(\\d+)\\)/)\n ?.slice(1)\n .map(Number);\n }\n\n let parsed: any;\n let lastError: unknown;\n try {\n parsed = JSON.parse(cleanJsonString);\n return normalizeJsonObject(parsed);\n } catch (error) {\n lastError = error;\n }\n try {\n parsed = JSON.parse(jsonrepair(cleanJsonString));\n return normalizeJsonObject(parsed);\n } catch (error) {\n lastError = error;\n }\n\n if (\n modelFamily === 'doubao-vision' ||\n modelFamily === 'doubao-seed' ||\n isUITars(modelFamily)\n ) {\n const jsonString = preprocessDoubaoBboxJson(cleanJsonString);\n try {\n parsed = JSON.parse(jsonrepair(jsonString));\n return normalizeJsonObject(parsed);\n } catch (error) {\n lastError = error;\n }\n }\n throw Error(\n `failed to parse LLM response into JSON. Error - ${String(\n lastError ?? 'unknown error',\n )}. 
[source map metadata omitted: the remaining "names" identifier array and base64-VLQ "mappings" string are machine-generated data]