@sqaitech/core 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (200)
  1. package/LICENSE +21 -0
  2. package/README.md +9 -0
  3. package/dist/es/agent/agent.mjs +635 -0
  4. package/dist/es/agent/agent.mjs.map +1 -0
  5. package/dist/es/agent/common.mjs +0 -0
  6. package/dist/es/agent/index.mjs +6 -0
  7. package/dist/es/agent/task-cache.mjs +184 -0
  8. package/dist/es/agent/task-cache.mjs.map +1 -0
  9. package/dist/es/agent/tasks.mjs +663 -0
  10. package/dist/es/agent/tasks.mjs.map +1 -0
  11. package/dist/es/agent/ui-utils.mjs +72 -0
  12. package/dist/es/agent/ui-utils.mjs.map +1 -0
  13. package/dist/es/agent/utils.mjs +162 -0
  14. package/dist/es/agent/utils.mjs.map +1 -0
  15. package/dist/es/ai-model/action-executor.mjs +129 -0
  16. package/dist/es/ai-model/action-executor.mjs.map +1 -0
  17. package/dist/es/ai-model/common.mjs +355 -0
  18. package/dist/es/ai-model/common.mjs.map +1 -0
  19. package/dist/es/ai-model/conversation-history.mjs +58 -0
  20. package/dist/es/ai-model/conversation-history.mjs.map +1 -0
  21. package/dist/es/ai-model/index.mjs +11 -0
  22. package/dist/es/ai-model/inspect.mjs +286 -0
  23. package/dist/es/ai-model/inspect.mjs.map +1 -0
  24. package/dist/es/ai-model/llm-planning.mjs +140 -0
  25. package/dist/es/ai-model/llm-planning.mjs.map +1 -0
  26. package/dist/es/ai-model/prompt/assertion.mjs +31 -0
  27. package/dist/es/ai-model/prompt/assertion.mjs.map +1 -0
  28. package/dist/es/ai-model/prompt/common.mjs +7 -0
  29. package/dist/es/ai-model/prompt/common.mjs.map +1 -0
  30. package/dist/es/ai-model/prompt/describe.mjs +44 -0
  31. package/dist/es/ai-model/prompt/describe.mjs.map +1 -0
  32. package/dist/es/ai-model/prompt/extraction.mjs +129 -0
  33. package/dist/es/ai-model/prompt/extraction.mjs.map +1 -0
  34. package/dist/es/ai-model/prompt/llm-locator.mjs +268 -0
  35. package/dist/es/ai-model/prompt/llm-locator.mjs.map +1 -0
  36. package/dist/es/ai-model/prompt/llm-planning.mjs +367 -0
  37. package/dist/es/ai-model/prompt/llm-planning.mjs.map +1 -0
  38. package/dist/es/ai-model/prompt/llm-section-locator.mjs +41 -0
  39. package/dist/es/ai-model/prompt/llm-section-locator.mjs.map +1 -0
  40. package/dist/es/ai-model/prompt/playwright-generator.mjs +117 -0
  41. package/dist/es/ai-model/prompt/playwright-generator.mjs.map +1 -0
  42. package/dist/es/ai-model/prompt/ui-tars-locator.mjs +34 -0
  43. package/dist/es/ai-model/prompt/ui-tars-locator.mjs.map +1 -0
  44. package/dist/es/ai-model/prompt/ui-tars-planning.mjs +36 -0
  45. package/dist/es/ai-model/prompt/ui-tars-planning.mjs.map +1 -0
  46. package/dist/es/ai-model/prompt/util.mjs +124 -0
  47. package/dist/es/ai-model/prompt/util.mjs.map +1 -0
  48. package/dist/es/ai-model/prompt/yaml-generator.mjs +219 -0
  49. package/dist/es/ai-model/prompt/yaml-generator.mjs.map +1 -0
  50. package/dist/es/ai-model/service-caller/index.mjs +388 -0
  51. package/dist/es/ai-model/service-caller/index.mjs.map +1 -0
  52. package/dist/es/ai-model/ui-tars-planning.mjs +201 -0
  53. package/dist/es/ai-model/ui-tars-planning.mjs.map +1 -0
  54. package/dist/es/device/index.mjs +152 -0
  55. package/dist/es/device/index.mjs.map +1 -0
  56. package/dist/es/image/index.mjs +2 -0
  57. package/dist/es/index.mjs +11 -0
  58. package/dist/es/index.mjs.map +1 -0
  59. package/dist/es/insight/index.mjs +231 -0
  60. package/dist/es/insight/index.mjs.map +1 -0
  61. package/dist/es/insight/utils.mjs +15 -0
  62. package/dist/es/insight/utils.mjs.map +1 -0
  63. package/dist/es/report.mjs +88 -0
  64. package/dist/es/report.mjs.map +1 -0
  65. package/dist/es/tree.mjs +2 -0
  66. package/dist/es/types.mjs +11 -0
  67. package/dist/es/types.mjs.map +1 -0
  68. package/dist/es/utils.mjs +202 -0
  69. package/dist/es/utils.mjs.map +1 -0
  70. package/dist/es/yaml/builder.mjs +13 -0
  71. package/dist/es/yaml/builder.mjs.map +1 -0
  72. package/dist/es/yaml/index.mjs +3 -0
  73. package/dist/es/yaml/player.mjs +372 -0
  74. package/dist/es/yaml/player.mjs.map +1 -0
  75. package/dist/es/yaml/utils.mjs +73 -0
  76. package/dist/es/yaml/utils.mjs.map +1 -0
  77. package/dist/es/yaml.mjs +0 -0
  78. package/dist/lib/agent/agent.js +682 -0
  79. package/dist/lib/agent/agent.js.map +1 -0
  80. package/dist/lib/agent/common.js +5 -0
  81. package/dist/lib/agent/index.js +81 -0
  82. package/dist/lib/agent/index.js.map +1 -0
  83. package/dist/lib/agent/task-cache.js +236 -0
  84. package/dist/lib/agent/task-cache.js.map +1 -0
  85. package/dist/lib/agent/tasks.js +700 -0
  86. package/dist/lib/agent/tasks.js.map +1 -0
  87. package/dist/lib/agent/ui-utils.js +121 -0
  88. package/dist/lib/agent/ui-utils.js.map +1 -0
  89. package/dist/lib/agent/utils.js +233 -0
  90. package/dist/lib/agent/utils.js.map +1 -0
  91. package/dist/lib/ai-model/action-executor.js +163 -0
  92. package/dist/lib/ai-model/action-executor.js.map +1 -0
  93. package/dist/lib/ai-model/common.js +461 -0
  94. package/dist/lib/ai-model/common.js.map +1 -0
  95. package/dist/lib/ai-model/conversation-history.js +92 -0
  96. package/dist/lib/ai-model/conversation-history.js.map +1 -0
  97. package/dist/lib/ai-model/index.js +131 -0
  98. package/dist/lib/ai-model/index.js.map +1 -0
  99. package/dist/lib/ai-model/inspect.js +326 -0
  100. package/dist/lib/ai-model/inspect.js.map +1 -0
  101. package/dist/lib/ai-model/llm-planning.js +174 -0
  102. package/dist/lib/ai-model/llm-planning.js.map +1 -0
  103. package/dist/lib/ai-model/prompt/assertion.js +65 -0
  104. package/dist/lib/ai-model/prompt/assertion.js.map +1 -0
  105. package/dist/lib/ai-model/prompt/common.js +41 -0
  106. package/dist/lib/ai-model/prompt/common.js.map +1 -0
  107. package/dist/lib/ai-model/prompt/describe.js +78 -0
  108. package/dist/lib/ai-model/prompt/describe.js.map +1 -0
  109. package/dist/lib/ai-model/prompt/extraction.js +169 -0
  110. package/dist/lib/ai-model/prompt/extraction.js.map +1 -0
  111. package/dist/lib/ai-model/prompt/llm-locator.js +308 -0
  112. package/dist/lib/ai-model/prompt/llm-locator.js.map +1 -0
  113. package/dist/lib/ai-model/prompt/llm-planning.js +407 -0
  114. package/dist/lib/ai-model/prompt/llm-planning.js.map +1 -0
  115. package/dist/lib/ai-model/prompt/llm-section-locator.js +78 -0
  116. package/dist/lib/ai-model/prompt/llm-section-locator.js.map +1 -0
  117. package/dist/lib/ai-model/prompt/playwright-generator.js +178 -0
  118. package/dist/lib/ai-model/prompt/playwright-generator.js.map +1 -0
  119. package/dist/lib/ai-model/prompt/ui-tars-locator.js +68 -0
  120. package/dist/lib/ai-model/prompt/ui-tars-locator.js.map +1 -0
  121. package/dist/lib/ai-model/prompt/ui-tars-planning.js +73 -0
  122. package/dist/lib/ai-model/prompt/ui-tars-planning.js.map +1 -0
  123. package/dist/lib/ai-model/prompt/util.js +176 -0
  124. package/dist/lib/ai-model/prompt/util.js.map +1 -0
  125. package/dist/lib/ai-model/prompt/yaml-generator.js +280 -0
  126. package/dist/lib/ai-model/prompt/yaml-generator.js.map +1 -0
  127. package/dist/lib/ai-model/service-caller/index.js +468 -0
  128. package/dist/lib/ai-model/service-caller/index.js.map +1 -0
  129. package/dist/lib/ai-model/ui-tars-planning.js +238 -0
  130. package/dist/lib/ai-model/ui-tars-planning.js.map +1 -0
  131. package/dist/lib/device/index.js +255 -0
  132. package/dist/lib/device/index.js.map +1 -0
  133. package/dist/lib/image/index.js +56 -0
  134. package/dist/lib/image/index.js.map +1 -0
  135. package/dist/lib/index.js +103 -0
  136. package/dist/lib/index.js.map +1 -0
  137. package/dist/lib/insight/index.js +265 -0
  138. package/dist/lib/insight/index.js.map +1 -0
  139. package/dist/lib/insight/utils.js +49 -0
  140. package/dist/lib/insight/utils.js.map +1 -0
  141. package/dist/lib/report.js +122 -0
  142. package/dist/lib/report.js.map +1 -0
  143. package/dist/lib/tree.js +44 -0
  144. package/dist/lib/tree.js.map +1 -0
  145. package/dist/lib/types.js +82 -0
  146. package/dist/lib/types.js.map +1 -0
  147. package/dist/lib/utils.js +281 -0
  148. package/dist/lib/utils.js.map +1 -0
  149. package/dist/lib/yaml/builder.js +57 -0
  150. package/dist/lib/yaml/builder.js.map +1 -0
  151. package/dist/lib/yaml/index.js +80 -0
  152. package/dist/lib/yaml/index.js.map +1 -0
  153. package/dist/lib/yaml/player.js +406 -0
  154. package/dist/lib/yaml/player.js.map +1 -0
  155. package/dist/lib/yaml/utils.js +126 -0
  156. package/dist/lib/yaml/utils.js.map +1 -0
  157. package/dist/lib/yaml.js +20 -0
  158. package/dist/lib/yaml.js.map +1 -0
  159. package/dist/types/agent/agent.d.ts +156 -0
  160. package/dist/types/agent/common.d.ts +0 -0
  161. package/dist/types/agent/index.d.ts +9 -0
  162. package/dist/types/agent/task-cache.d.ts +48 -0
  163. package/dist/types/agent/tasks.d.ts +48 -0
  164. package/dist/types/agent/ui-utils.d.ts +7 -0
  165. package/dist/types/agent/utils.d.ts +52 -0
  166. package/dist/types/ai-model/action-executor.d.ts +19 -0
  167. package/dist/types/ai-model/common.d.ts +569 -0
  168. package/dist/types/ai-model/conversation-history.d.ts +18 -0
  169. package/dist/types/ai-model/index.d.ts +13 -0
  170. package/dist/types/ai-model/inspect.d.ts +46 -0
  171. package/dist/types/ai-model/llm-planning.d.ts +11 -0
  172. package/dist/types/ai-model/prompt/assertion.d.ts +2 -0
  173. package/dist/types/ai-model/prompt/common.d.ts +2 -0
  174. package/dist/types/ai-model/prompt/describe.d.ts +1 -0
  175. package/dist/types/ai-model/prompt/extraction.d.ts +4 -0
  176. package/dist/types/ai-model/prompt/llm-locator.d.ts +8 -0
  177. package/dist/types/ai-model/prompt/llm-planning.d.ts +9 -0
  178. package/dist/types/ai-model/prompt/llm-section-locator.d.ts +5 -0
  179. package/dist/types/ai-model/prompt/playwright-generator.d.ts +26 -0
  180. package/dist/types/ai-model/prompt/ui-tars-locator.d.ts +1 -0
  181. package/dist/types/ai-model/prompt/ui-tars-planning.d.ts +2 -0
  182. package/dist/types/ai-model/prompt/util.d.ts +47 -0
  183. package/dist/types/ai-model/prompt/yaml-generator.d.ts +100 -0
  184. package/dist/types/ai-model/service-caller/index.d.ts +26 -0
  185. package/dist/types/ai-model/ui-tars-planning.d.ts +59 -0
  186. package/dist/types/device/index.d.ts +2158 -0
  187. package/dist/types/image/index.d.ts +1 -0
  188. package/dist/types/index.d.ts +12 -0
  189. package/dist/types/insight/index.d.ts +31 -0
  190. package/dist/types/insight/utils.d.ts +2 -0
  191. package/dist/types/report.d.ts +12 -0
  192. package/dist/types/tree.d.ts +1 -0
  193. package/dist/types/types.d.ts +412 -0
  194. package/dist/types/utils.d.ts +40 -0
  195. package/dist/types/yaml/builder.d.ts +2 -0
  196. package/dist/types/yaml/index.d.ts +3 -0
  197. package/dist/types/yaml/player.d.ts +34 -0
  198. package/dist/types/yaml/utils.d.ts +9 -0
  199. package/dist/types/yaml.d.ts +178 -0
  200. package/package.json +124 -0
package/dist/lib/ai-model/service-caller/index.js
@@ -0,0 +1,468 @@
+ "use strict";
+ var __webpack_modules__ = {
+ "langsmith/wrappers": function(module) {
+ module.exports = import("langsmith/wrappers").then(function(module) {
+ return module;
+ });
+ }
+ };
+ var __webpack_module_cache__ = {};
+ function __webpack_require__(moduleId) {
+ var cachedModule = __webpack_module_cache__[moduleId];
+ if (void 0 !== cachedModule) return cachedModule.exports;
+ var module = __webpack_module_cache__[moduleId] = {
+ exports: {}
+ };
+ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
+ return module.exports;
+ }
+ (()=>{
+ __webpack_require__.n = (module)=>{
+ var getter = module && module.__esModule ? ()=>module['default'] : ()=>module;
+ __webpack_require__.d(getter, {
+ a: getter
+ });
+ return getter;
+ };
+ })();
+ (()=>{
+ __webpack_require__.d = (exports1, definition)=>{
+ for(var key in definition)if (__webpack_require__.o(definition, key) && !__webpack_require__.o(exports1, key)) Object.defineProperty(exports1, key, {
+ enumerable: true,
+ get: definition[key]
+ });
+ };
+ })();
+ (()=>{
+ __webpack_require__.o = (obj, prop)=>Object.prototype.hasOwnProperty.call(obj, prop);
+ })();
+ (()=>{
+ __webpack_require__.r = (exports1)=>{
+ if ('undefined' != typeof Symbol && Symbol.toStringTag) Object.defineProperty(exports1, Symbol.toStringTag, {
+ value: 'Module'
+ });
+ Object.defineProperty(exports1, '__esModule', {
+ value: true
+ });
+ };
+ })();
+ var __webpack_exports__ = {};
+ (()=>{
+ __webpack_require__.r(__webpack_exports__);
+ __webpack_require__.d(__webpack_exports__, {
+ extractJSONFromCodeBlock: ()=>extractJSONFromCodeBlock,
+ callAIWithStringResponse: ()=>callAIWithStringResponse,
+ preprocessDoubaoBboxJson: ()=>preprocessDoubaoBboxJson,
+ callAIWithObjectResponse: ()=>callAIWithObjectResponse,
+ getResponseFormat: ()=>getResponseFormat,
+ safeParseJson: ()=>safeParseJson,
+ callAI: ()=>callAI
+ });
+ const external_types_js_namespaceObject = require("../../types.js");
+ const sdk_namespaceObject = require("@anthropic-ai/sdk");
+ const identity_namespaceObject = require("@azure/identity");
+ const env_namespaceObject = require("@sqaitech/shared/env");
+ const img_namespaceObject = require("@sqaitech/shared/img");
+ const logger_namespaceObject = require("@sqaitech/shared/logger");
+ const utils_namespaceObject = require("@sqaitech/shared/utils");
+ const external_https_proxy_agent_namespaceObject = require("https-proxy-agent");
+ const external_jsonrepair_namespaceObject = require("jsonrepair");
+ const external_openai_namespaceObject = require("openai");
+ var external_openai_default = /*#__PURE__*/ __webpack_require__.n(external_openai_namespaceObject);
+ const external_socks_proxy_agent_namespaceObject = require("socks-proxy-agent");
+ const external_common_js_namespaceObject = require("../common.js");
+ const assertion_js_namespaceObject = require("../prompt/assertion.js");
+ const llm_locator_js_namespaceObject = require("../prompt/llm-locator.js");
+ const llm_planning_js_namespaceObject = require("../prompt/llm-planning.js");
+ async function createChatClient({ AIActionTypeValue, modelConfig }) {
+ const { socksProxy, httpProxy, modelName, openaiBaseURL, openaiApiKey, openaiExtraConfig, openaiUseAzureDeprecated, useAzureOpenai, azureOpenaiScope, azureOpenaiKey, azureOpenaiEndpoint, azureOpenaiApiVersion, azureOpenaiDeployment, azureExtraConfig, useAnthropicSdk, anthropicApiKey, modelDescription, uiTarsModelVersion: uiTarsVersion, vlMode } = modelConfig;
+ let openai;
+ let proxyAgent;
+ const debugProxy = (0, logger_namespaceObject.getDebug)('ai:call:proxy');
+ if (httpProxy) {
+ debugProxy('using http proxy', httpProxy);
+ proxyAgent = new external_https_proxy_agent_namespaceObject.HttpsProxyAgent(httpProxy);
+ } else if (socksProxy) {
+ debugProxy('using socks proxy', socksProxy);
+ proxyAgent = new external_socks_proxy_agent_namespaceObject.SocksProxyAgent(socksProxy);
+ }
+ if (openaiUseAzureDeprecated) openai = new external_openai_namespaceObject.AzureOpenAI({
+ baseURL: openaiBaseURL,
+ apiKey: openaiApiKey,
+ httpAgent: proxyAgent,
+ ...openaiExtraConfig,
+ dangerouslyAllowBrowser: true
+ });
+ else if (useAzureOpenai) {
+ let tokenProvider;
+ if (azureOpenaiScope) {
+ (0, utils_namespaceObject.assert)(!utils_namespaceObject.ifInBrowser, 'Azure OpenAI is not supported in browser with Midscene.');
+ const credential = new identity_namespaceObject.DefaultAzureCredential();
+ tokenProvider = (0, identity_namespaceObject.getBearerTokenProvider)(credential, azureOpenaiScope);
+ openai = new external_openai_namespaceObject.AzureOpenAI({
+ azureADTokenProvider: tokenProvider,
+ endpoint: azureOpenaiEndpoint,
+ apiVersion: azureOpenaiApiVersion,
+ deployment: azureOpenaiDeployment,
+ ...openaiExtraConfig,
+ ...azureExtraConfig
+ });
+ } else openai = new external_openai_namespaceObject.AzureOpenAI({
+ apiKey: azureOpenaiKey,
+ endpoint: azureOpenaiEndpoint,
+ apiVersion: azureOpenaiApiVersion,
+ deployment: azureOpenaiDeployment,
+ dangerouslyAllowBrowser: true,
+ ...openaiExtraConfig,
+ ...azureExtraConfig
+ });
+ } else if (!useAnthropicSdk) openai = new (external_openai_default())({
+ baseURL: openaiBaseURL,
+ apiKey: openaiApiKey,
+ httpAgent: proxyAgent,
+ ...openaiExtraConfig,
+ defaultHeaders: {
+ ...(null == openaiExtraConfig ? void 0 : openaiExtraConfig.defaultHeaders) || {},
+ [env_namespaceObject.SQAI_API_TYPE]: AIActionTypeValue.toString()
+ },
+ dangerouslyAllowBrowser: true
+ });
+ if (openai && env_namespaceObject.globalConfigManager.getEnvConfigInBoolean(env_namespaceObject.SQAI_LANGSMITH_DEBUG)) {
+ if (utils_namespaceObject.ifInBrowser) throw new Error('langsmith is not supported in browser');
+ console.log('DEBUGGING MODE: langsmith wrapper enabled');
+ const { wrapOpenAI } = await Promise.resolve().then(__webpack_require__.bind(__webpack_require__, "langsmith/wrappers"));
+ openai = wrapOpenAI(openai);
+ }
+ if (void 0 !== openai) return {
+ completion: openai.chat.completions,
+ style: 'openai',
+ modelName,
+ modelDescription,
+ uiTarsVersion,
+ vlMode
+ };
+ if (useAnthropicSdk) openai = new sdk_namespaceObject.Anthropic({
+ apiKey: anthropicApiKey,
+ httpAgent: proxyAgent,
+ dangerouslyAllowBrowser: true
+ });
+ if (void 0 !== openai && openai.messages) return {
+ completion: openai.messages,
+ style: 'anthropic',
+ modelName,
+ modelDescription,
+ uiTarsVersion,
+ vlMode
+ };
+ throw new Error('Openai SDK or Anthropic SDK is not initialized');
+ }
+ async function callAI(messages, AIActionTypeValue, modelConfig, options) {
+ const { completion, style, modelName, modelDescription, uiTarsVersion, vlMode } = await createChatClient({
+ AIActionTypeValue,
+ modelConfig
+ });
+ const responseFormat = getResponseFormat(modelName, AIActionTypeValue);
+ const maxTokens = env_namespaceObject.globalConfigManager.getEnvConfigValue(env_namespaceObject.OPENAI_MAX_TOKENS);
+ const debugCall = (0, logger_namespaceObject.getDebug)('ai:call');
+ const debugProfileStats = (0, logger_namespaceObject.getDebug)('ai:profile:stats');
+ const debugProfileDetail = (0, logger_namespaceObject.getDebug)('ai:profile:detail');
+ const startTime = Date.now();
+ const isStreaming = (null == options ? void 0 : options.stream) && (null == options ? void 0 : options.onChunk);
+ let content;
+ let accumulated = '';
+ let usage;
+ let timeCost;
+ const commonConfig = {
+ temperature: 'vlm-ui-tars' === vlMode ? 0.0 : 0.1,
+ stream: !!isStreaming,
+ max_tokens: 'number' == typeof maxTokens ? maxTokens : Number.parseInt(maxTokens || '2048', 10),
+ ...'qwen-vl' === vlMode || 'qwen3-vl' === vlMode ? {
+ vl_high_resolution_images: true
+ } : {}
+ };
+ try {
+ if ('openai' === style) {
+ debugCall(`sending ${isStreaming ? 'streaming ' : ''}request to ${modelName}`);
+ if (isStreaming) {
+ const stream = await completion.create({
+ model: modelName,
+ messages,
+ response_format: responseFormat,
+ ...commonConfig
+ }, {
+ stream: true
+ });
+ for await (const chunk of stream){
+ var _chunk_choices__delta, _chunk_choices_, _chunk_choices, _chunk_choices__delta1, _chunk_choices_1, _chunk_choices1, _chunk_choices_2, _chunk_choices2;
+ const content = (null == (_chunk_choices = chunk.choices) ? void 0 : null == (_chunk_choices_ = _chunk_choices[0]) ? void 0 : null == (_chunk_choices__delta = _chunk_choices_.delta) ? void 0 : _chunk_choices__delta.content) || '';
+ const reasoning_content = (null == (_chunk_choices1 = chunk.choices) ? void 0 : null == (_chunk_choices_1 = _chunk_choices1[0]) ? void 0 : null == (_chunk_choices__delta1 = _chunk_choices_1.delta) ? void 0 : _chunk_choices__delta1.reasoning_content) || '';
+ if (chunk.usage) usage = chunk.usage;
+ if (content || reasoning_content) {
+ accumulated += content;
+ const chunkData = {
+ content,
+ reasoning_content,
+ accumulated,
+ isComplete: false,
+ usage: void 0
+ };
+ options.onChunk(chunkData);
+ }
+ if (null == (_chunk_choices2 = chunk.choices) ? void 0 : null == (_chunk_choices_2 = _chunk_choices2[0]) ? void 0 : _chunk_choices_2.finish_reason) {
+ timeCost = Date.now() - startTime;
+ if (!usage) {
+ const estimatedTokens = Math.max(1, Math.floor(accumulated.length / 4));
+ usage = {
+ prompt_tokens: estimatedTokens,
+ completion_tokens: estimatedTokens,
+ total_tokens: 2 * estimatedTokens
+ };
+ }
+ const finalChunk = {
+ content: '',
+ accumulated,
+ reasoning_content: '',
+ isComplete: true,
+ usage: {
+ prompt_tokens: usage.prompt_tokens ?? 0,
+ completion_tokens: usage.completion_tokens ?? 0,
+ total_tokens: usage.total_tokens ?? 0,
+ time_cost: timeCost ?? 0,
+ model_name: modelName,
+ model_description: modelDescription,
+ intent: modelConfig.intent
+ }
+ };
+ options.onChunk(finalChunk);
+ break;
+ }
+ }
+ content = accumulated;
+ debugProfileStats(`streaming model, ${modelName}, mode, ${vlMode || 'default'}, cost-ms, ${timeCost}`);
+ } else {
+ var _result_usage, _result_usage1, _result_usage2;
+ const result = await completion.create({
+ model: modelName,
+ messages,
+ response_format: responseFormat,
+ ...commonConfig
+ });
+ timeCost = Date.now() - startTime;
+ debugProfileStats(`model, ${modelName}, mode, ${vlMode || 'default'}, ui-tars-version, ${uiTarsVersion}, prompt-tokens, ${(null == (_result_usage = result.usage) ? void 0 : _result_usage.prompt_tokens) || ''}, completion-tokens, ${(null == (_result_usage1 = result.usage) ? void 0 : _result_usage1.completion_tokens) || ''}, total-tokens, ${(null == (_result_usage2 = result.usage) ? void 0 : _result_usage2.total_tokens) || ''}, cost-ms, ${timeCost}, requestId, ${result._request_id || ''}`);
+ debugProfileDetail(`model usage detail: ${JSON.stringify(result.usage)}`);
+ (0, utils_namespaceObject.assert)(result.choices, `invalid response from LLM service: ${JSON.stringify(result)}`);
+ content = result.choices[0].message.content;
+ usage = result.usage;
+ }
+ debugCall(`response: ${content}`);
+ (0, utils_namespaceObject.assert)(content, 'empty content');
+ } else if ('anthropic' === style) {
+ const convertImageContent = (content)=>{
+ if ('image_url' === content.type) {
+ const imgBase64 = content.image_url.url;
+ (0, utils_namespaceObject.assert)(imgBase64, 'image_url is required');
+ const { mimeType, body } = (0, img_namespaceObject.parseBase64)(content.image_url.url);
+ return {
+ source: {
+ type: 'base64',
+ media_type: mimeType,
+ data: body
+ },
+ type: 'image'
+ };
+ }
+ return content;
+ };
+ if (isStreaming) {
+ const stream = await completion.create({
+ model: modelName,
+ system: 'You are a versatile professional in software UI automation',
+ messages: messages.map((m)=>({
+ role: 'user',
+ content: Array.isArray(m.content) ? m.content.map(convertImageContent) : m.content
+ })),
+ response_format: responseFormat,
+ ...commonConfig
+ });
+ for await (const chunk of stream){
+ var _chunk_delta;
+ const content = (null == (_chunk_delta = chunk.delta) ? void 0 : _chunk_delta.text) || '';
+ if (content) {
+ accumulated += content;
+ const chunkData = {
+ content,
+ accumulated,
+ reasoning_content: '',
+ isComplete: false,
+ usage: void 0
+ };
+ options.onChunk(chunkData);
+ }
+ if ('message_stop' === chunk.type) {
+ timeCost = Date.now() - startTime;
+ const anthropicUsage = chunk.usage;
+ const finalChunk = {
+ content: '',
+ accumulated,
+ reasoning_content: '',
+ isComplete: true,
+ usage: anthropicUsage ? {
+ prompt_tokens: anthropicUsage.input_tokens ?? 0,
+ completion_tokens: anthropicUsage.output_tokens ?? 0,
+ total_tokens: (anthropicUsage.input_tokens ?? 0) + (anthropicUsage.output_tokens ?? 0),
+ time_cost: timeCost ?? 0,
+ model_name: modelName,
+ model_description: modelDescription,
+ intent: modelConfig.intent
+ } : void 0
+ };
+ options.onChunk(finalChunk);
+ break;
+ }
+ }
+ content = accumulated;
+ } else {
+ const result = await completion.create({
+ model: modelName,
+ system: 'You are a versatile professional in software UI automation',
+ messages: messages.map((m)=>({
+ role: 'user',
+ content: Array.isArray(m.content) ? m.content.map(convertImageContent) : m.content
+ })),
+ response_format: responseFormat,
+ ...commonConfig
+ });
+ timeCost = Date.now() - startTime;
+ content = result.content[0].text;
+ usage = result.usage;
+ }
+ (0, utils_namespaceObject.assert)(content, 'empty content');
+ }
+ if (isStreaming && !usage) {
+ const estimatedTokens = Math.max(1, Math.floor((content || '').length / 4));
+ usage = {
+ prompt_tokens: estimatedTokens,
+ completion_tokens: estimatedTokens,
+ total_tokens: 2 * estimatedTokens
+ };
+ }
+ return {
+ content: content || '',
+ usage: usage ? {
+ prompt_tokens: usage.prompt_tokens ?? 0,
+ completion_tokens: usage.completion_tokens ?? 0,
+ total_tokens: usage.total_tokens ?? 0,
+ time_cost: timeCost ?? 0,
+ model_name: modelName,
+ model_description: modelDescription,
+ intent: modelConfig.intent
+ } : void 0,
+ isStreamed: !!isStreaming
+ };
+ } catch (e) {
+ console.error(' call AI error', e);
+ const newError = new Error(`failed to call ${isStreaming ? 'streaming ' : ''}AI model service: ${e.message}. Trouble shooting: https://sqai.tech/model-provider.html`, {
+ cause: e
+ });
+ throw newError;
+ }
+ }
+ const getResponseFormat = (modelName, AIActionTypeValue)=>{
+ let responseFormat;
+ if (modelName.includes('gpt-4')) switch(AIActionTypeValue){
+ case external_common_js_namespaceObject.AIActionType.ASSERT:
+ responseFormat = assertion_js_namespaceObject.assertSchema;
+ break;
+ case external_common_js_namespaceObject.AIActionType.INSPECT_ELEMENT:
+ responseFormat = llm_locator_js_namespaceObject.locatorSchema;
+ break;
+ case external_common_js_namespaceObject.AIActionType.PLAN:
+ responseFormat = llm_planning_js_namespaceObject.planSchema;
+ break;
+ case external_common_js_namespaceObject.AIActionType.EXTRACT_DATA:
+ case external_common_js_namespaceObject.AIActionType.DESCRIBE_ELEMENT:
+ responseFormat = {
+ type: external_types_js_namespaceObject.AIResponseFormat.JSON
+ };
+ break;
+ case external_common_js_namespaceObject.AIActionType.TEXT:
+ responseFormat = void 0;
+ break;
+ }
+ if ('gpt-4o-2024-05-13' === modelName && AIActionTypeValue !== external_common_js_namespaceObject.AIActionType.TEXT) responseFormat = {
+ type: external_types_js_namespaceObject.AIResponseFormat.JSON
+ };
+ return responseFormat;
+ };
+ async function callAIWithObjectResponse(messages, AIActionTypeValue, modelConfig) {
+ const response = await callAI(messages, AIActionTypeValue, modelConfig);
+ (0, utils_namespaceObject.assert)(response, 'empty response');
+ const vlMode = modelConfig.vlMode;
+ const jsonContent = safeParseJson(response.content, vlMode);
+ return {
+ content: jsonContent,
+ usage: response.usage
+ };
+ }
+ async function callAIWithStringResponse(msgs, AIActionTypeValue, modelConfig) {
+ const { content, usage } = await callAI(msgs, AIActionTypeValue, modelConfig);
+ return {
+ content,
+ usage
+ };
+ }
+ function extractJSONFromCodeBlock(response) {
+ try {
+ const jsonMatch = response.match(/^\s*(\{[\s\S]*\})\s*$/);
+ if (jsonMatch) return jsonMatch[1];
+ const codeBlockMatch = response.match(/```(?:json)?\s*(\{[\s\S]*?\})\s*```/);
+ if (codeBlockMatch) return codeBlockMatch[1];
+ const jsonLikeMatch = response.match(/\{[\s\S]*\}/);
+ if (jsonLikeMatch) return jsonLikeMatch[0];
+ } catch {}
+ return response;
+ }
+ function preprocessDoubaoBboxJson(input) {
+ if (input.includes('bbox')) while(/\d+\s+\d+/.test(input))input = input.replace(/(\d+)\s+(\d+)/g, '$1,$2');
+ return input;
+ }
+ function safeParseJson(input, vlMode) {
+ const cleanJsonString = extractJSONFromCodeBlock(input);
+ if (null == cleanJsonString ? void 0 : cleanJsonString.match(/\((\d+),(\d+)\)/)) {
+ var _cleanJsonString_match;
+ return null == (_cleanJsonString_match = cleanJsonString.match(/\((\d+),(\d+)\)/)) ? void 0 : _cleanJsonString_match.slice(1).map(Number);
+ }
+ try {
+ return JSON.parse(cleanJsonString);
+ } catch {}
+ try {
+ return JSON.parse((0, external_jsonrepair_namespaceObject.jsonrepair)(cleanJsonString));
+ } catch (e) {}
+ if ('doubao-vision' === vlMode || 'vlm-ui-tars' === vlMode) {
+ const jsonString = preprocessDoubaoBboxJson(cleanJsonString);
+ return JSON.parse((0, external_jsonrepair_namespaceObject.jsonrepair)(jsonString));
+ }
+ throw Error(`failed to parse json response: ${input}`);
+ }
+ })();
+ exports.callAI = __webpack_exports__.callAI;
+ exports.callAIWithObjectResponse = __webpack_exports__.callAIWithObjectResponse;
+ exports.callAIWithStringResponse = __webpack_exports__.callAIWithStringResponse;
+ exports.extractJSONFromCodeBlock = __webpack_exports__.extractJSONFromCodeBlock;
+ exports.getResponseFormat = __webpack_exports__.getResponseFormat;
+ exports.preprocessDoubaoBboxJson = __webpack_exports__.preprocessDoubaoBboxJson;
+ exports.safeParseJson = __webpack_exports__.safeParseJson;
+ for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+ "callAI",
+ "callAIWithObjectResponse",
+ "callAIWithStringResponse",
+ "extractJSONFromCodeBlock",
+ "getResponseFormat",
+ "preprocessDoubaoBboxJson",
+ "safeParseJson"
+ ].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+ Object.defineProperty(exports, '__esModule', {
+ value: true
+ });
+
+ //# sourceMappingURL=index.js.map
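
For reference, a minimal sketch of how the JSON-recovery chain above behaves: extractJSONFromCodeBlock is tried first, then plain JSON.parse, then a jsonrepair retry, with preprocessDoubaoBboxJson comma-joining space-separated bbox numbers for the doubao-vision / vlm-ui-tars modes. The deep import path below is an assumption — this diff does not show how the package's public entry point re-exports these functions.

// Sketch only: the import path is assumed, not confirmed by this diff.
import {
  safeParseJson,
  preprocessDoubaoBboxJson,
} from '@sqaitech/core/dist/lib/ai-model/service-caller/index.js';

// Plain JSON, fenced JSON, and JSON embedded in prose all resolve:
console.log(safeParseJson('{"a": 1}', undefined));               // { a: 1 }
console.log(safeParseJson('```json\n{"a": 1}\n```', undefined)); // { a: 1 }
// A bare "(x,y)" point short-circuits to a pair of numbers:
console.log(safeParseJson('(120,340)', undefined));              // [ 120, 340 ]
// Space-separated bbox values are comma-joined before the jsonrepair retry:
console.log(preprocessDoubaoBboxJson('{"bbox": [940 445 969 490]}'));
// -> '{"bbox": [940,445,969,490]}'
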
package/dist/lib/ai-model/service-caller/index.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"ai-model\\service-caller\\index.js","sources":["webpack://@sqaitech/core/webpack/runtime/compat_get_default_export","webpack://@sqaitech/core/webpack/runtime/define_property_getters","webpack://@sqaitech/core/webpack/runtime/has_own_property","webpack://@sqaitech/core/webpack/runtime/make_namespace_object","webpack://@sqaitech/core/./src/ai-model/service-caller/index.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
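
A second, heavily hedged sketch: driving callAI in streaming mode. Everything not visible in the diff is an assumption — the import path, the numeric AIActionType value (the real enum lives in ../common), and the minimal modelConfig, which sets only fields that createChatClient destructures above.

import { callAI } from '@sqaitech/core/dist/lib/ai-model/service-caller/index.js';

const { content, usage, isStreamed } = await callAI(
  [{ role: 'user', content: 'Reply with {"ok": true} as JSON' }],
  0, // hypothetical AIActionType value; the real enum is imported from ../common
  {
    // Only fields read above are set; IModelConfig likely carries more.
    modelName: 'gpt-4o-mini',
    openaiBaseURL: 'https://api.openai.com/v1',
    openaiApiKey: process.env.OPENAI_API_KEY,
  } as any,
  {
    stream: true,
    // onChunk receives incremental content, then a final chunk carrying usage
    onChunk: (chunk) => {
      if (!chunk.isComplete) process.stdout.write(chunk.content);
    },
  },
);
console.log('\nstreamed:', isStreamed, 'total tokens:', usage?.total_tokens, 'chars:', content.length);
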