@midscene/core 1.0.1-beta-20251208031823.0 → 1.0.1-beta-20251208033501.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es/agent/task-builder.mjs +23 -26
- package/dist/es/agent/task-builder.mjs.map +1 -1
- package/dist/es/agent/tasks.mjs +1 -4
- package/dist/es/agent/tasks.mjs.map +1 -1
- package/dist/es/agent/utils.mjs +1 -1
- package/dist/es/ai-model/prompt/llm-planning.mjs +95 -20
- package/dist/es/ai-model/prompt/llm-planning.mjs.map +1 -1
- package/dist/es/ai-model/service-caller/index.mjs +39 -54
- package/dist/es/ai-model/service-caller/index.mjs.map +1 -1
- package/dist/es/utils.mjs +2 -2
- package/dist/lib/agent/agent.js +2 -2
- package/dist/lib/agent/common.js +1 -1
- package/dist/lib/agent/execution-session.js +2 -2
- package/dist/lib/agent/index.js +2 -2
- package/dist/lib/agent/task-builder.js +25 -28
- package/dist/lib/agent/task-builder.js.map +1 -1
- package/dist/lib/agent/task-cache.js +2 -2
- package/dist/lib/agent/tasks.js +3 -6
- package/dist/lib/agent/tasks.js.map +1 -1
- package/dist/lib/agent/ui-utils.js +2 -2
- package/dist/lib/agent/utils.js +3 -3
- package/dist/lib/ai-model/conversation-history.js +2 -2
- package/dist/lib/ai-model/index.js +2 -2
- package/dist/lib/ai-model/inspect.js +2 -2
- package/dist/lib/ai-model/llm-planning.js +2 -2
- package/dist/lib/ai-model/prompt/assertion.js +2 -2
- package/dist/lib/ai-model/prompt/common.js +2 -2
- package/dist/lib/ai-model/prompt/describe.js +2 -2
- package/dist/lib/ai-model/prompt/extraction.js +2 -2
- package/dist/lib/ai-model/prompt/llm-locator.js +2 -2
- package/dist/lib/ai-model/prompt/llm-planning.js +100 -22
- package/dist/lib/ai-model/prompt/llm-planning.js.map +1 -1
- package/dist/lib/ai-model/prompt/llm-section-locator.js +2 -2
- package/dist/lib/ai-model/prompt/order-sensitive-judge.js +2 -2
- package/dist/lib/ai-model/prompt/playwright-generator.js +2 -2
- package/dist/lib/ai-model/prompt/ui-tars-locator.js +2 -2
- package/dist/lib/ai-model/prompt/ui-tars-planning.js +2 -2
- package/dist/lib/ai-model/prompt/util.js +2 -2
- package/dist/lib/ai-model/prompt/yaml-generator.js +2 -2
- package/dist/lib/ai-model/service-caller/index.js +43 -55
- package/dist/lib/ai-model/service-caller/index.js.map +1 -1
- package/dist/lib/ai-model/ui-tars-planning.js +2 -2
- package/dist/lib/common.js +2 -2
- package/dist/lib/device/device-options.js +1 -1
- package/dist/lib/device/index.js +2 -2
- package/dist/lib/image/index.js +2 -2
- package/dist/lib/index.js +2 -2
- package/dist/lib/report.js +2 -2
- package/dist/lib/service/index.js +2 -2
- package/dist/lib/service/utils.js +2 -2
- package/dist/lib/task-runner.js +2 -2
- package/dist/lib/tree.js +2 -2
- package/dist/lib/types.js +3 -3
- package/dist/lib/utils.js +4 -4
- package/dist/lib/yaml/builder.js +2 -2
- package/dist/lib/yaml/index.js +4 -4
- package/dist/lib/yaml/player.js +2 -2
- package/dist/lib/yaml/utils.js +2 -2
- package/dist/lib/yaml.js +1 -1
- package/dist/types/ai-model/prompt/llm-planning.d.ts +2 -0
- package/dist/types/ai-model/service-caller/index.d.ts +3 -1
- package/package.json +5 -5
package/dist/lib/ai-model/service-caller/index.js
CHANGED
@@ -38,78 +38,37 @@ __webpack_require__.d(__webpack_exports__, {
     preprocessDoubaoBboxJson: ()=>preprocessDoubaoBboxJson,
     callAIWithStringResponse: ()=>callAIWithStringResponse,
     safeParseJson: ()=>safeParseJson,
+    getResponseFormat: ()=>getResponseFormat,
     callAI: ()=>callAI
 });
+const external_types_js_namespaceObject = require("../../types.js");
 const env_namespaceObject = require("@midscene/shared/env");
 const logger_namespaceObject = require("@midscene/shared/logger");
 const utils_namespaceObject = require("@midscene/shared/utils");
+const external_https_proxy_agent_namespaceObject = require("https-proxy-agent");
 const external_jsonrepair_namespaceObject = require("jsonrepair");
 const external_openai_namespaceObject = require("openai");
 var external_openai_default = /*#__PURE__*/ __webpack_require__.n(external_openai_namespaceObject);
+const external_socks_proxy_agent_namespaceObject = require("socks-proxy-agent");
+const external_common_js_namespaceObject = require("../../common.js");
+const assertion_js_namespaceObject = require("../prompt/assertion.js");
+const llm_planning_js_namespaceObject = require("../prompt/llm-planning.js");
 async function createChatClient({ AIActionTypeValue, modelConfig }) {
     const { socksProxy, httpProxy, modelName, openaiBaseURL, openaiApiKey, openaiExtraConfig, modelDescription, uiTarsModelVersion: uiTarsVersion, vlMode, createOpenAIClient, timeout } = modelConfig;
     let proxyAgent;
     const debugProxy = (0, logger_namespaceObject.getDebug)('ai:call:proxy');
-    const sanitizeProxyUrl = (url)=>{
-        try {
-            const parsed = new URL(url);
-            if (parsed.username) {
-                parsed.password = '****';
-                return parsed.href;
-            }
-            return url;
-        } catch {
-            return url;
-        }
-    };
     if (httpProxy) {
-        debugProxy('using http proxy', sanitizeProxyUrl(httpProxy));
-        if (ifInBrowser) console.warn('HTTP proxy is configured but not supported in browser environment');
-        else {
-            const moduleName = 'undici';
-            const { ProxyAgent } = await import(moduleName);
-            proxyAgent = new ProxyAgent({
-                uri: httpProxy
-            });
-        }
+        debugProxy('using http proxy', httpProxy);
+        proxyAgent = new external_https_proxy_agent_namespaceObject.HttpsProxyAgent(httpProxy);
     } else if (socksProxy) {
-        debugProxy('using socks proxy', sanitizeProxyUrl(socksProxy));
-        if (ifInBrowser) console.warn('SOCKS proxy is configured but not supported in browser environment');
-        else try {
-            const moduleName = 'fetch-socks';
-            const { socksDispatcher } = await import(moduleName);
-            const proxyUrl = new URL(socksProxy);
-            if (!proxyUrl.hostname) throw new Error('SOCKS proxy URL must include a valid hostname');
-            const port = Number.parseInt(proxyUrl.port, 10);
-            if (!proxyUrl.port || Number.isNaN(port)) throw new Error('SOCKS proxy URL must include a valid port');
-            const protocol = proxyUrl.protocol.replace(':', '');
-            const socksType = 'socks4' === protocol ? 4 : 'socks5' === protocol ? 5 : 5;
-            proxyAgent = socksDispatcher({
-                type: socksType,
-                host: proxyUrl.hostname,
-                port,
-                ...proxyUrl.username ? {
-                    userId: decodeURIComponent(proxyUrl.username),
-                    password: decodeURIComponent(proxyUrl.password || '')
-                } : {}
-            });
-            debugProxy('socks proxy configured successfully', {
-                type: socksType,
-                host: proxyUrl.hostname,
-                port: port
-            });
-        } catch (error) {
-            console.error('Failed to configure SOCKS proxy:', error);
-            throw new Error(`Invalid SOCKS proxy URL: ${socksProxy}. Expected format: socks4://host:port, socks5://host:port, or with authentication: socks5://user:pass@host:port`);
-        }
+        debugProxy('using socks proxy', socksProxy);
+        proxyAgent = new external_socks_proxy_agent_namespaceObject.SocksProxyAgent(socksProxy);
     }
     const openAIOptions = {
         baseURL: openaiBaseURL,
         apiKey: openaiApiKey,
         ...proxyAgent ? {
-            fetchOptions: {
-                dispatcher: proxyAgent
-            }
+            httpAgent: proxyAgent
         } : {},
         ...openaiExtraConfig,
         ...'number' == typeof timeout ? {
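In source terms (the TypeScript embedded in the updated sourcemap further down), this hunk swaps the lazily imported undici `ProxyAgent` / `fetch-socks` dispatcher for `https-proxy-agent` and `socks-proxy-agent` instances, handed to the OpenAI client as `httpAgent` instead of `fetchOptions.dispatcher`. A minimal sketch of the new wiring; `createClient` and its options bag are illustrative names, not the package API, though the field names follow the package source:

```ts
import { HttpsProxyAgent } from 'https-proxy-agent';
import { SocksProxyAgent } from 'socks-proxy-agent';
import OpenAI from 'openai';

// Sketch of the new proxy wiring, assuming openai v4-style ClientOptions.
function createClient(opts: {
  httpProxy?: string; // e.g. http://user:pass@proxy.example.com:8080
  socksProxy?: string; // e.g. socks5://127.0.0.1:1080
  baseURL?: string;
  apiKey?: string;
}): OpenAI {
  let proxyAgent: HttpsProxyAgent<string> | SocksProxyAgent | undefined;
  if (opts.httpProxy) {
    proxyAgent = new HttpsProxyAgent(opts.httpProxy);
  } else if (opts.socksProxy) {
    proxyAgent = new SocksProxyAgent(opts.socksProxy);
  }
  return new OpenAI({
    baseURL: opts.baseURL,
    apiKey: opts.apiKey,
    // previously: ...(proxyAgent ? { fetchOptions: { dispatcher: proxyAgent } } : {})
    ...(proxyAgent ? { httpAgent: proxyAgent as any } : {}),
    dangerouslyAllowBrowser: true,
  });
}
```

Note what leaves with the old branch: the credential-masking `sanitizeProxyUrl` logging helper, the in-browser warnings, and the SOCKS URL validation. Proxy URLs are now logged verbatim, and malformed ones surface from the agent constructors rather than the descriptive `Invalid SOCKS proxy URL` error.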
@@ -150,6 +109,7 @@ async function callAI(messages, AIActionTypeValue, modelConfig, options) {
         AIActionTypeValue,
         modelConfig
     });
+    const responseFormat = getResponseFormat(modelName, AIActionTypeValue);
     const maxTokens = env_namespaceObject.globalConfigManager.getEnvConfigValue(env_namespaceObject.MIDSCENE_MODEL_MAX_TOKENS) ?? env_namespaceObject.globalConfigManager.getEnvConfigValue(env_namespaceObject.OPENAI_MAX_TOKENS);
     const debugCall = (0, logger_namespaceObject.getDebug)('ai:call');
     const debugProfileStats = (0, logger_namespaceObject.getDebug)('ai:profile:stats');
@@ -188,6 +148,7 @@ async function callAI(messages, AIActionTypeValue, modelConfig, options) {
         const stream = await completion.create({
             model: modelName,
             messages,
+            response_format: responseFormat,
             ...commonConfig
         }, {
             stream: true
@@ -234,6 +195,7 @@ async function callAI(messages, AIActionTypeValue, modelConfig, options) {
         const result = await completion.create({
             model: modelName,
             messages,
+            response_format: responseFormat,
             ...commonConfig
         });
         timeCost = Date.now() - startTime;
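As in the streaming branch above, the non-streaming branch now threads the computed format into the request; per the sourcemap source, the call site is simply:

```ts
const result = await completion.create({
  model: modelName,
  messages,
  response_format: responseFormat, // undefined when no constraint applies
  ...commonConfig,
} as any);
```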
@@ -266,6 +228,30 @@ async function callAI(messages, AIActionTypeValue, modelConfig, options) {
         throw newError;
     }
 }
+const getResponseFormat = (modelName, AIActionTypeValue)=>{
+    let responseFormat;
+    if (modelName.includes('gpt-4')) switch(AIActionTypeValue){
+        case external_common_js_namespaceObject.AIActionType.ASSERT:
+            responseFormat = assertion_js_namespaceObject.assertSchema;
+            break;
+        case external_common_js_namespaceObject.AIActionType.PLAN:
+            responseFormat = llm_planning_js_namespaceObject.planSchema;
+            break;
+        case external_common_js_namespaceObject.AIActionType.EXTRACT_DATA:
+        case external_common_js_namespaceObject.AIActionType.DESCRIBE_ELEMENT:
+            responseFormat = {
+                type: external_types_js_namespaceObject.AIResponseFormat.JSON
+            };
+            break;
+        case external_common_js_namespaceObject.AIActionType.TEXT:
+            responseFormat = void 0;
+            break;
+    }
+    if ('gpt-4o-2024-05-13' === modelName && AIActionTypeValue !== external_common_js_namespaceObject.AIActionType.TEXT) responseFormat = {
+        type: external_types_js_namespaceObject.AIResponseFormat.JSON
+    };
+    return responseFormat;
+};
 async function callAIWithObjectResponse(messages, AIActionTypeValue, modelConfig) {
     const response = await callAI(messages, AIActionTypeValue, modelConfig);
     (0, utils_namespaceObject.assert)(response, 'empty response');
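The compiled `getResponseFormat` above corresponds to the following TypeScript, taken (lightly trimmed) from the `sourcesContent` embedded in the updated sourcemap below. `@/types` is the package's internal path alias, and `assertSchema` / `planSchema` are the JSON-schema response formats defined in the prompt modules:

```ts
import { AIResponseFormat } from '@/types';
import type OpenAI from 'openai';
import { AIActionType } from '../../common';
import { assertSchema } from '../prompt/assertion';
import { planSchema } from '../prompt/llm-planning';

export const getResponseFormat = (
  modelName: string,
  AIActionTypeValue: AIActionType,
):
  | OpenAI.ChatCompletionCreateParams['response_format']
  | OpenAI.ResponseFormatJSONObject => {
  let responseFormat:
    | OpenAI.ChatCompletionCreateParams['response_format']
    | OpenAI.ResponseFormatJSONObject
    | undefined;

  if (modelName.includes('gpt-4')) {
    switch (AIActionTypeValue) {
      case AIActionType.ASSERT:
        responseFormat = assertSchema; // json_schema format
        break;
      case AIActionType.PLAN:
        responseFormat = planSchema;
        break;
      case AIActionType.EXTRACT_DATA:
      case AIActionType.DESCRIBE_ELEMENT:
        responseFormat = { type: AIResponseFormat.JSON }; // json_object
        break;
      case AIActionType.TEXT:
        // No response format for plain text - return as-is
        responseFormat = undefined;
        break;
    }
  }

  // gpt-4o-2024-05-13 only supports the json_object response format,
  // so override the schema formats for everything except plain text
  if (
    modelName === 'gpt-4o-2024-05-13' &&
    AIActionTypeValue !== AIActionType.TEXT
  ) {
    responseFormat = { type: AIResponseFormat.JSON };
  }

  return responseFormat;
};
```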
@@ -338,16 +324,18 @@ exports.callAI = __webpack_exports__.callAI;
 exports.callAIWithObjectResponse = __webpack_exports__.callAIWithObjectResponse;
 exports.callAIWithStringResponse = __webpack_exports__.callAIWithStringResponse;
 exports.extractJSONFromCodeBlock = __webpack_exports__.extractJSONFromCodeBlock;
+exports.getResponseFormat = __webpack_exports__.getResponseFormat;
 exports.preprocessDoubaoBboxJson = __webpack_exports__.preprocessDoubaoBboxJson;
 exports.safeParseJson = __webpack_exports__.safeParseJson;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
     "callAI",
     "callAIWithObjectResponse",
     "callAIWithStringResponse",
     "extractJSONFromCodeBlock",
+    "getResponseFormat",
     "preprocessDoubaoBboxJson",
     "safeParseJson"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
     value: true
 });
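The remaining hunks shown here and in `ui-tars-planning.js` and `common.js` below are bundler housekeeping: the generated re-export loop's counter is renamed from `__webpack_i__` to `__rspack_i`. In plain terms the loop does the following (`staticallyExported` and `cjsExports` are stand-in names for the inline array literal and the CommonJS `exports` object):

```ts
// Equivalent of the generated interop loop, with stand-in names.
declare const __webpack_exports__: Record<string, unknown>; // the bundle's export object
declare const staticallyExported: string[]; // the literal name list in the diff
declare const cjsExports: Record<string, unknown>; // CommonJS `exports`

for (const key in __webpack_exports__) {
  // Copy every generated export that was not already re-exported by name above.
  if (staticallyExported.indexOf(key) === -1) {
    cjsExports[key] = __webpack_exports__[key];
  }
}
```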
package/dist/lib/ai-model/service-caller/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ai-model/service-caller/index.js","sources":["webpack/runtime/compat_get_default_export","webpack/runtime/define_property_getters","webpack/runtime/has_own_property","webpack/runtime/make_namespace_object","../../../../src/ai-model/service-caller/index.ts"],"sourcesContent":["// getDefaultExport function for compatibility with non-ESM modules\n__webpack_require__.n = (module) => {\n\tvar getter = module && module.__esModule ?\n\t\t() => (module['default']) :\n\t\t() => (module);\n\t__webpack_require__.d(getter, { a: getter });\n\treturn getter;\n};\n","__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n }\n }\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","import { AIResponseFormat, type AIUsageInfo } from '@/types';\nimport type { CodeGenerationChunk, StreamingCallback } from '@/types';\nimport {\n type IModelConfig,\n MIDSCENE_LANGFUSE_DEBUG,\n MIDSCENE_LANGSMITH_DEBUG,\n MIDSCENE_MODEL_MAX_TOKENS,\n OPENAI_MAX_TOKENS,\n type TVlModeTypes,\n type UITarsModelVersion,\n globalConfigManager,\n} from '@midscene/shared/env';\n\nimport { getDebug } from '@midscene/shared/logger';\nimport { assert, ifInBrowser } from '@midscene/shared/utils';\nimport { jsonrepair } from 'jsonrepair';\nimport OpenAI from 'openai';\nimport type { ChatCompletionMessageParam } from 'openai/resources/index';\nimport type { Stream } from 'openai/streaming';\nimport type { AIActionType, AIArgs } from '../../common';\n\nasync function createChatClient({\n AIActionTypeValue,\n modelConfig,\n}: {\n AIActionTypeValue: AIActionType;\n modelConfig: IModelConfig;\n}): Promise<{\n completion: OpenAI.Chat.Completions;\n modelName: string;\n modelDescription: string;\n uiTarsVersion?: UITarsModelVersion;\n vlMode: TVlModeTypes | undefined;\n}> {\n const {\n socksProxy,\n httpProxy,\n modelName,\n openaiBaseURL,\n openaiApiKey,\n openaiExtraConfig,\n modelDescription,\n uiTarsModelVersion: uiTarsVersion,\n vlMode,\n createOpenAIClient,\n timeout,\n } = modelConfig;\n\n let proxyAgent: any = undefined;\n const debugProxy = getDebug('ai:call:proxy');\n\n // Helper function to sanitize proxy URL for logging (remove credentials)\n // Uses URL API instead of regex to avoid ReDoS vulnerabilities\n const sanitizeProxyUrl = (url: string): string => {\n try {\n const parsed = new URL(url);\n if (parsed.username) {\n // Keep username for debugging, hide password for security\n parsed.password = '****';\n return parsed.href;\n }\n return url;\n } catch {\n // If URL parsing fails, return original URL (will be caught later)\n return url;\n }\n };\n\n if (httpProxy) {\n debugProxy('using http proxy', sanitizeProxyUrl(httpProxy));\n if (ifInBrowser) {\n console.warn(\n 'HTTP proxy is configured but not supported in browser environment',\n );\n } else {\n // Dynamic import with variable to avoid bundler static analysis\n const moduleName = 'undici';\n const { ProxyAgent } = await import(moduleName);\n proxyAgent = new ProxyAgent({\n uri: httpProxy,\n // Note: authentication is handled via the 
URI (e.g., http://user:pass@proxy.com:8080)\n });\n }\n } else if (socksProxy) {\n debugProxy('using socks proxy', sanitizeProxyUrl(socksProxy));\n if (ifInBrowser) {\n console.warn(\n 'SOCKS proxy is configured but not supported in browser environment',\n );\n } else {\n try {\n // Dynamic import with variable to avoid bundler static analysis\n const moduleName = 'fetch-socks';\n const { socksDispatcher } = await import(moduleName);\n // Parse SOCKS proxy URL (e.g., socks5://127.0.0.1:1080)\n const proxyUrl = new URL(socksProxy);\n\n // Validate hostname\n if (!proxyUrl.hostname) {\n throw new Error('SOCKS proxy URL must include a valid hostname');\n }\n\n // Validate and parse port\n const port = Number.parseInt(proxyUrl.port, 10);\n if (!proxyUrl.port || Number.isNaN(port)) {\n throw new Error('SOCKS proxy URL must include a valid port');\n }\n\n // Parse SOCKS version from protocol\n const protocol = proxyUrl.protocol.replace(':', '');\n const socksType =\n protocol === 'socks4' ? 4 : protocol === 'socks5' ? 5 : 5;\n\n proxyAgent = socksDispatcher({\n type: socksType,\n host: proxyUrl.hostname,\n port,\n ...(proxyUrl.username\n ? {\n userId: decodeURIComponent(proxyUrl.username),\n password: decodeURIComponent(proxyUrl.password || ''),\n }\n : {}),\n });\n debugProxy('socks proxy configured successfully', {\n type: socksType,\n host: proxyUrl.hostname,\n port: port,\n });\n } catch (error) {\n console.error('Failed to configure SOCKS proxy:', error);\n throw new Error(\n `Invalid SOCKS proxy URL: ${socksProxy}. Expected format: socks4://host:port, socks5://host:port, or with authentication: socks5://user:pass@host:port`,\n );\n }\n }\n }\n\n const openAIOptions = {\n baseURL: openaiBaseURL,\n apiKey: openaiApiKey,\n // Use fetchOptions.dispatcher for fetch-based SDK instead of httpAgent\n // Note: Type assertion needed due to undici version mismatch between dependencies\n ...(proxyAgent ? { fetchOptions: { dispatcher: proxyAgent as any } } : {}),\n ...openaiExtraConfig,\n ...(typeof timeout === 'number' ? 
{ timeout } : {}),\n dangerouslyAllowBrowser: true,\n };\n\n const baseOpenAI = new OpenAI(openAIOptions);\n\n let openai: OpenAI = baseOpenAI;\n\n // LangSmith wrapper\n if (\n openai &&\n globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGSMITH_DEBUG)\n ) {\n if (ifInBrowser) {\n throw new Error('langsmith is not supported in browser');\n }\n console.log('DEBUGGING MODE: langsmith wrapper enabled');\n // Use variable to prevent static analysis by bundlers\n const langsmithModule = 'langsmith/wrappers';\n const { wrapOpenAI } = await import(langsmithModule);\n openai = wrapOpenAI(openai);\n }\n\n // Langfuse wrapper\n if (\n openai &&\n globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGFUSE_DEBUG)\n ) {\n if (ifInBrowser) {\n throw new Error('langfuse is not supported in browser');\n }\n console.log('DEBUGGING MODE: langfuse wrapper enabled');\n // Use variable to prevent static analysis by bundlers\n const langfuseModule = 'langfuse';\n const { observeOpenAI } = await import(langfuseModule);\n openai = observeOpenAI(openai);\n }\n\n if (createOpenAIClient) {\n const wrappedClient = await createOpenAIClient(baseOpenAI, openAIOptions);\n\n if (wrappedClient) {\n openai = wrappedClient as OpenAI;\n }\n }\n\n return {\n completion: openai.chat.completions,\n modelName,\n modelDescription,\n uiTarsVersion,\n vlMode,\n };\n}\n\nexport async function callAI(\n messages: ChatCompletionMessageParam[],\n AIActionTypeValue: AIActionType,\n modelConfig: IModelConfig,\n options?: {\n stream?: boolean;\n onChunk?: StreamingCallback;\n },\n): Promise<{ content: string; usage?: AIUsageInfo; isStreamed: boolean }> {\n const { completion, modelName, modelDescription, uiTarsVersion, vlMode } =\n await createChatClient({\n AIActionTypeValue,\n modelConfig,\n });\n\n const maxTokens =\n globalConfigManager.getEnvConfigValue(MIDSCENE_MODEL_MAX_TOKENS) ??\n globalConfigManager.getEnvConfigValue(OPENAI_MAX_TOKENS);\n const debugCall = getDebug('ai:call');\n const debugProfileStats = getDebug('ai:profile:stats');\n const debugProfileDetail = getDebug('ai:profile:detail');\n\n const startTime = Date.now();\n\n const isStreaming = options?.stream && options?.onChunk;\n let content: string | undefined;\n let accumulated = '';\n let usage: OpenAI.CompletionUsage | undefined;\n let timeCost: number | undefined;\n\n const buildUsageInfo = (usageData?: OpenAI.CompletionUsage) => {\n if (!usageData) return undefined;\n\n const cachedInputTokens = (\n usageData as { prompt_tokens_details?: { cached_tokens?: number } }\n )?.prompt_tokens_details?.cached_tokens;\n\n return {\n prompt_tokens: usageData.prompt_tokens ?? 0,\n completion_tokens: usageData.completion_tokens ?? 0,\n total_tokens: usageData.total_tokens ?? 0,\n cached_input: cachedInputTokens ?? 0,\n time_cost: timeCost ?? 0,\n model_name: modelName,\n model_description: modelDescription,\n intent: modelConfig.intent,\n } satisfies AIUsageInfo;\n };\n\n const commonConfig = {\n temperature: vlMode === 'vlm-ui-tars' ? 0.0 : undefined,\n stream: !!isStreaming,\n max_tokens: typeof maxTokens === 'number' ? maxTokens : undefined,\n ...(vlMode === 'qwen2.5-vl' // qwen vl v2 specific config\n ? {\n vl_high_resolution_images: true,\n }\n : {}),\n };\n\n try {\n debugCall(\n `sending ${isStreaming ? 
'streaming ' : ''}request to ${modelName}`,\n );\n\n if (isStreaming) {\n const stream = (await completion.create(\n {\n model: modelName,\n messages,\n ...commonConfig,\n },\n {\n stream: true,\n },\n )) as Stream<OpenAI.Chat.Completions.ChatCompletionChunk> & {\n _request_id?: string | null;\n };\n\n for await (const chunk of stream) {\n const content = chunk.choices?.[0]?.delta?.content || '';\n const reasoning_content =\n (chunk.choices?.[0]?.delta as any)?.reasoning_content || '';\n\n // Check for usage info in any chunk (OpenAI provides usage in separate chunks)\n if (chunk.usage) {\n usage = chunk.usage;\n }\n\n if (content || reasoning_content) {\n accumulated += content;\n const chunkData: CodeGenerationChunk = {\n content,\n reasoning_content,\n accumulated,\n isComplete: false,\n usage: undefined,\n };\n options.onChunk!(chunkData);\n }\n\n // Check if stream is complete\n if (chunk.choices?.[0]?.finish_reason) {\n timeCost = Date.now() - startTime;\n\n // If usage is not available from the stream, provide a basic usage info\n if (!usage) {\n // Estimate token counts based on content length (rough approximation)\n const estimatedTokens = Math.max(\n 1,\n Math.floor(accumulated.length / 4),\n );\n usage = {\n prompt_tokens: estimatedTokens,\n completion_tokens: estimatedTokens,\n total_tokens: estimatedTokens * 2,\n };\n }\n\n // Send final chunk\n const finalChunk: CodeGenerationChunk = {\n content: '',\n accumulated,\n reasoning_content: '',\n isComplete: true,\n usage: buildUsageInfo(usage),\n };\n options.onChunk!(finalChunk);\n break;\n }\n }\n content = accumulated;\n debugProfileStats(\n `streaming model, ${modelName}, mode, ${vlMode || 'default'}, cost-ms, ${timeCost}`,\n );\n } else {\n const result = await completion.create({\n model: modelName,\n messages,\n ...commonConfig,\n } as any);\n timeCost = Date.now() - startTime;\n\n debugProfileStats(\n `model, ${modelName}, mode, ${vlMode || 'default'}, ui-tars-version, ${uiTarsVersion}, prompt-tokens, ${result.usage?.prompt_tokens || ''}, completion-tokens, ${result.usage?.completion_tokens || ''}, total-tokens, ${result.usage?.total_tokens || ''}, cost-ms, ${timeCost}, requestId, ${result._request_id || ''}`,\n );\n\n debugProfileDetail(`model usage detail: ${JSON.stringify(result.usage)}`);\n\n assert(\n result.choices,\n `invalid response from LLM service: ${JSON.stringify(result)}`,\n );\n content = result.choices[0].message.content!;\n usage = result.usage;\n }\n\n debugCall(`response: ${content}`);\n assert(content, 'empty content');\n\n // Ensure we always have usage info for streaming responses\n if (isStreaming && !usage) {\n // Estimate token counts based on content length (rough approximation)\n const estimatedTokens = Math.max(\n 1,\n Math.floor((content || '').length / 4),\n );\n usage = {\n prompt_tokens: estimatedTokens,\n completion_tokens: estimatedTokens,\n total_tokens: estimatedTokens * 2,\n } as OpenAI.CompletionUsage;\n }\n\n return {\n content: content || '',\n usage: buildUsageInfo(usage),\n isStreamed: !!isStreaming,\n };\n } catch (e: any) {\n console.error(' call AI error', e);\n const newError = new Error(\n `failed to call ${isStreaming ? 'streaming ' : ''}AI model service: ${e.message}. 
Trouble shooting: https://midscenejs.com/model-provider.html`,\n {\n cause: e,\n },\n );\n throw newError;\n }\n}\n\nexport async function callAIWithObjectResponse<T>(\n messages: ChatCompletionMessageParam[],\n AIActionTypeValue: AIActionType,\n modelConfig: IModelConfig,\n): Promise<{ content: T; contentString: string; usage?: AIUsageInfo }> {\n const response = await callAI(messages, AIActionTypeValue, modelConfig);\n assert(response, 'empty response');\n const vlMode = modelConfig.vlMode;\n const jsonContent = safeParseJson(response.content, vlMode);\n return {\n content: jsonContent,\n contentString: response.content,\n usage: response.usage,\n };\n}\n\nexport async function callAIWithStringResponse(\n msgs: AIArgs,\n AIActionTypeValue: AIActionType,\n modelConfig: IModelConfig,\n): Promise<{ content: string; usage?: AIUsageInfo }> {\n const { content, usage } = await callAI(msgs, AIActionTypeValue, modelConfig);\n return { content, usage };\n}\n\nexport function extractJSONFromCodeBlock(response: string) {\n try {\n // First, try to match a JSON object directly in the response\n const jsonMatch = response.match(/^\\s*(\\{[\\s\\S]*\\})\\s*$/);\n if (jsonMatch) {\n return jsonMatch[1];\n }\n\n // If no direct JSON object is found, try to extract JSON from a code block\n const codeBlockMatch = response.match(\n /```(?:json)?\\s*(\\{[\\s\\S]*?\\})\\s*```/,\n );\n if (codeBlockMatch) {\n return codeBlockMatch[1];\n }\n\n // If no code block is found, try to find a JSON-like structure in the text\n const jsonLikeMatch = response.match(/\\{[\\s\\S]*\\}/);\n if (jsonLikeMatch) {\n return jsonLikeMatch[0];\n }\n } catch {}\n // If no JSON-like structure is found, return the original response\n return response;\n}\n\nexport function preprocessDoubaoBboxJson(input: string) {\n if (input.includes('bbox')) {\n // when its values like 940 445 969 490, replace all /\\d+\\s+\\d+/g with /$1,$2/g\n while (/\\d+\\s+\\d+/.test(input)) {\n input = input.replace(/(\\d+)\\s+(\\d+)/g, '$1,$2');\n }\n }\n return input;\n}\n\n/**\n * Normalize a parsed JSON object by trimming whitespace from:\n * 1. All object keys (e.g., \" prompt \" -> \"prompt\")\n * 2. 
All string values (e.g., \" Tap \" -> \"Tap\")\n * This handles LLM output that may include leading/trailing spaces.\n */\nfunction normalizeJsonObject(obj: any): any {\n // Handle null and undefined\n if (obj === null || obj === undefined) {\n return obj;\n }\n\n // Handle arrays - recursively normalize each element\n if (Array.isArray(obj)) {\n return obj.map((item) => normalizeJsonObject(item));\n }\n\n // Handle objects\n if (typeof obj === 'object') {\n const normalized: any = {};\n\n for (const [key, value] of Object.entries(obj)) {\n // Trim the key to remove leading/trailing spaces\n const trimmedKey = key.trim();\n\n // Recursively normalize the value\n let normalizedValue = normalizeJsonObject(value);\n\n // Trim all string values\n if (typeof normalizedValue === 'string') {\n normalizedValue = normalizedValue.trim();\n }\n\n normalized[trimmedKey] = normalizedValue;\n }\n\n return normalized;\n }\n\n // Handle primitive strings\n if (typeof obj === 'string') {\n return obj.trim();\n }\n\n // Return other primitives as-is\n return obj;\n}\n\nexport function safeParseJson(input: string, vlMode: TVlModeTypes | undefined) {\n const cleanJsonString = extractJSONFromCodeBlock(input);\n // match the point\n if (cleanJsonString?.match(/\\((\\d+),(\\d+)\\)/)) {\n return cleanJsonString\n .match(/\\((\\d+),(\\d+)\\)/)\n ?.slice(1)\n .map(Number);\n }\n\n let parsed: any;\n try {\n parsed = JSON.parse(cleanJsonString);\n return normalizeJsonObject(parsed);\n } catch {}\n try {\n parsed = JSON.parse(jsonrepair(cleanJsonString));\n return normalizeJsonObject(parsed);\n } catch (e) {}\n\n if (vlMode === 'doubao-vision' || vlMode === 'vlm-ui-tars') {\n const jsonString = preprocessDoubaoBboxJson(cleanJsonString);\n parsed = JSON.parse(jsonrepair(jsonString));\n return normalizeJsonObject(parsed);\n }\n throw Error(`failed to parse json response: 
${input}`);\n}\n"],"names":["__webpack_require__","module","getter","definition","key","Object","obj","prop","Symbol","createChatClient","AIActionTypeValue","modelConfig","socksProxy","httpProxy","modelName","openaiBaseURL","openaiApiKey","openaiExtraConfig","modelDescription","uiTarsVersion","vlMode","createOpenAIClient","timeout","proxyAgent","debugProxy","getDebug","sanitizeProxyUrl","url","parsed","URL","ifInBrowser","console","moduleName","ProxyAgent","socksDispatcher","proxyUrl","Error","port","Number","protocol","socksType","decodeURIComponent","error","openAIOptions","baseOpenAI","OpenAI","openai","globalConfigManager","MIDSCENE_LANGSMITH_DEBUG","langsmithModule","wrapOpenAI","MIDSCENE_LANGFUSE_DEBUG","langfuseModule","observeOpenAI","wrappedClient","callAI","messages","options","completion","maxTokens","MIDSCENE_MODEL_MAX_TOKENS","OPENAI_MAX_TOKENS","debugCall","debugProfileStats","debugProfileDetail","startTime","Date","isStreaming","content","accumulated","usage","timeCost","buildUsageInfo","usageData","cachedInputTokens","commonConfig","undefined","stream","chunk","reasoning_content","chunkData","estimatedTokens","Math","finalChunk","result","JSON","assert","e","newError","callAIWithObjectResponse","response","jsonContent","safeParseJson","callAIWithStringResponse","msgs","extractJSONFromCodeBlock","jsonMatch","codeBlockMatch","jsonLikeMatch","preprocessDoubaoBboxJson","input","normalizeJsonObject","Array","item","normalized","value","trimmedKey","normalizedValue","cleanJsonString","jsonrepair","jsonString"],"mappings":";;;IACAA,oBAAoB,CAAC,GAAG,CAACC;QACxB,IAAIC,SAASD,UAAUA,OAAO,UAAU,GACvC,IAAOA,MAAM,CAAC,UAAU,GACxB,IAAOA;QACRD,oBAAoB,CAAC,CAACE,QAAQ;YAAE,GAAGA;QAAO;QAC1C,OAAOA;IACR;;;ICPAF,oBAAoB,CAAC,GAAG,CAAC,UAASG;QACjC,IAAI,IAAIC,OAAOD,WACR,IAAGH,oBAAoB,CAAC,CAACG,YAAYC,QAAQ,CAACJ,oBAAoB,CAAC,CAAC,UAASI,MACzEC,OAAO,cAAc,CAAC,UAASD,KAAK;YAAE,YAAY;YAAM,KAAKD,UAAU,CAACC,IAAI;QAAC;IAGzF;;;ICNAJ,oBAAoB,CAAC,GAAG,CAACM,KAAKC,OAAUF,OAAO,SAAS,CAAC,cAAc,CAAC,IAAI,CAACC,KAAKC;;;ICClFP,oBAAoB,CAAC,GAAG,CAAC;QACxB,IAAG,AAAkB,eAAlB,OAAOQ,UAA0BA,OAAO,WAAW,EACrDH,OAAO,cAAc,CAAC,UAASG,OAAO,WAAW,EAAE;YAAE,OAAO;QAAS;QAEtEH,OAAO,cAAc,CAAC,UAAS,cAAc;YAAE,OAAO;QAAK;IAC5D;;;;;;;;;;;;;;;;;;ACeA,eAAeI,iBAAiB,EAC9BC,iBAAiB,EACjBC,WAAW,EAIZ;IAOC,MAAM,EACJC,UAAU,EACVC,SAAS,EACTC,SAAS,EACTC,aAAa,EACbC,YAAY,EACZC,iBAAiB,EACjBC,gBAAgB,EAChB,oBAAoBC,aAAa,EACjCC,MAAM,EACNC,kBAAkB,EAClBC,OAAO,EACR,GAAGX;IAEJ,IAAIY;IACJ,MAAMC,aAAaC,AAAAA,IAAAA,uBAAAA,QAAAA,AAAAA,EAAS;IAI5B,MAAMC,mBAAmB,CAACC;QACxB,IAAI;YACF,MAAMC,SAAS,IAAIC,IAAIF;YACvB,IAAIC,OAAO,QAAQ,EAAE;gBAEnBA,OAAO,QAAQ,GAAG;gBAClB,OAAOA,OAAO,IAAI;YACpB;YACA,OAAOD;QACT,EAAE,OAAM;YAEN,OAAOA;QACT;IACF;IAEA,IAAId,WAAW;QACbW,WAAW,oBAAoBE,iBAAiBb;QAChD,IAAIiB,sBAAAA,WAAWA,EACbC,QAAQ,IAAI,CACV;aAEG;YAEL,MAAMC,aAAa;YACnB,MAAM,EAAEC,UAAU,EAAE,GAAG,MAAM,MAAM,CAACD;YACpCT,aAAa,IAAIU,WAAW;gBAC1B,KAAKpB;YAEP;QACF;IACF,OAAO,IAAID,YAAY;QACrBY,WAAW,qBAAqBE,iBAAiBd;QACjD,IAAIkB,sBAAAA,WAAWA,EACbC,QAAQ,IAAI,CACV;aAGF,IAAI;YAEF,MAAMC,aAAa;YACnB,MAAM,EAAEE,eAAe,EAAE,GAAG,MAAM,MAAM,CAACF;YAEzC,MAAMG,WAAW,IAAIN,IAAIjB;YAGzB,IAAI,CAACuB,SAAS,QAAQ,EACpB,MAAM,IAAIC,MAAM;YAIlB,MAAMC,OAAOC,OAAO,QAAQ,CAACH,SAAS,IAAI,EAAE;YAC5C,IAAI,CAACA,SAAS,IAAI,IAAIG,OAAO,KAAK,CAACD,OACjC,MAAM,IAAID,MAAM;YAIlB,MAAMG,WAAWJ,SAAS,QAAQ,CAAC,OAAO,CAAC,KAAK;YAChD,MAAMK,YACJD,AAAa,aAAbA,WAAwB,IAAIA,AAAa,aAAbA,WAAwB,IAAI;YAE1DhB,aAAaW,gBAAgB;gBAC3B,MAAMM;gBACN,MAAML,SAAS,QAAQ;gBACvBE;gBACA,GAAIF,SAAS,QAAQ,GACjB;oBACE,QAAQM,mBAAmBN,SAAS,QAAQ;oBAC5C,UAAUM,mBAAmBN,SAAS,QAAQ,IAAI;gBACpD,IACA,CAAC,CAAC;YA
CR;YACAX,WAAW,uCAAuC;gBAChD,MAAMgB;gBACN,MAAML,SAAS,QAAQ;gBACvB,MAAME;YACR;QACF,EAAE,OAAOK,OAAO;YACdX,QAAQ,KAAK,CAAC,oCAAoCW;YAClD,MAAM,IAAIN,MACR,CAAC,yBAAyB,EAAExB,WAAW,+GAA+G,CAAC;QAE3J;IAEJ;IAEA,MAAM+B,gBAAgB;QACpB,SAAS5B;QACT,QAAQC;QAGR,GAAIO,aAAa;YAAE,cAAc;gBAAE,YAAYA;YAAkB;QAAE,IAAI,CAAC,CAAC;QACzE,GAAGN,iBAAiB;QACpB,GAAI,AAAmB,YAAnB,OAAOK,UAAuB;YAAEA;QAAQ,IAAI,CAAC,CAAC;QAClD,yBAAyB;IAC3B;IAEA,MAAMsB,aAAa,IAAIC,CAAAA,yBAAAA,EAAOF;IAE9B,IAAIG,SAAiBF;IAGrB,IACEE,UACAC,oBAAAA,mBAAAA,CAAAA,qBAAyC,CAACC,oBAAAA,wBAAwBA,GAClE;QACA,IAAIlB,sBAAAA,WAAWA,EACb,MAAM,IAAIM,MAAM;QAElBL,QAAQ,GAAG,CAAC;QAEZ,MAAMkB,kBAAkB;QACxB,MAAM,EAAEC,UAAU,EAAE,GAAG,MAAM,MAAM,CAACD;QACpCH,SAASI,WAAWJ;IACtB;IAGA,IACEA,UACAC,oBAAAA,mBAAAA,CAAAA,qBAAyC,CAACI,oBAAAA,uBAAuBA,GACjE;QACA,IAAIrB,sBAAAA,WAAWA,EACb,MAAM,IAAIM,MAAM;QAElBL,QAAQ,GAAG,CAAC;QAEZ,MAAMqB,iBAAiB;QACvB,MAAM,EAAEC,aAAa,EAAE,GAAG,MAAM,MAAM,CAACD;QACvCN,SAASO,cAAcP;IACzB;IAEA,IAAIzB,oBAAoB;QACtB,MAAMiC,gBAAgB,MAAMjC,mBAAmBuB,YAAYD;QAE3D,IAAIW,eACFR,SAASQ;IAEb;IAEA,OAAO;QACL,YAAYR,OAAO,IAAI,CAAC,WAAW;QACnChC;QACAI;QACAC;QACAC;IACF;AACF;AAEO,eAAemC,OACpBC,QAAsC,EACtC9C,iBAA+B,EAC/BC,WAAyB,EACzB8C,OAGC;IAED,MAAM,EAAEC,UAAU,EAAE5C,SAAS,EAAEI,gBAAgB,EAAEC,aAAa,EAAEC,MAAM,EAAE,GACtE,MAAMX,iBAAiB;QACrBC;QACAC;IACF;IAEF,MAAMgD,YACJZ,oBAAAA,mBAAAA,CAAAA,iBAAqC,CAACa,oBAAAA,yBAAyBA,KAC/Db,oBAAAA,mBAAAA,CAAAA,iBAAqC,CAACc,oBAAAA,iBAAiBA;IACzD,MAAMC,YAAYrC,AAAAA,IAAAA,uBAAAA,QAAAA,AAAAA,EAAS;IAC3B,MAAMsC,oBAAoBtC,AAAAA,IAAAA,uBAAAA,QAAAA,AAAAA,EAAS;IACnC,MAAMuC,qBAAqBvC,AAAAA,IAAAA,uBAAAA,QAAAA,AAAAA,EAAS;IAEpC,MAAMwC,YAAYC,KAAK,GAAG;IAE1B,MAAMC,cAAcV,SAAS,UAAUA,SAAS;IAChD,IAAIW;IACJ,IAAIC,cAAc;IAClB,IAAIC;IACJ,IAAIC;IAEJ,MAAMC,iBAAiB,CAACC;QACtB,IAAI,CAACA,WAAW;QAEhB,MAAMC,oBACJD,WACC,uBAAuB;QAE1B,OAAO;YACL,eAAeA,UAAU,aAAa,IAAI;YAC1C,mBAAmBA,UAAU,iBAAiB,IAAI;YAClD,cAAcA,UAAU,YAAY,IAAI;YACxC,cAAcC,qBAAqB;YACnC,WAAWH,YAAY;YACvB,YAAYzD;YACZ,mBAAmBI;YACnB,QAAQP,YAAY,MAAM;QAC5B;IACF;IAEA,MAAMgE,eAAe;QACnB,aAAavD,AAAW,kBAAXA,SAA2B,MAAMwD;QAC9C,QAAQ,CAAC,CAACT;QACV,YAAY,AAAqB,YAArB,OAAOR,YAAyBA,YAAYiB;QACxD,GAAIxD,AAAW,iBAAXA,SACA;YACE,2BAA2B;QAC7B,IACA,CAAC,CAAC;IACR;IAEA,IAAI;QACF0C,UACE,CAAC,QAAQ,EAAEK,cAAc,eAAe,GAAG,WAAW,EAAErD,WAAW;QAGrE,IAAIqD,aAAa;YACf,MAAMU,SAAU,MAAMnB,WAAW,MAAM,CACrC;gBACE,OAAO5C;gBACP0C;gBACA,GAAGmB,YAAY;YACjB,GACA;gBACE,QAAQ;YACV;YAKF,WAAW,MAAMG,SAASD,OAAQ;gBAChC,MAAMT,UAAUU,MAAM,OAAO,EAAE,CAAC,EAAE,EAAE,OAAO,WAAW;gBACtD,MAAMC,oBACHD,MAAM,OAAO,EAAE,CAAC,EAAE,EAAE,OAAe,qBAAqB;gBAG3D,IAAIA,MAAM,KAAK,EACbR,QAAQQ,MAAM,KAAK;gBAGrB,IAAIV,WAAWW,mBAAmB;oBAChCV,eAAeD;oBACf,MAAMY,YAAiC;wBACrCZ;wBACAW;wBACAV;wBACA,YAAY;wBACZ,OAAOO;oBACT;oBACAnB,QAAQ,OAAO,CAAEuB;gBACnB;gBAGA,IAAIF,MAAM,OAAO,EAAE,CAAC,EAAE,EAAE,eAAe;oBACrCP,WAAWL,KAAK,GAAG,KAAKD;oBAGxB,IAAI,CAACK,OAAO;wBAEV,MAAMW,kBAAkBC,KAAK,GAAG,CAC9B,GACAA,KAAK,KAAK,CAACb,YAAY,MAAM,GAAG;wBAElCC,QAAQ;4BACN,eAAeW;4BACf,mBAAmBA;4BACnB,cAAcA,AAAkB,IAAlBA;wBAChB;oBACF;oBAGA,MAAME,aAAkC;wBACtC,SAAS;wBACTd;wBACA,mBAAmB;wBACnB,YAAY;wBACZ,OAAOG,eAAeF;oBACxB;oBACAb,QAAQ,OAAO,CAAE0B;oBACjB;gBACF;YACF;YACAf,UAAUC;YACVN,kBACE,CAAC,iBAAiB,EAAEjD,UAAU,QAAQ,EAAEM,UAAU,UAAU,WAAW,EAAEmD,UAAU;QAEvF,OAAO;YACL,MAAMa,SAAS,MAAM1B,WAAW,MAAM,CAAC;gBACrC,OAAO5C;gBACP0C;gBACA,GAAGmB,YAAY;YACjB;YACAJ,WAAWL,KAAK,GAAG,KAAKD;YAExBF,kBACE,CAAC,OAAO,EAAEjD,UAAU,QAAQ,EAAEM,UAAU,UAAU,mBAAmB,EAAED,cAAc,iBAAiB,EAAEiE,OAAO,KAAK,EAAE,iBAAiB,GAAG,qBAAqB,EAAEA,OAAO,KAAK,EAAE,qBAAqB,GAAG,gBAAgB,EAAEA,OAAO,KAAK,EAAE,gBAAgB,GAAG,WAAW,EAAEb,SAAS,aAAa,EAAEa,OAAO,WAAW,IAAI,IAAI;YAG3TpB,mBAAmB,CAAC,oBAAoB,EAAEqB,KAAK,SAAS,CAACD,OAAO,KAAK,GAAG;YAExE
E,IAAAA,sBAAAA,MAAAA,AAAAA,EACEF,OAAO,OAAO,EACd,CAAC,mCAAmC,EAAEC,KAAK,SAAS,CAACD,SAAS;YAEhEhB,UAAUgB,OAAO,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO;YAC3Cd,QAAQc,OAAO,KAAK;QACtB;QAEAtB,UAAU,CAAC,UAAU,EAAEM,SAAS;QAChCkB,IAAAA,sBAAAA,MAAAA,AAAAA,EAAOlB,SAAS;QAGhB,IAAID,eAAe,CAACG,OAAO;YAEzB,MAAMW,kBAAkBC,KAAK,GAAG,CAC9B,GACAA,KAAK,KAAK,CAAEd,AAAAA,CAAAA,WAAW,EAAC,EAAG,MAAM,GAAG;YAEtCE,QAAQ;gBACN,eAAeW;gBACf,mBAAmBA;gBACnB,cAAcA,AAAkB,IAAlBA;YAChB;QACF;QAEA,OAAO;YACL,SAASb,WAAW;YACpB,OAAOI,eAAeF;YACtB,YAAY,CAAC,CAACH;QAChB;IACF,EAAE,OAAOoB,GAAQ;QACfxD,QAAQ,KAAK,CAAC,kBAAkBwD;QAChC,MAAMC,WAAW,IAAIpD,MACnB,CAAC,eAAe,EAAE+B,cAAc,eAAe,GAAG,kBAAkB,EAAEoB,EAAE,OAAO,CAAC,8DAA8D,CAAC,EAC/I;YACE,OAAOA;QACT;QAEF,MAAMC;IACR;AACF;AAEO,eAAeC,yBACpBjC,QAAsC,EACtC9C,iBAA+B,EAC/BC,WAAyB;IAEzB,MAAM+E,WAAW,MAAMnC,OAAOC,UAAU9C,mBAAmBC;IAC3D2E,IAAAA,sBAAAA,MAAAA,AAAAA,EAAOI,UAAU;IACjB,MAAMtE,SAAST,YAAY,MAAM;IACjC,MAAMgF,cAAcC,cAAcF,SAAS,OAAO,EAAEtE;IACpD,OAAO;QACL,SAASuE;QACT,eAAeD,SAAS,OAAO;QAC/B,OAAOA,SAAS,KAAK;IACvB;AACF;AAEO,eAAeG,yBACpBC,IAAY,EACZpF,iBAA+B,EAC/BC,WAAyB;IAEzB,MAAM,EAAEyD,OAAO,EAAEE,KAAK,EAAE,GAAG,MAAMf,OAAOuC,MAAMpF,mBAAmBC;IACjE,OAAO;QAAEyD;QAASE;IAAM;AAC1B;AAEO,SAASyB,yBAAyBL,QAAgB;IACvD,IAAI;QAEF,MAAMM,YAAYN,SAAS,KAAK,CAAC;QACjC,IAAIM,WACF,OAAOA,SAAS,CAAC,EAAE;QAIrB,MAAMC,iBAAiBP,SAAS,KAAK,CACnC;QAEF,IAAIO,gBACF,OAAOA,cAAc,CAAC,EAAE;QAI1B,MAAMC,gBAAgBR,SAAS,KAAK,CAAC;QACrC,IAAIQ,eACF,OAAOA,aAAa,CAAC,EAAE;IAE3B,EAAE,OAAM,CAAC;IAET,OAAOR;AACT;AAEO,SAASS,yBAAyBC,KAAa;IACpD,IAAIA,MAAM,QAAQ,CAAC,SAEjB,MAAO,YAAY,IAAI,CAACA,OACtBA,QAAQA,MAAM,OAAO,CAAC,kBAAkB;IAG5C,OAAOA;AACT;AAQA,SAASC,oBAAoB/F,GAAQ;IAEnC,IAAIA,QAAAA,KACF,OAAOA;IAIT,IAAIgG,MAAM,OAAO,CAAChG,MAChB,OAAOA,IAAI,GAAG,CAAC,CAACiG,OAASF,oBAAoBE;IAI/C,IAAI,AAAe,YAAf,OAAOjG,KAAkB;QAC3B,MAAMkG,aAAkB,CAAC;QAEzB,KAAK,MAAM,CAACpG,KAAKqG,MAAM,IAAIpG,OAAO,OAAO,CAACC,KAAM;YAE9C,MAAMoG,aAAatG,IAAI,IAAI;YAG3B,IAAIuG,kBAAkBN,oBAAoBI;YAG1C,IAAI,AAA2B,YAA3B,OAAOE,iBACTA,kBAAkBA,gBAAgB,IAAI;YAGxCH,UAAU,CAACE,WAAW,GAAGC;QAC3B;QAEA,OAAOH;IACT;IAGA,IAAI,AAAe,YAAf,OAAOlG,KACT,OAAOA,IAAI,IAAI;IAIjB,OAAOA;AACT;AAEO,SAASsF,cAAcQ,KAAa,EAAEhF,MAAgC;IAC3E,MAAMwF,kBAAkBb,yBAAyBK;IAEjD,IAAIQ,iBAAiB,MAAM,oBACzB,OAAOA,gBACJ,KAAK,CAAC,oBACL,MAAM,GACP,IAAItE;IAGT,IAAIV;IACJ,IAAI;QACFA,SAASyD,KAAK,KAAK,CAACuB;QACpB,OAAOP,oBAAoBzE;IAC7B,EAAE,OAAM,CAAC;IACT,IAAI;QACFA,SAASyD,KAAK,KAAK,CAACwB,AAAAA,IAAAA,oCAAAA,UAAAA,AAAAA,EAAWD;QAC/B,OAAOP,oBAAoBzE;IAC7B,EAAE,OAAO2D,GAAG,CAAC;IAEb,IAAInE,AAAW,oBAAXA,UAA8BA,AAAW,kBAAXA,QAA0B;QAC1D,MAAM0F,aAAaX,yBAAyBS;QAC5ChF,SAASyD,KAAK,KAAK,CAACwB,AAAAA,IAAAA,oCAAAA,UAAAA,AAAAA,EAAWC;QAC/B,OAAOT,oBAAoBzE;IAC7B;IACA,MAAMQ,MAAM,CAAC,+BAA+B,EAAEgE,OAAO;AACvD"}
+
{"version":3,"file":"ai-model/service-caller/index.js","sources":["webpack/runtime/compat_get_default_export","webpack/runtime/define_property_getters","webpack/runtime/has_own_property","webpack/runtime/make_namespace_object","../../../../src/ai-model/service-caller/index.ts"],"sourcesContent":["// getDefaultExport function for compatibility with non-ESM modules\n__webpack_require__.n = (module) => {\n\tvar getter = module && module.__esModule ?\n\t\t() => (module['default']) :\n\t\t() => (module);\n\t__webpack_require__.d(getter, { a: getter });\n\treturn getter;\n};\n","__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n }\n }\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","import { AIResponseFormat, type AIUsageInfo } from '@/types';\nimport type { CodeGenerationChunk, StreamingCallback } from '@/types';\nimport {\n type IModelConfig,\n MIDSCENE_LANGFUSE_DEBUG,\n MIDSCENE_LANGSMITH_DEBUG,\n MIDSCENE_MODEL_MAX_TOKENS,\n OPENAI_MAX_TOKENS,\n type TVlModeTypes,\n type UITarsModelVersion,\n globalConfigManager,\n} from '@midscene/shared/env';\n\nimport { getDebug } from '@midscene/shared/logger';\nimport { assert, ifInBrowser } from '@midscene/shared/utils';\nimport { HttpsProxyAgent } from 'https-proxy-agent';\nimport { jsonrepair } from 'jsonrepair';\nimport OpenAI from 'openai';\nimport type { ChatCompletionMessageParam } from 'openai/resources/index';\nimport type { Stream } from 'openai/streaming';\nimport { SocksProxyAgent } from 'socks-proxy-agent';\nimport { AIActionType, type AIArgs } from '../../common';\nimport { assertSchema } from '../prompt/assertion';\nimport { planSchema } from '../prompt/llm-planning';\n\nasync function createChatClient({\n AIActionTypeValue,\n modelConfig,\n}: {\n AIActionTypeValue: AIActionType;\n modelConfig: IModelConfig;\n}): Promise<{\n completion: OpenAI.Chat.Completions;\n modelName: string;\n modelDescription: string;\n uiTarsVersion?: UITarsModelVersion;\n vlMode: TVlModeTypes | undefined;\n}> {\n const {\n socksProxy,\n httpProxy,\n modelName,\n openaiBaseURL,\n openaiApiKey,\n openaiExtraConfig,\n modelDescription,\n uiTarsModelVersion: uiTarsVersion,\n vlMode,\n createOpenAIClient,\n timeout,\n } = modelConfig;\n\n let proxyAgent = undefined;\n const debugProxy = getDebug('ai:call:proxy');\n if (httpProxy) {\n debugProxy('using http proxy', httpProxy);\n proxyAgent = new HttpsProxyAgent(httpProxy);\n } else if (socksProxy) {\n debugProxy('using socks proxy', socksProxy);\n proxyAgent = new SocksProxyAgent(socksProxy);\n }\n\n const openAIOptions = {\n baseURL: openaiBaseURL,\n apiKey: openaiApiKey,\n ...(proxyAgent ? { httpAgent: proxyAgent as any } : {}),\n ...openaiExtraConfig,\n ...(typeof timeout === 'number' ? 
{ timeout } : {}),\n dangerouslyAllowBrowser: true,\n };\n\n const baseOpenAI = new OpenAI(openAIOptions);\n\n let openai: OpenAI = baseOpenAI;\n\n // LangSmith wrapper\n if (\n openai &&\n globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGSMITH_DEBUG)\n ) {\n if (ifInBrowser) {\n throw new Error('langsmith is not supported in browser');\n }\n console.log('DEBUGGING MODE: langsmith wrapper enabled');\n // Use variable to prevent static analysis by bundlers\n const langsmithModule = 'langsmith/wrappers';\n const { wrapOpenAI } = await import(langsmithModule);\n openai = wrapOpenAI(openai);\n }\n\n // Langfuse wrapper\n if (\n openai &&\n globalConfigManager.getEnvConfigInBoolean(MIDSCENE_LANGFUSE_DEBUG)\n ) {\n if (ifInBrowser) {\n throw new Error('langfuse is not supported in browser');\n }\n console.log('DEBUGGING MODE: langfuse wrapper enabled');\n // Use variable to prevent static analysis by bundlers\n const langfuseModule = 'langfuse';\n const { observeOpenAI } = await import(langfuseModule);\n openai = observeOpenAI(openai);\n }\n\n if (createOpenAIClient) {\n const wrappedClient = await createOpenAIClient(baseOpenAI, openAIOptions);\n\n if (wrappedClient) {\n openai = wrappedClient as OpenAI;\n }\n }\n\n return {\n completion: openai.chat.completions,\n modelName,\n modelDescription,\n uiTarsVersion,\n vlMode,\n };\n}\n\nexport async function callAI(\n messages: ChatCompletionMessageParam[],\n AIActionTypeValue: AIActionType,\n modelConfig: IModelConfig,\n options?: {\n stream?: boolean;\n onChunk?: StreamingCallback;\n },\n): Promise<{ content: string; usage?: AIUsageInfo; isStreamed: boolean }> {\n const { completion, modelName, modelDescription, uiTarsVersion, vlMode } =\n await createChatClient({\n AIActionTypeValue,\n modelConfig,\n });\n\n const responseFormat = getResponseFormat(modelName, AIActionTypeValue);\n\n const maxTokens =\n globalConfigManager.getEnvConfigValue(MIDSCENE_MODEL_MAX_TOKENS) ??\n globalConfigManager.getEnvConfigValue(OPENAI_MAX_TOKENS);\n const debugCall = getDebug('ai:call');\n const debugProfileStats = getDebug('ai:profile:stats');\n const debugProfileDetail = getDebug('ai:profile:detail');\n\n const startTime = Date.now();\n\n const isStreaming = options?.stream && options?.onChunk;\n let content: string | undefined;\n let accumulated = '';\n let usage: OpenAI.CompletionUsage | undefined;\n let timeCost: number | undefined;\n\n const buildUsageInfo = (usageData?: OpenAI.CompletionUsage) => {\n if (!usageData) return undefined;\n\n const cachedInputTokens = (\n usageData as { prompt_tokens_details?: { cached_tokens?: number } }\n )?.prompt_tokens_details?.cached_tokens;\n\n return {\n prompt_tokens: usageData.prompt_tokens ?? 0,\n completion_tokens: usageData.completion_tokens ?? 0,\n total_tokens: usageData.total_tokens ?? 0,\n cached_input: cachedInputTokens ?? 0,\n time_cost: timeCost ?? 0,\n model_name: modelName,\n model_description: modelDescription,\n intent: modelConfig.intent,\n } satisfies AIUsageInfo;\n };\n\n const commonConfig = {\n temperature: vlMode === 'vlm-ui-tars' ? 0.0 : undefined,\n stream: !!isStreaming,\n max_tokens: typeof maxTokens === 'number' ? maxTokens : undefined,\n ...(vlMode === 'qwen2.5-vl' // qwen vl v2 specific config\n ? {\n vl_high_resolution_images: true,\n }\n : {}),\n };\n\n try {\n debugCall(\n `sending ${isStreaming ? 
'streaming ' : ''}request to ${modelName}`,\n );\n\n if (isStreaming) {\n const stream = (await completion.create(\n {\n model: modelName,\n messages,\n response_format: responseFormat,\n ...commonConfig,\n },\n {\n stream: true,\n },\n )) as Stream<OpenAI.Chat.Completions.ChatCompletionChunk> & {\n _request_id?: string | null;\n };\n\n for await (const chunk of stream) {\n const content = chunk.choices?.[0]?.delta?.content || '';\n const reasoning_content =\n (chunk.choices?.[0]?.delta as any)?.reasoning_content || '';\n\n // Check for usage info in any chunk (OpenAI provides usage in separate chunks)\n if (chunk.usage) {\n usage = chunk.usage;\n }\n\n if (content || reasoning_content) {\n accumulated += content;\n const chunkData: CodeGenerationChunk = {\n content,\n reasoning_content,\n accumulated,\n isComplete: false,\n usage: undefined,\n };\n options.onChunk!(chunkData);\n }\n\n // Check if stream is complete\n if (chunk.choices?.[0]?.finish_reason) {\n timeCost = Date.now() - startTime;\n\n // If usage is not available from the stream, provide a basic usage info\n if (!usage) {\n // Estimate token counts based on content length (rough approximation)\n const estimatedTokens = Math.max(\n 1,\n Math.floor(accumulated.length / 4),\n );\n usage = {\n prompt_tokens: estimatedTokens,\n completion_tokens: estimatedTokens,\n total_tokens: estimatedTokens * 2,\n };\n }\n\n // Send final chunk\n const finalChunk: CodeGenerationChunk = {\n content: '',\n accumulated,\n reasoning_content: '',\n isComplete: true,\n usage: buildUsageInfo(usage),\n };\n options.onChunk!(finalChunk);\n break;\n }\n }\n content = accumulated;\n debugProfileStats(\n `streaming model, ${modelName}, mode, ${vlMode || 'default'}, cost-ms, ${timeCost}`,\n );\n } else {\n const result = await completion.create({\n model: modelName,\n messages,\n response_format: responseFormat,\n ...commonConfig,\n } as any);\n timeCost = Date.now() - startTime;\n\n debugProfileStats(\n `model, ${modelName}, mode, ${vlMode || 'default'}, ui-tars-version, ${uiTarsVersion}, prompt-tokens, ${result.usage?.prompt_tokens || ''}, completion-tokens, ${result.usage?.completion_tokens || ''}, total-tokens, ${result.usage?.total_tokens || ''}, cost-ms, ${timeCost}, requestId, ${result._request_id || ''}`,\n );\n\n debugProfileDetail(`model usage detail: ${JSON.stringify(result.usage)}`);\n\n assert(\n result.choices,\n `invalid response from LLM service: ${JSON.stringify(result)}`,\n );\n content = result.choices[0].message.content!;\n usage = result.usage;\n }\n\n debugCall(`response: ${content}`);\n assert(content, 'empty content');\n\n // Ensure we always have usage info for streaming responses\n if (isStreaming && !usage) {\n // Estimate token counts based on content length (rough approximation)\n const estimatedTokens = Math.max(\n 1,\n Math.floor((content || '').length / 4),\n );\n usage = {\n prompt_tokens: estimatedTokens,\n completion_tokens: estimatedTokens,\n total_tokens: estimatedTokens * 2,\n } as OpenAI.CompletionUsage;\n }\n\n return {\n content: content || '',\n usage: buildUsageInfo(usage),\n isStreamed: !!isStreaming,\n };\n } catch (e: any) {\n console.error(' call AI error', e);\n const newError = new Error(\n `failed to call ${isStreaming ? 'streaming ' : ''}AI model service: ${e.message}. 
Trouble shooting: https://midscenejs.com/model-provider.html`,\n {\n cause: e,\n },\n );\n throw newError;\n }\n}\n\nexport const getResponseFormat = (\n modelName: string,\n AIActionTypeValue: AIActionType,\n):\n | OpenAI.ChatCompletionCreateParams['response_format']\n | OpenAI.ResponseFormatJSONObject => {\n let responseFormat:\n | OpenAI.ChatCompletionCreateParams['response_format']\n | OpenAI.ResponseFormatJSONObject\n | undefined;\n\n if (modelName.includes('gpt-4')) {\n switch (AIActionTypeValue) {\n case AIActionType.ASSERT:\n responseFormat = assertSchema;\n break;\n case AIActionType.PLAN:\n responseFormat = planSchema;\n break;\n case AIActionType.EXTRACT_DATA:\n case AIActionType.DESCRIBE_ELEMENT:\n responseFormat = { type: AIResponseFormat.JSON };\n break;\n case AIActionType.TEXT:\n // No response format for plain text - return as-is\n responseFormat = undefined;\n break;\n }\n }\n\n // gpt-4o-2024-05-13 only supports json_object response format\n // Skip for plain text to allow string output\n if (\n modelName === 'gpt-4o-2024-05-13' &&\n AIActionTypeValue !== AIActionType.TEXT\n ) {\n responseFormat = { type: AIResponseFormat.JSON };\n }\n\n return responseFormat;\n};\n\nexport async function callAIWithObjectResponse<T>(\n messages: ChatCompletionMessageParam[],\n AIActionTypeValue: AIActionType,\n modelConfig: IModelConfig,\n): Promise<{ content: T; contentString: string; usage?: AIUsageInfo }> {\n const response = await callAI(messages, AIActionTypeValue, modelConfig);\n assert(response, 'empty response');\n const vlMode = modelConfig.vlMode;\n const jsonContent = safeParseJson(response.content, vlMode);\n return {\n content: jsonContent,\n contentString: response.content,\n usage: response.usage,\n };\n}\n\nexport async function callAIWithStringResponse(\n msgs: AIArgs,\n AIActionTypeValue: AIActionType,\n modelConfig: IModelConfig,\n): Promise<{ content: string; usage?: AIUsageInfo }> {\n const { content, usage } = await callAI(msgs, AIActionTypeValue, modelConfig);\n return { content, usage };\n}\n\nexport function extractJSONFromCodeBlock(response: string) {\n try {\n // First, try to match a JSON object directly in the response\n const jsonMatch = response.match(/^\\s*(\\{[\\s\\S]*\\})\\s*$/);\n if (jsonMatch) {\n return jsonMatch[1];\n }\n\n // If no direct JSON object is found, try to extract JSON from a code block\n const codeBlockMatch = response.match(\n /```(?:json)?\\s*(\\{[\\s\\S]*?\\})\\s*```/,\n );\n if (codeBlockMatch) {\n return codeBlockMatch[1];\n }\n\n // If no code block is found, try to find a JSON-like structure in the text\n const jsonLikeMatch = response.match(/\\{[\\s\\S]*\\}/);\n if (jsonLikeMatch) {\n return jsonLikeMatch[0];\n }\n } catch {}\n // If no JSON-like structure is found, return the original response\n return response;\n}\n\nexport function preprocessDoubaoBboxJson(input: string) {\n if (input.includes('bbox')) {\n // when its values like 940 445 969 490, replace all /\\d+\\s+\\d+/g with /$1,$2/g\n while (/\\d+\\s+\\d+/.test(input)) {\n input = input.replace(/(\\d+)\\s+(\\d+)/g, '$1,$2');\n }\n }\n return input;\n}\n\n/**\n * Normalize a parsed JSON object by trimming whitespace from:\n * 1. All object keys (e.g., \" prompt \" -> \"prompt\")\n * 2. 
All string values (e.g., \" Tap \" -> \"Tap\")\n * This handles LLM output that may include leading/trailing spaces.\n */\nfunction normalizeJsonObject(obj: any): any {\n // Handle null and undefined\n if (obj === null || obj === undefined) {\n return obj;\n }\n\n // Handle arrays - recursively normalize each element\n if (Array.isArray(obj)) {\n return obj.map((item) => normalizeJsonObject(item));\n }\n\n // Handle objects\n if (typeof obj === 'object') {\n const normalized: any = {};\n\n for (const [key, value] of Object.entries(obj)) {\n // Trim the key to remove leading/trailing spaces\n const trimmedKey = key.trim();\n\n // Recursively normalize the value\n let normalizedValue = normalizeJsonObject(value);\n\n // Trim all string values\n if (typeof normalizedValue === 'string') {\n normalizedValue = normalizedValue.trim();\n }\n\n normalized[trimmedKey] = normalizedValue;\n }\n\n return normalized;\n }\n\n // Handle primitive strings\n if (typeof obj === 'string') {\n return obj.trim();\n }\n\n // Return other primitives as-is\n return obj;\n}\n\nexport function safeParseJson(input: string, vlMode: TVlModeTypes | undefined) {\n const cleanJsonString = extractJSONFromCodeBlock(input);\n // match the point\n if (cleanJsonString?.match(/\\((\\d+),(\\d+)\\)/)) {\n return cleanJsonString\n .match(/\\((\\d+),(\\d+)\\)/)\n ?.slice(1)\n .map(Number);\n }\n\n let parsed: any;\n try {\n parsed = JSON.parse(cleanJsonString);\n return normalizeJsonObject(parsed);\n } catch {}\n try {\n parsed = JSON.parse(jsonrepair(cleanJsonString));\n return normalizeJsonObject(parsed);\n } catch (e) {}\n\n if (vlMode === 'doubao-vision' || vlMode === 'vlm-ui-tars') {\n const jsonString = preprocessDoubaoBboxJson(cleanJsonString);\n parsed = JSON.parse(jsonrepair(jsonString));\n return normalizeJsonObject(parsed);\n }\n throw Error(`failed to parse json response: 
${input}`);\n}\n"],"names":["__webpack_require__","module","getter","definition","key","Object","obj","prop","Symbol","createChatClient","AIActionTypeValue","modelConfig","socksProxy","httpProxy","modelName","openaiBaseURL","openaiApiKey","openaiExtraConfig","modelDescription","uiTarsVersion","vlMode","createOpenAIClient","timeout","proxyAgent","debugProxy","getDebug","HttpsProxyAgent","SocksProxyAgent","openAIOptions","baseOpenAI","OpenAI","openai","globalConfigManager","MIDSCENE_LANGSMITH_DEBUG","ifInBrowser","Error","console","langsmithModule","wrapOpenAI","MIDSCENE_LANGFUSE_DEBUG","langfuseModule","observeOpenAI","wrappedClient","callAI","messages","options","completion","responseFormat","getResponseFormat","maxTokens","MIDSCENE_MODEL_MAX_TOKENS","OPENAI_MAX_TOKENS","debugCall","debugProfileStats","debugProfileDetail","startTime","Date","isStreaming","content","accumulated","usage","timeCost","buildUsageInfo","usageData","cachedInputTokens","commonConfig","undefined","stream","chunk","reasoning_content","chunkData","estimatedTokens","Math","finalChunk","result","JSON","assert","e","newError","AIActionType","assertSchema","planSchema","AIResponseFormat","callAIWithObjectResponse","response","jsonContent","safeParseJson","callAIWithStringResponse","msgs","extractJSONFromCodeBlock","jsonMatch","codeBlockMatch","jsonLikeMatch","preprocessDoubaoBboxJson","input","normalizeJsonObject","Array","item","normalized","value","trimmedKey","normalizedValue","cleanJsonString","Number","parsed","jsonrepair","jsonString"],"mappings":";;;IACAA,oBAAoB,CAAC,GAAG,CAACC;QACxB,IAAIC,SAASD,UAAUA,OAAO,UAAU,GACvC,IAAOA,MAAM,CAAC,UAAU,GACxB,IAAOA;QACRD,oBAAoB,CAAC,CAACE,QAAQ;YAAE,GAAGA;QAAO;QAC1C,OAAOA;IACR;;;ICPAF,oBAAoB,CAAC,GAAG,CAAC,UAASG;QACjC,IAAI,IAAIC,OAAOD,WACR,IAAGH,oBAAoB,CAAC,CAACG,YAAYC,QAAQ,CAACJ,oBAAoB,CAAC,CAAC,UAASI,MACzEC,OAAO,cAAc,CAAC,UAASD,KAAK;YAAE,YAAY;YAAM,KAAKD,UAAU,CAACC,IAAI;QAAC;IAGzF;;;ICNAJ,oBAAoB,CAAC,GAAG,CAACM,KAAKC,OAAUF,OAAO,SAAS,CAAC,cAAc,CAAC,IAAI,CAACC,KAAKC;;;ICClFP,oBAAoB,CAAC,GAAG,CAAC;QACxB,IAAG,AAAkB,eAAlB,OAAOQ,UAA0BA,OAAO,WAAW,EACrDH,OAAO,cAAc,CAAC,UAASG,OAAO,WAAW,EAAE;YAAE,OAAO;QAAS;QAEtEH,OAAO,cAAc,CAAC,UAAS,cAAc;YAAE,OAAO;QAAK;IAC5D;;;;;;;;;;;;;;;;;;;;;;;;;ACmBA,eAAeI,iBAAiB,EAC9BC,iBAAiB,EACjBC,WAAW,EAIZ;IAOC,MAAM,EACJC,UAAU,EACVC,SAAS,EACTC,SAAS,EACTC,aAAa,EACbC,YAAY,EACZC,iBAAiB,EACjBC,gBAAgB,EAChB,oBAAoBC,aAAa,EACjCC,MAAM,EACNC,kBAAkB,EAClBC,OAAO,EACR,GAAGX;IAEJ,IAAIY;IACJ,MAAMC,aAAaC,AAAAA,IAAAA,uBAAAA,QAAAA,AAAAA,EAAS;IAC5B,IAAIZ,WAAW;QACbW,WAAW,oBAAoBX;QAC/BU,aAAa,IAAIG,2CAAAA,eAAeA,CAACb;IACnC,OAAO,IAAID,YAAY;QACrBY,WAAW,qBAAqBZ;QAChCW,aAAa,IAAII,2CAAAA,eAAeA,CAACf;IACnC;IAEA,MAAMgB,gBAAgB;QACpB,SAASb;QACT,QAAQC;QACR,GAAIO,aAAa;YAAE,WAAWA;QAAkB,IAAI,CAAC,CAAC;QACtD,GAAGN,iBAAiB;QACpB,GAAI,AAAmB,YAAnB,OAAOK,UAAuB;YAAEA;QAAQ,IAAI,CAAC,CAAC;QAClD,yBAAyB;IAC3B;IAEA,MAAMO,aAAa,IAAIC,CAAAA,yBAAAA,EAAOF;IAE9B,IAAIG,SAAiBF;IAGrB,IACEE,UACAC,oBAAAA,mBAAAA,CAAAA,qBAAyC,CAACC,oBAAAA,wBAAwBA,GAClE;QACA,IAAIC,sBAAAA,WAAWA,EACb,MAAM,IAAIC,MAAM;QAElBC,QAAQ,GAAG,CAAC;QAEZ,MAAMC,kBAAkB;QACxB,MAAM,EAAEC,UAAU,EAAE,GAAG,MAAM,MAAM,CAACD;QACpCN,SAASO,WAAWP;IACtB;IAGA,IACEA,UACAC,oBAAAA,mBAAAA,CAAAA,qBAAyC,CAACO,oBAAAA,uBAAuBA,GACjE;QACA,IAAIL,sBAAAA,WAAWA,EACb,MAAM,IAAIC,MAAM;QAElBC,QAAQ,GAAG,CAAC;QAEZ,MAAMI,iBAAiB;QACvB,MAAM,EAAEC,aAAa,EAAE,GAAG,MAAM,MAAM,CAACD;QACvCT,SAASU,cAAcV;IACzB;IAEA,IAAIV,oBAAoB;QACtB,MAAMqB,gBAAgB,MAAMrB,mBAAmBQ,YAAYD;QAE3D,IAAIc,eACFX,SAASW;IAEb;IAEA,OAAO;QACL,YAAYX,OAAO,IAAI,CAAC,WAAW;QACnCjB;QACAI;QACAC;QACAC;IACF;AACF;AAEO,eAAeuB,OACpBC,QAAsC,EACtC
[minified sourcemap mappings and embedded sources omitted]
package/dist/lib/ai-model/ui-tars-planning.js
CHANGED
@@ -288,10 +288,10 @@ async function resizeImageForUiTars(imageBase64, size, uiTarsVersion) {
 }
 exports.resizeImageForUiTars = __webpack_exports__.resizeImageForUiTars;
 exports.uiTarsPlanning = __webpack_exports__.uiTarsPlanning;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "resizeImageForUiTars",
 "uiTarsPlanning"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
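The only change in hunks like this one is the rename of the generated loop variable from __webpack_i__ to __rspack_i, consistent with a bundler (Rspack) toolchain bump rather than any source-level change. A rough sketch of what this interop boilerplate does, using illustrative names rather than the package's actual identifiers:

// Named exports are assigned explicitly first; the loop then mirrors any
// remaining enumerable keys of the internal exports object onto the CommonJS
// exports object, skipping names that were already re-exported above.
const internalExports = {
  resizeImageForUiTars: () => "resized", // stand-in implementations
  uiTarsPlanning: () => "planned",
  helperAddedLater: 42,                  // picked up by the loop
};
const alreadyExported = ["resizeImageForUiTars", "uiTarsPlanning"];
for (const key in internalExports) {
  if (alreadyExported.indexOf(key) === -1) {
    module.exports[key] = internalExports[key];
  }
}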
package/dist/lib/common.js
CHANGED
@@ -413,7 +413,7 @@ exports.markupImageForLLM = __webpack_exports__.markupImageForLLM;
 exports.mergeRects = __webpack_exports__.mergeRects;
 exports.normalized01000 = __webpack_exports__.normalized01000;
 exports.parseActionParam = __webpack_exports__.parseActionParam;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "AIActionType",
 "PointSchema",
 "RectSchema",
@@ -438,7 +438,7 @@ for(var __webpack_i__ in __webpack_exports__)if (-1 === [
 "mergeRects",
 "normalized01000",
 "parseActionParam"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/device/device-options.js
CHANGED
@@ -12,7 +12,7 @@ var __webpack_require__ = {};
 })();
 var __webpack_exports__ = {};
 __webpack_require__.r(__webpack_exports__);
-for(var __webpack_i__ in __webpack_exports__)exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+for(var __rspack_i in __webpack_exports__)exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
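Where a bundle re-exports nothing by name, the generated loop has no skip list and copies every enumerable key unconditionally. A minimal sketch of that variant, again with illustrative stand-ins rather than package code:

// Unguarded variant: mirror every enumerable key onto the CommonJS exports.
const internalExports = { someOption: { defaultValue: true } };
for (const key in internalExports) {
  module.exports[key] = internalExports[key];
}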
package/dist/lib/device/index.js
CHANGED
@@ -237,7 +237,7 @@ exports.defineActionRightClick = __webpack_exports__.defineActionRightClick;
 exports.defineActionScroll = __webpack_exports__.defineActionScroll;
 exports.defineActionSwipe = __webpack_exports__.defineActionSwipe;
 exports.defineActionTap = __webpack_exports__.defineActionTap;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "AbstractInterface",
 "ActionLongPressParamSchema",
 "ActionSwipeParamSchema",
@@ -264,7 +264,7 @@ for(var __webpack_i__ in __webpack_exports__)if (-1 === [
 "defineActionScroll",
 "defineActionSwipe",
 "defineActionTap"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/image/index.js
CHANGED
@@ -40,7 +40,7 @@ exports.localImg2Base64 = __webpack_exports__.localImg2Base64;
 exports.resizeAndConvertImgBuffer = __webpack_exports__.resizeAndConvertImgBuffer;
 exports.saveBase64Image = __webpack_exports__.saveBase64Image;
 exports.zoomForGPT4o = __webpack_exports__.zoomForGPT4o;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "httpImg2Base64",
 "imageInfo",
 "imageInfoOfBase64",
@@ -48,7 +48,7 @@ for(var __webpack_i__ in __webpack_exports__)if (-1 === [
 "resizeAndConvertImgBuffer",
 "saveBase64Image",
 "zoomForGPT4o"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/index.js
CHANGED
@@ -80,7 +80,7 @@ exports.getMidsceneLocationSchema = __webpack_exports__.getMidsceneLocationSchem
 exports.getVersion = __webpack_exports__.getVersion;
 exports.plan = __webpack_exports__.plan;
 exports.z = __webpack_exports__.z;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "Agent",
 "AiLocateElement",
 "MIDSCENE_MODEL_NAME",
@@ -99,7 +99,7 @@ for(var __webpack_i__ in __webpack_exports__)if (-1 === [
 "getVersion",
 "plan",
 "z"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/report.js
CHANGED
@@ -112,9 +112,9 @@ class ReportMergingTool {
 }
 }
 exports.ReportMergingTool = __webpack_exports__.ReportMergingTool;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "ReportMergingTool"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/service/index.js
CHANGED
@@ -242,9 +242,9 @@ class Service {
 }
 }
 exports["default"] = __webpack_exports__["default"];
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "default"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/service/utils.js
CHANGED
@@ -39,9 +39,9 @@ function createServiceDump(data) {
 return finalData;
 }
 exports.createServiceDump = __webpack_exports__.createServiceDump;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "createServiceDump"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/task-runner.js
CHANGED
@@ -290,10 +290,10 @@ class TaskExecutionError extends Error {
 }
 exports.TaskExecutionError = __webpack_exports__.TaskExecutionError;
 exports.TaskRunner = __webpack_exports__.TaskRunner;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "TaskExecutionError",
 "TaskRunner"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/tree.js
CHANGED
@@ -41,11 +41,11 @@ function descriptionOfTree(tree, truncateTextLength, filterNonTextContent = fals
 exports.descriptionOfTree = __webpack_exports__.descriptionOfTree;
 exports.trimAttributes = __webpack_exports__.trimAttributes;
 exports.truncateText = __webpack_exports__.truncateText;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "descriptionOfTree",
 "trimAttributes",
 "truncateText"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });
package/dist/lib/types.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 var __webpack_modules__ = {
-"./yaml"
+"./yaml" (module) {
 module.exports = require("./yaml.js");
 }
 };
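The rewritten module-map entry uses ES2015 string-keyed method shorthand, so "./yaml" (module) { ... } is equivalent to "./yaml": function (module) { ... }. A self-contained sketch of how such a map is consumed (simplified; a real bundler runtime also caches module records):

// String-keyed method shorthand in an object literal is valid ES2015.
// The factory function populates module.exports for its module id.
const webpackStyleModules = {
  "./yaml"(module) {
    module.exports = { loadedFrom: "./yaml.js" }; // stand-in for require("./yaml.js")
  },
};
const record = { exports: {} };
webpackStyleModules["./yaml"](record);
console.log(record.exports.loadedFrom); // prints "./yaml.js"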
@@ -89,11 +89,11 @@ var __webpack_exports__ = {};
 exports.AIResponseFormat = __webpack_exports__.AIResponseFormat;
 exports.ServiceError = __webpack_exports__.ServiceError;
 exports.UIContext = __webpack_exports__.UIContext;
-for(var __webpack_i__ in __webpack_exports__)if (-1 === [
+for(var __rspack_i in __webpack_exports__)if (-1 === [
 "AIResponseFormat",
 "ServiceError",
 "UIContext"
-].indexOf(__webpack_i__)) exports[__webpack_i__] = __webpack_exports__[__webpack_i__];
+].indexOf(__rspack_i)) exports[__rspack_i] = __webpack_exports__[__rspack_i];
 Object.defineProperty(exports, '__esModule', {
 value: true
 });