@midscene/core 0.8.11 → 0.8.12
This diff reflects the changes between the two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
- package/dist/lib/ai-model.js +4 -1
- package/dist/lib/env.js +4 -0
- package/dist/lib/index.js +5 -2
- package/dist/lib/types/env.d.ts +4 -1
- package/dist/lib/utils.js +3 -1
- package/package.json +2 -2
- package/report/index.html +2 -2
package/dist/lib/ai-model.js
CHANGED
@@ -4309,6 +4309,7 @@ var MIDSCENE_DEBUG_MODE = "MIDSCENE_DEBUG_MODE";
 var MIDSCENE_OPENAI_SOCKS_PROXY = "MIDSCENE_OPENAI_SOCKS_PROXY";
 var OPENAI_API_KEY = "OPENAI_API_KEY";
 var OPENAI_BASE_URL = "OPENAI_BASE_URL";
+var OPENAI_MAX_TOKENS = "OPENAI_MAX_TOKENS";
 var MIDSCENE_MODEL_TEXT_ONLY = "MIDSCENE_MODEL_TEXT_ONLY";
 var MIDSCENE_CACHE = "MIDSCENE_CACHE";
 var MATCH_BY_POSITION = "MATCH_BY_POSITION";
@@ -4330,6 +4331,7 @@ var allConfigFromEnv = () => {
 [OPENAI_API_KEY]: process.env[OPENAI_API_KEY] || void 0,
 [OPENAI_BASE_URL]: process.env[OPENAI_BASE_URL] || void 0,
 [MIDSCENE_MODEL_TEXT_ONLY]: process.env[MIDSCENE_MODEL_TEXT_ONLY] || void 0,
+[OPENAI_MAX_TOKENS]: process.env[OPENAI_MAX_TOKENS] || void 0,
 [OPENAI_USE_AZURE]: process.env[OPENAI_USE_AZURE] || void 0,
 [MIDSCENE_CACHE]: process.env[MIDSCENE_CACHE] || void 0,
 [MATCH_BY_POSITION]: process.env[MATCH_BY_POSITION] || void 0,
@@ -5219,6 +5221,7 @@ async function createChatClient() {
 async function call(messages, responseFormat) {
 const { completion, style } = await createChatClient();
 const shouldPrintTiming = typeof getAIConfig(MIDSCENE_DEBUG_AI_PROFILE) === "string";
+const maxTokens = getAIConfig(OPENAI_MAX_TOKENS);
 const startTime = Date.now();
 const model = getModelName();
 let content;
@@ -5226,7 +5229,7 @@ async function call(messages, responseFormat) {
 const commonConfig = {
 temperature: 0.1,
 stream: false,
-max_tokens:
+max_tokens: typeof maxTokens === "number" ? maxTokens : Number.parseInt(maxTokens || "2048", 10)
 };
 if (style === "openai") {
 const result = await completion.create({
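In short, call() now reads OPENAI_MAX_TOKENS through getAIConfig and derives max_tokens from it. A minimal standalone sketch of that fallback (resolveMaxTokens is a hypothetical helper, not part of the package):

// Sketch of the fallback introduced above, for illustration only.
function resolveMaxTokens(maxTokens) {
  // Numbers pass through; strings (typically env values) are parsed; a missing
  // value falls back to 2048. A non-numeric string would yield NaN here.
  return typeof maxTokens === "number"
    ? maxTokens
    : Number.parseInt(maxTokens || "2048", 10);
}

console.log(resolveMaxTokens(undefined)); // 2048 (OPENAI_MAX_TOKENS unset)
console.log(resolveMaxTokens("4096"));    // 4096 (OPENAI_MAX_TOKENS="4096")
console.log(resolveMaxTokens(1024));      // 1024 (if a numeric value ever reaches the config)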
package/dist/lib/env.js
CHANGED
@@ -38,6 +38,7 @@ __export(env_exports, {
 MIDSCENE_USE_AZURE_OPENAI: () => MIDSCENE_USE_AZURE_OPENAI,
 OPENAI_API_KEY: () => OPENAI_API_KEY,
 OPENAI_BASE_URL: () => OPENAI_BASE_URL,
+OPENAI_MAX_TOKENS: () => OPENAI_MAX_TOKENS,
 OPENAI_USE_AZURE: () => OPENAI_USE_AZURE,
 allAIConfig: () => allAIConfig,
 getAIConfig: () => getAIConfig,
@@ -54,6 +55,7 @@ var MIDSCENE_DEBUG_MODE = "MIDSCENE_DEBUG_MODE";
 var MIDSCENE_OPENAI_SOCKS_PROXY = "MIDSCENE_OPENAI_SOCKS_PROXY";
 var OPENAI_API_KEY = "OPENAI_API_KEY";
 var OPENAI_BASE_URL = "OPENAI_BASE_URL";
+var OPENAI_MAX_TOKENS = "OPENAI_MAX_TOKENS";
 var MIDSCENE_MODEL_TEXT_ONLY = "MIDSCENE_MODEL_TEXT_ONLY";
 var MIDSCENE_CACHE = "MIDSCENE_CACHE";
 var MATCH_BY_POSITION = "MATCH_BY_POSITION";
@@ -75,6 +77,7 @@ var allConfigFromEnv = () => {
 [OPENAI_API_KEY]: process.env[OPENAI_API_KEY] || void 0,
 [OPENAI_BASE_URL]: process.env[OPENAI_BASE_URL] || void 0,
 [MIDSCENE_MODEL_TEXT_ONLY]: process.env[MIDSCENE_MODEL_TEXT_ONLY] || void 0,
+[OPENAI_MAX_TOKENS]: process.env[OPENAI_MAX_TOKENS] || void 0,
 [OPENAI_USE_AZURE]: process.env[OPENAI_USE_AZURE] || void 0,
 [MIDSCENE_CACHE]: process.env[MIDSCENE_CACHE] || void 0,
 [MATCH_BY_POSITION]: process.env[MATCH_BY_POSITION] || void 0,
@@ -133,6 +136,7 @@ var overrideAIConfig = (newConfig, extendMode) => {
 MIDSCENE_USE_AZURE_OPENAI,
 OPENAI_API_KEY,
 OPENAI_BASE_URL,
+OPENAI_MAX_TOKENS,
 OPENAI_USE_AZURE,
 allAIConfig,
 getAIConfig,
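Together these hunks register the new OPENAI_MAX_TOKENS key alongside the existing OpenAI settings, so it can be supplied via the environment or via overrideAIConfig. A rough usage sketch; the require path, the partial config object, and the merge behavior of extendMode are assumptions based on the declarations above, not verified against the package:

// Illustrative only.
process.env.OPENAI_MAX_TOKENS = "4096"; // picked up by allConfigFromEnv()

const { overrideAIConfig, getAIConfig, OPENAI_MAX_TOKENS } = require("@midscene/core/dist/lib/env.js");

// extendMode = true should merge this key into the existing config rather
// than replacing it wholesale.
overrideAIConfig({ [OPENAI_MAX_TOKENS]: "8192" }, true);

console.log(getAIConfig(OPENAI_MAX_TOKENS)); // expected to print "8192"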
package/dist/lib/index.js
CHANGED
@@ -4315,6 +4315,7 @@ var MIDSCENE_DEBUG_MODE = "MIDSCENE_DEBUG_MODE";
 var MIDSCENE_OPENAI_SOCKS_PROXY = "MIDSCENE_OPENAI_SOCKS_PROXY";
 var OPENAI_API_KEY = "OPENAI_API_KEY";
 var OPENAI_BASE_URL = "OPENAI_BASE_URL";
+var OPENAI_MAX_TOKENS = "OPENAI_MAX_TOKENS";
 var MIDSCENE_MODEL_TEXT_ONLY = "MIDSCENE_MODEL_TEXT_ONLY";
 var MIDSCENE_CACHE = "MIDSCENE_CACHE";
 var MATCH_BY_POSITION = "MATCH_BY_POSITION";
@@ -4336,6 +4337,7 @@ var allConfigFromEnv = () => {
 [OPENAI_API_KEY]: process.env[OPENAI_API_KEY] || void 0,
 [OPENAI_BASE_URL]: process.env[OPENAI_BASE_URL] || void 0,
 [MIDSCENE_MODEL_TEXT_ONLY]: process.env[MIDSCENE_MODEL_TEXT_ONLY] || void 0,
+[OPENAI_MAX_TOKENS]: process.env[OPENAI_MAX_TOKENS] || void 0,
 [OPENAI_USE_AZURE]: process.env[OPENAI_USE_AZURE] || void 0,
 [MIDSCENE_CACHE]: process.env[MIDSCENE_CACHE] || void 0,
 [MATCH_BY_POSITION]: process.env[MATCH_BY_POSITION] || void 0,
@@ -4526,7 +4528,7 @@ function stringifyDumpData(data, indents) {
 return JSON.stringify(data, replacerForPageObject, indents);
 }
 function getVersion() {
-return "0.8.11";
+return "0.8.12";
 }
 
 // src/action/executor.ts
@@ -5582,6 +5584,7 @@ async function createChatClient() {
 async function call(messages, responseFormat) {
 const { completion, style } = await createChatClient();
 const shouldPrintTiming = typeof getAIConfig(MIDSCENE_DEBUG_AI_PROFILE) === "string";
+const maxTokens = getAIConfig(OPENAI_MAX_TOKENS);
 const startTime = Date.now();
 const model = getModelName();
 let content;
@@ -5589,7 +5592,7 @@ async function call(messages, responseFormat) {
 const commonConfig = {
 temperature: 0.1,
 stream: false,
-max_tokens:
+max_tokens: typeof maxTokens === "number" ? maxTokens : Number.parseInt(maxTokens || "2048", 10)
 };
 if (style === "openai") {
 const result = await completion.create({
package/dist/lib/types/env.d.ts
CHANGED
@@ -7,6 +7,7 @@ declare const MIDSCENE_DEBUG_MODE = "MIDSCENE_DEBUG_MODE";
 declare const MIDSCENE_OPENAI_SOCKS_PROXY = "MIDSCENE_OPENAI_SOCKS_PROXY";
 declare const OPENAI_API_KEY = "OPENAI_API_KEY";
 declare const OPENAI_BASE_URL = "OPENAI_BASE_URL";
+declare const OPENAI_MAX_TOKENS = "OPENAI_MAX_TOKENS";
 declare const MIDSCENE_MODEL_TEXT_ONLY = "MIDSCENE_MODEL_TEXT_ONLY";
 declare const MIDSCENE_CACHE = "MIDSCENE_CACHE";
 declare const MATCH_BY_POSITION = "MATCH_BY_POSITION";
@@ -27,6 +28,7 @@ declare const allConfigFromEnv: () => {
 OPENAI_API_KEY: string | undefined;
 OPENAI_BASE_URL: string | undefined;
 MIDSCENE_MODEL_TEXT_ONLY: string | undefined;
+OPENAI_MAX_TOKENS: string | undefined;
 OPENAI_USE_AZURE: string | undefined;
 MIDSCENE_CACHE: string | undefined;
 MATCH_BY_POSITION: string | undefined;
@@ -51,6 +53,7 @@ declare const allAIConfig: () => {
 OPENAI_API_KEY: string | undefined;
 OPENAI_BASE_URL: string | undefined;
 MIDSCENE_MODEL_TEXT_ONLY: string | undefined;
+OPENAI_MAX_TOKENS: string | undefined;
 OPENAI_USE_AZURE: string | undefined;
 MIDSCENE_CACHE: string | undefined;
 MATCH_BY_POSITION: string | undefined;
@@ -64,4 +67,4 @@ declare const allAIConfig: () => {
 };
 declare const overrideAIConfig: (newConfig: ReturnType<typeof allConfigFromEnv>, extendMode?: boolean) => void;
 
-export { ANTHROPIC_API_KEY, MATCH_BY_POSITION, MIDSCENE_AZURE_OPENAI_INIT_CONFIG_JSON, MIDSCENE_AZURE_OPENAI_SCOPE, MIDSCENE_CACHE, MIDSCENE_DANGEROUSLY_PRINT_ALL_CONFIG, MIDSCENE_DEBUG_AI_PROFILE, MIDSCENE_DEBUG_MODE, MIDSCENE_LANGSMITH_DEBUG, MIDSCENE_MODEL_NAME, MIDSCENE_MODEL_TEXT_ONLY, MIDSCENE_OPENAI_INIT_CONFIG_JSON, MIDSCENE_OPENAI_SOCKS_PROXY, MIDSCENE_REPORT_TAG_NAME, MIDSCENE_USE_ANTHROPIC_SDK, MIDSCENE_USE_AZURE_OPENAI, OPENAI_API_KEY, OPENAI_BASE_URL, OPENAI_USE_AZURE, allAIConfig, getAIConfig, getAIConfigInJson, overrideAIConfig };
+export { ANTHROPIC_API_KEY, MATCH_BY_POSITION, MIDSCENE_AZURE_OPENAI_INIT_CONFIG_JSON, MIDSCENE_AZURE_OPENAI_SCOPE, MIDSCENE_CACHE, MIDSCENE_DANGEROUSLY_PRINT_ALL_CONFIG, MIDSCENE_DEBUG_AI_PROFILE, MIDSCENE_DEBUG_MODE, MIDSCENE_LANGSMITH_DEBUG, MIDSCENE_MODEL_NAME, MIDSCENE_MODEL_TEXT_ONLY, MIDSCENE_OPENAI_INIT_CONFIG_JSON, MIDSCENE_OPENAI_SOCKS_PROXY, MIDSCENE_REPORT_TAG_NAME, MIDSCENE_USE_ANTHROPIC_SDK, MIDSCENE_USE_AZURE_OPENAI, OPENAI_API_KEY, OPENAI_BASE_URL, OPENAI_MAX_TOKENS, OPENAI_USE_AZURE, allAIConfig, getAIConfig, getAIConfigInJson, overrideAIConfig };
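As with every other key, OPENAI_MAX_TOKENS is declared as string | undefined, which is why call() in ai-model.js parses the value before use. An illustrative check, assuming the value comes straight from the environment:

// Illustrative only: env-sourced config is a string (or undefined), never a
// number, so it must be parsed before being used as max_tokens.
const fromEnv = process.env.OPENAI_MAX_TOKENS; // string | undefined
const maxTokens = typeof fromEnv === "string" ? Number.parseInt(fromEnv, 10) : 2048;
console.log(maxTokens);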
package/dist/lib/utils.js
CHANGED
@@ -67,6 +67,7 @@ var MIDSCENE_DEBUG_MODE = "MIDSCENE_DEBUG_MODE";
 var MIDSCENE_OPENAI_SOCKS_PROXY = "MIDSCENE_OPENAI_SOCKS_PROXY";
 var OPENAI_API_KEY = "OPENAI_API_KEY";
 var OPENAI_BASE_URL = "OPENAI_BASE_URL";
+var OPENAI_MAX_TOKENS = "OPENAI_MAX_TOKENS";
 var MIDSCENE_MODEL_TEXT_ONLY = "MIDSCENE_MODEL_TEXT_ONLY";
 var MIDSCENE_CACHE = "MIDSCENE_CACHE";
 var MATCH_BY_POSITION = "MATCH_BY_POSITION";
@@ -88,6 +89,7 @@ var allConfigFromEnv = () => {
 [OPENAI_API_KEY]: process.env[OPENAI_API_KEY] || void 0,
 [OPENAI_BASE_URL]: process.env[OPENAI_BASE_URL] || void 0,
 [MIDSCENE_MODEL_TEXT_ONLY]: process.env[MIDSCENE_MODEL_TEXT_ONLY] || void 0,
+[OPENAI_MAX_TOKENS]: process.env[OPENAI_MAX_TOKENS] || void 0,
 [OPENAI_USE_AZURE]: process.env[OPENAI_USE_AZURE] || void 0,
 [MIDSCENE_CACHE]: process.env[MIDSCENE_CACHE] || void 0,
 [MATCH_BY_POSITION]: process.env[MATCH_BY_POSITION] || void 0,
@@ -293,7 +295,7 @@ function stringifyDumpData(data, indents) {
 return JSON.stringify(data, replacerForPageObject, indents);
 }
 function getVersion() {
-return "0.8.11";
+return "0.8.12";
 }
 function debugLog(...message) {
 const debugMode = getAIConfig(MIDSCENE_DEBUG_MODE);
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "@midscene/core",
 "description": "An AI-powered automation SDK can control the page, perform assertions, and extract data in JSON format using natural language. See https://midscenejs.com/ for details.",
-"version": "0.8.11",
+"version": "0.8.12",
 "repository": "https://github.com/web-infra-dev/midscene",
 "homepage": "https://midscenejs.com/",
 "jsnext:source": "./src/index.ts",
@@ -42,7 +42,7 @@
 "openai": "4.57.1",
 "optional": "0.1.4",
 "socks-proxy-agent": "8.0.4",
-"@midscene/shared": "0.8.
+"@midscene/shared": "0.8.12"
 },
 "devDependencies": {
 "@modern-js/module-tools": "2.60.6",