@midscene/core 0.3.4 → 0.4.0
- package/dist/es/ai-model.js +63 -85
- package/dist/es/image.js +10 -280
- package/dist/es/index.js +33 -39
- package/dist/lib/ai-model.js +57 -82
- package/dist/lib/image.js +9 -297
- package/dist/lib/index.js +28 -42
- package/dist/types/ai-model.d.ts +3 -3
- package/dist/types/image.d.ts +1 -118
- package/dist/types/{index-7a9ec3e1.d.ts → index-0c968ddd.d.ts} +1 -1
- package/dist/types/index.d.ts +4 -4
- package/dist/types/{types-ed68710b.d.ts → types-ad0dfcb1.d.ts} +2 -2
- package/dist/types/utils.d.ts +1 -1
- package/package.json +3 -3
package/dist/lib/ai-model.js
CHANGED
@@ -1556,39 +1556,39 @@ var Client = class _Client {
 }
 return runParams;
 }
-async _getResponse(
+async _getResponse(path, queryParams) {
 var _a;
 const paramsString = (_a = queryParams == null ? void 0 : queryParams.toString()) != null ? _a : "";
-const url = `${this.apiUrl}${
+const url = `${this.apiUrl}${path}?${paramsString}`;
 const response = await this.caller.call(fetch, url, __spreadValues({
 method: "GET",
 headers: this.headers,
 signal: AbortSignal.timeout(this.timeout_ms)
 }, this.fetchOptions));
 if (!response.ok) {
-throw new Error(`Failed to fetch ${
+throw new Error(`Failed to fetch ${path}: ${response.status} ${response.statusText}`);
 }
 return response;
 }
-async _get(
-const response = await this._getResponse(
+async _get(path, queryParams) {
+const response = await this._getResponse(path, queryParams);
 return response.json();
 }
 _getPaginated(_0) {
-return __asyncGenerator(this, arguments, function* (
+return __asyncGenerator(this, arguments, function* (path, queryParams = new URLSearchParams()) {
 let offset = Number(queryParams.get("offset")) || 0;
 const limit = Number(queryParams.get("limit")) || 100;
 while (true) {
 queryParams.set("offset", String(offset));
 queryParams.set("limit", String(limit));
-const url = `${this.apiUrl}${
+const url = `${this.apiUrl}${path}?${queryParams}`;
 const response = yield new __await(this.caller.call(fetch, url, __spreadValues({
 method: "GET",
 headers: this.headers,
 signal: AbortSignal.timeout(this.timeout_ms)
 }, this.fetchOptions)));
 if (!response.ok) {
-throw new Error(`Failed to fetch ${
+throw new Error(`Failed to fetch ${path}: ${response.status} ${response.statusText}`);
 }
 const items = yield new __await(response.json());
 if (items.length === 0) {
@@ -1602,11 +1602,11 @@ var Client = class _Client {
 }
 });
 }
-_getCursorPaginatedList(
+_getCursorPaginatedList(path, body = null, requestMethod = "POST", dataKey = "runs") {
 return __asyncGenerator(this, null, function* () {
 const bodyParams = body ? __spreadValues({}, body) : {};
 while (true) {
-const response = yield new __await(this.caller.call(fetch, `${this.apiUrl}${
+const response = yield new __await(this.caller.call(fetch, `${this.apiUrl}${path}`, __spreadProps(__spreadValues({
 method: requestMethod,
 headers: __spreadProps(__spreadValues({}, this.headers), { "Content-Type": "application/json" }),
 signal: AbortSignal.timeout(this.timeout_ms)
@@ -2266,19 +2266,19 @@ var Client = class _Client {
 return result;
 }
 async hasProject({ projectId, projectName }) {
-let
+let path = "/sessions";
 const params = new URLSearchParams();
 if (projectId !== void 0 && projectName !== void 0) {
 throw new Error("Must provide either projectName or projectId, not both");
 } else if (projectId !== void 0) {
 assertUuid(projectId);
-
+path += `/${projectId}`;
 } else if (projectName !== void 0) {
 params.append("name", projectName);
 } else {
 throw new Error("Must provide projectName or projectId");
 }
-const response = await this.caller.call(fetch, `${this.apiUrl}${
+const response = await this.caller.call(fetch, `${this.apiUrl}${path}?${params}`, __spreadValues({
 method: "GET",
 headers: this.headers,
 signal: AbortSignal.timeout(this.timeout_ms)
@@ -2297,13 +2297,13 @@ var Client = class _Client {
 }
 }
 async readProject({ projectId, projectName, includeStats }) {
-let
+let path = "/sessions";
 const params = new URLSearchParams();
 if (projectId !== void 0 && projectName !== void 0) {
 throw new Error("Must provide either projectName or projectId, not both");
 } else if (projectId !== void 0) {
 assertUuid(projectId);
-
+path += `/${projectId}`;
 } else if (projectName !== void 0) {
 params.append("name", projectName);
 } else {
@@ -2312,7 +2312,7 @@ var Client = class _Client {
 if (includeStats !== void 0) {
 params.append("include_stats", includeStats.toString());
 }
-const response = await this._get(
+const response = await this._get(path, params);
 let result;
 if (Array.isArray(response)) {
 if (response.length === 0) {
@@ -2484,19 +2484,19 @@ var Client = class _Client {
 return result;
 }
 async readDataset({ datasetId, datasetName }) {
-let
+let path = "/datasets";
 const params = new URLSearchParams({ limit: "1" });
 if (datasetId !== void 0 && datasetName !== void 0) {
 throw new Error("Must provide either datasetName or datasetId, not both");
 } else if (datasetId !== void 0) {
 assertUuid(datasetId);
-
+path += `/${datasetId}`;
 } else if (datasetName !== void 0) {
 params.append("name", datasetName);
 } else {
 throw new Error("Must provide datasetName or datasetId");
 }
-const response = await this._get(
+const response = await this._get(path, params);
 let result;
 if (Array.isArray(response)) {
 if (response.length === 0) {
@@ -2540,21 +2540,21 @@ var Client = class _Client {
 return response;
 }
 async readDatasetOpenaiFinetuning({ datasetId, datasetName }) {
-const
+const path = "/datasets";
 if (datasetId !== void 0) {
 } else if (datasetName !== void 0) {
 datasetId = (await this.readDataset({ datasetName })).id;
 } else {
 throw new Error("Must provide datasetName or datasetId");
 }
-const response = await this._getResponse(`${
+const response = await this._getResponse(`${path}/${datasetId}/openai_ft`);
 const datasetText = await response.text();
 const dataset = datasetText.trim().split("\n").map((line) => JSON.parse(line));
 return dataset;
 }
 listDatasets() {
 return __asyncGenerator(this, arguments, function* ({ limit = 100, offset = 0, datasetIds, datasetName, datasetNameContains } = {}) {
-const
+const path = "/datasets";
 const params = new URLSearchParams({
 limit: limit.toString(),
 offset: offset.toString()
@@ -2571,7 +2571,7 @@ var Client = class _Client {
 params.append("name_contains", datasetNameContains);
 }
 try {
-for (var iter = __forAwait(this._getPaginated(
+for (var iter = __forAwait(this._getPaginated(path, params)), more, temp, error; more = !(temp = yield new __await(iter.next())).done; more = false) {
 const datasets = temp.value;
 yield* __yieldStar(datasets);
 }
@@ -2611,7 +2611,7 @@ var Client = class _Client {
 return await response.json();
 }
 async deleteDataset({ datasetId, datasetName }) {
-let
+let path = "/datasets";
 let datasetId_ = datasetId;
 if (datasetId !== void 0 && datasetName !== void 0) {
 throw new Error("Must provide either datasetName or datasetId, not both");
@@ -2621,17 +2621,17 @@ var Client = class _Client {
 }
 if (datasetId_ !== void 0) {
 assertUuid(datasetId_);
-
+path += `/${datasetId_}`;
 } else {
 throw new Error("Must provide datasetName or datasetId");
 }
-const response = await this.caller.call(fetch, this.apiUrl +
+const response = await this.caller.call(fetch, this.apiUrl + path, __spreadValues({
 method: "DELETE",
 headers: this.headers,
 signal: AbortSignal.timeout(this.timeout_ms)
 }, this.fetchOptions));
 if (!response.ok) {
-throw new Error(`Failed to delete ${
+throw new Error(`Failed to delete ${path}: ${response.status} ${response.statusText}`);
 }
 await response.json();
 }
@@ -2716,8 +2716,8 @@ var Client = class _Client {
 }
 async readExample(exampleId) {
 assertUuid(exampleId);
-const
-return await this._get(
+const path = `/examples/${exampleId}`;
+return await this._get(path);
 }
 listExamples() {
 return __asyncGenerator(this, arguments, function* ({ datasetId, datasetName, exampleIds, asOf, splits, inlineS3Urls, metadata, limit, offset, filter } = {}) {
@@ -2788,14 +2788,14 @@ var Client = class _Client {
 }
 async deleteExample(exampleId) {
 assertUuid(exampleId);
-const
-const response = await this.caller.call(fetch, this.apiUrl +
+const path = `/examples/${exampleId}`;
+const response = await this.caller.call(fetch, this.apiUrl + path, __spreadValues({
 method: "DELETE",
 headers: this.headers,
 signal: AbortSignal.timeout(this.timeout_ms)
 }, this.fetchOptions));
 if (!response.ok) {
-throw new Error(`Failed to delete ${
+throw new Error(`Failed to delete ${path}: ${response.status} ${response.statusText}`);
 }
 await response.json();
 }
@@ -2899,20 +2899,20 @@ var Client = class _Client {
 }
 async readFeedback(feedbackId) {
 assertUuid(feedbackId);
-const
-const response = await this._get(
+const path = `/feedback/${feedbackId}`;
+const response = await this._get(path);
 return response;
 }
 async deleteFeedback(feedbackId) {
 assertUuid(feedbackId);
-const
-const response = await this.caller.call(fetch, this.apiUrl +
+const path = `/feedback/${feedbackId}`;
+const response = await this.caller.call(fetch, this.apiUrl + path, __spreadValues({
 method: "DELETE",
 headers: this.headers,
 signal: AbortSignal.timeout(this.timeout_ms)
 }, this.fetchOptions));
 if (!response.ok) {
-throw new Error(`Failed to delete ${
+throw new Error(`Failed to delete ${path}: ${response.status} ${response.statusText}`);
 }
 await response.json();
 }
@@ -3898,12 +3898,12 @@ function traceable(wrappedFunc, config) {
 try {
 let runtimeConfig;
 if (argsConfigPath) {
-const [index,
-if (index === args.length - 1 && !
+const [index, path] = argsConfigPath;
+if (index === args.length - 1 && !path) {
 runtimeConfig = args.pop();
 } else if (index <= args.length && typeof args[index] === "object" && args[index] !== null) {
-if (
-const _a2 = args[index], { [
+if (path) {
+const _a2 = args[index], { [path]: extracted } = _a2, rest = __objRest(_a2, [__restKey(path)]);
 runtimeConfig = extracted;
 args[index] = rest;
 } else {
@@ -4244,6 +4244,7 @@ var import_openai = __toESM(require("openai"));
 var MIDSCENE_OPENAI_INIT_CONFIG_JSON = "MIDSCENE_OPENAI_INIT_CONFIG_JSON";
 var MIDSCENE_MODEL_NAME = "MIDSCENE_MODEL_NAME";
 var MIDSCENE_LANGSMITH_DEBUG = "MIDSCENE_LANGSMITH_DEBUG";
+var MIDSCENE_DEBUG_AI_PROFILE = "MIDSCENE_DEBUG_AI_PROFILE";
 var OPENAI_API_KEY = "OPENAI_API_KEY";
 function useOpenAIModel(useModel) {
 if (useModel && useModel !== "openAI")
@@ -4273,12 +4274,16 @@ async function createOpenAI() {
 }
 async function call(messages, responseFormat) {
 const openai = await createOpenAI();
+const shouldPrintTiming = typeof process.env[MIDSCENE_DEBUG_AI_PROFILE] === "string";
+shouldPrintTiming && console.time("Midscene - AI call");
 const completion = await openai.chat.completions.create({
 model,
 messages,
 response_format: { type: responseFormat },
 temperature: 0.2
 });
+shouldPrintTiming && console.timeEnd("Midscene - AI call");
+shouldPrintTiming && console.log("Midscene - AI usage", completion.usage);
 const { content } = completion.choices[0].message;
 (0, import_node_assert.default)(content, "empty content");
 return content;
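The two hunks above carry the main functional change to the AI layer in 0.4.0: a new `MIDSCENE_DEBUG_AI_PROFILE` environment variable gates `console.time`/`console.timeEnd` timing around the OpenAI chat-completion call and a log of the completion's token usage. Below is a minimal sketch of the same gate for reference; only the environment-variable check and the log labels mirror the shipped code, while the `profiledCall` wrapper and its `doCall` parameter are illustrative names, not part of the package.

    // Enable the new profiling output by setting the variable to any string:
    //   MIDSCENE_DEBUG_AI_PROFILE=1 node your-script.js
    const MIDSCENE_DEBUG_AI_PROFILE = "MIDSCENE_DEBUG_AI_PROFILE";

    // Hypothetical wrapper illustrating the gating pattern added in ai-model.js.
    async function profiledCall(doCall) {
      // The shipped check only tests that the variable is a string, so any value enables it.
      const shouldPrintTiming =
        typeof process.env[MIDSCENE_DEBUG_AI_PROFILE] === "string";
      shouldPrintTiming && console.time("Midscene - AI call");
      const completion = await doCall(); // e.g. openai.chat.completions.create(...)
      shouldPrintTiming && console.timeEnd("Midscene - AI call");
      shouldPrintTiming && console.log("Midscene - AI usage", completion.usage);
      return completion;
    }

The timing label and usage line match the strings added in the diff, so the output can be filtered on "Midscene - AI call" and "Midscene - AI usage".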
@@ -4426,40 +4431,10 @@ function multiDescription(multi) {
 }
 
 // src/ai-model/prompt/util.ts
-var import_node_assert4 = __toESM(require("assert"));
-
-// src/image/info.ts
 var import_node_assert2 = __toESM(require("assert"));
-var import_node_buffer = require("buffer");
-var import_node_fs = require("fs");
-var import_sharp = __toESM(require("sharp"));
-async function imageInfo(image) {
-const { width, height } = await (0, import_sharp.default)(image).metadata();
-(0, import_node_assert2.default)(width && height, `invalid image: ${image}`);
-return { width, height };
-}
-async function imageInfoOfBase64(imageBase64) {
-const base64Data = imageBase64.replace(/^data:image\/\w+;base64,/, "");
-return imageInfo(import_node_buffer.Buffer.from(base64Data, "base64"));
-}
-
-// src/image/transform.ts
-var import_node_buffer2 = require("buffer");
-var import_sharp2 = __toESM(require("sharp"));
-
-// src/image/visualization.ts
-var import_node_buffer3 = require("buffer");
 
-// src/
-var
-var import_node_crypto = require("crypto");
-var import_node_fs2 = require("fs");
-var import_node_os = require("os");
-var import_node_path = __toESM(require("path"));
-var logDir = (0, import_node_path.join)(process.cwd(), "./midscene_run/");
-
-// src/image/visualization.ts
-var import_sharp3 = __toESM(require("sharp"));
+// src/image/index.ts
+var import_img = require("@midscene/shared/img");
 
 // src/ai-model/prompt/util.ts
 var characteristic = "You are a versatile professional in software UI design and testing. Your outstanding contributions will impact the user experience of billions of users.";
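The large removal above is the previously inlined `src/image/*` code: the sharp-based `imageInfo`/`imageInfoOfBase64` helpers along with the module-level imports for the transform and visualization code. In 0.4.0 the bundle pulls these utilities from `@midscene/shared/img` instead (the added `import_img` require), which also lines up with the `dist/es/image.js` and `dist/lib/image.js` entries shrinking in the file summary at the top. For reference, a sketch of what the removed `imageInfoOfBase64` did, reconstructed from the deleted lines; the export from `@midscene/shared/img` is assumed to behave equivalently:

    // Reconstructed from the code removed above (de-bundled for readability).
    const assert = require("assert");
    const { Buffer } = require("buffer");
    const sharp = require("sharp");

    async function imageInfo(image) {
      // sharp accepts a file path or a Buffer and reports intrinsic dimensions.
      const { width, height } = await sharp(image).metadata();
      assert(width && height, `invalid image: ${image}`);
      return { width, height };
    }

    async function imageInfoOfBase64(imageBase64) {
      // Strip an optional data-URL prefix, then measure the decoded buffer.
      const base64Data = imageBase64.replace(/^data:image\/\w+;base64,/, "");
      return imageInfo(Buffer.from(base64Data, "base64"));
    }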
@@ -4531,7 +4506,7 @@ async function describeUserPage(context) {
 if (context.size) {
 ({ width, height } = context.size);
 } else {
-const imgSize = await imageInfoOfBase64(screenshotBase64);
+const imgSize = await (0, import_img.imageInfoOfBase64)(screenshotBase64);
 ({ width, height } = imgSize);
 }
 const elementsInfo = context.content;
@@ -4552,7 +4527,7 @@ async function describeUserPage(context) {
 "elementInfos": ${JSON.stringify(elementInfosDescription)}
 }`,
 elementById(id) {
-(0,
+(0, import_node_assert2.default)(typeof id !== "undefined", "id is required for query");
 const item = idElementMap[`${id}`];
 return item;
 }
@@ -4583,10 +4558,10 @@ function cropFieldInformation(elementsInfo) {
 }
 
 // src/ai-model/inspect.ts
-var
+var import_node_assert4 = __toESM(require("assert"));
 
 // src/ai-model/coze/index.ts
-var
+var import_node_assert3 = __toESM(require("assert"));
 var import_node_fetch = __toESM(require("node-fetch"));
 var COZE_INSPECT_ELEMENT_BOT_ID = process.env.COZE_INSPECT_ELEMENT_BOT_ID || "";
 var COZE_AI_ACTION_BOT_ID = process.env.COZE_AI_ACTION_BOT_ID || "";
@@ -4639,7 +4614,7 @@ async function callCozeAi(options) {
 throw new Error("aiResponse is undefined", aiResponse);
 }
 const parseContent = (_b = aiResponse == null ? void 0 : aiResponse.messages[0]) == null ? void 0 : _b.content;
-(0,
+(0, import_node_assert3.default)(parseContent, "empty content");
 try {
 return JSON.parse(parseContent);
 } catch (err) {
@@ -4814,7 +4789,7 @@ DATA_DEMAND ends.
 }
 async function AiAssert(options) {
 const { assertion, context, useModel } = options;
-(0,
+(0, import_node_assert4.default)(assertion, "assertion should be a string");
 const { screenshotBase64 } = context;
 const { description, elementById } = await describeUserPage(context);
 const systemPrompt = systemPromptToAssert();
@@ -4858,7 +4833,7 @@ async function AiAssert(options) {
 }
 
 // src/ai-model/automation/index.ts
-var
+var import_node_assert5 = __toESM(require("assert"));
 
 // src/ai-model/automation/planning.ts
 function systemPromptToTaskPlanning() {
@@ -4964,8 +4939,8 @@ async function plan(userPrompt, opts, useModel) {
 });
 }
 const actions = (planFromAI == null ? void 0 : planFromAI.actions) || [];
-(0,
-(0,
+(0, import_node_assert5.default)(planFromAI, "can't get planFromAI");
+(0, import_node_assert5.default)(actions && actions.length > 0, "no actions in ai plan");
 if (planFromAI.error) {
 throw new Error(planFromAI.error);
 }