assistsx-js 0.0.2035 → 0.0.2037
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/AssistsXAsync.d.ts +52 -2
- package/dist/AssistsXAsync.js +25 -3
- package/dist/CallMethod.d.ts +1 -0
- package/dist/CallMethod.js +1 -0
- package/package.json +1 -1
- package/src/AssistsXAsync.ts +88 -6
- package/src/CallMethod.ts +1 -0
package/dist/AssistsXAsync.d.ts
CHANGED
@@ -7,6 +7,37 @@ import { Bounds } from "./Bounds";
 import { AppInfo } from "./AppInfo";
 import { DeviceInfo } from "./DeviceInfo";
 import { WebFloatingWindowOptions } from "./AssistsX";
+/**
+ * Position info from screenshot text recognition
+ */
+export interface RecognizeTextInScreenshotPosition {
+    text: string;
+    left: number;
+    top: number;
+    right: number;
+    bottom: number;
+    width: number;
+    height: number;
+}
+/**
+ * Screenshot text recognition result
+ */
+export interface RecognizeTextInScreenshotResult {
+    fullText: string;
+    processingTimeMillis: number;
+    positions: RecognizeTextInScreenshotPosition[];
+}
+/**
+ * Screenshot recognition region parameters
+ */
+export interface RecognizeTextRegion {
+    left?: number;
+    top?: number;
+    right?: number;
+    bottom?: number;
+    width?: number;
+    height?: number;
+}
 export declare class AssistsXAsync {
     /**
      * Execute an async call
@@ -62,6 +93,19 @@ export declare class AssistsXAsync {
      * @returns Array of screenshot paths
      */
     static takeScreenshotNodes(nodes: Node[], overlayHiddenScreenshotDelayMillis?: number, timeout?: number): Promise<string[]>;
+    /**
+     * Recognize text in a screenshot
+     * @param param0 Recognition parameters
+     * @returns Screenshot text recognition result
+     */
+    static recognizeTextInScreenshot({ targetText, rotationDegrees, overlayHiddenScreenshotDelayMillis, restoreOverlay, region, timeout, }: {
+        targetText: string;
+        rotationDegrees?: number;
+        overlayHiddenScreenshotDelayMillis?: number;
+        restoreOverlay?: boolean;
+        region?: RecognizeTextRegion;
+        timeout?: number;
+    }): Promise<RecognizeTextInScreenshotResult>;
     static scanQR(timeout?: number): Promise<string>;
     static loadWebViewOverlay(url: string, options?: WebFloatingWindowOptions & {
         timeout?: number;
@@ -381,6 +425,12 @@ export declare class AssistsXAsync {
      */
     static openUrlInBrowser(url: string, timeout?: number): Promise<boolean>;
     static download(url: string, timeout?: number): Promise<string | null | undefined>;
-    static audioPlayFromFile(filePath: string, volume?: number, timeout?: number): Promise<string | null | undefined>;
-    static audioStop(timeout?: number): Promise<boolean | null | undefined>;
+    static audioPlayFromFile(filePath: string, { volume, useAbsoluteVolume, timeout, }: {
+        volume?: number;
+        useAbsoluteVolume?: boolean;
+        timeout?: number;
+    }): Promise<string | null | undefined>;
+    static audioStop({ timeout, }: {
+        timeout?: number;
+    }): Promise<boolean | null | undefined>;
 }
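The new `recognizeTextInScreenshot` declaration above can be exercised roughly as follows. This is a minimal sketch, assuming the package root re-exports `AssistsXAsync`; the target text and region values are placeholders, not values taken from the package.

```ts
import { AssistsXAsync } from "assistsx-js"; // assumed root re-export

async function findLoginText() {
  // Placeholder target text and region; every region field is optional.
  const result = await AssistsXAsync.recognizeTextInScreenshot({
    targetText: "Login",
    region: { left: 0, top: 0, width: 1080, height: 1200 },
  });
  console.log(result.fullText, `${result.processingTimeMillis} ms`);
  for (const pos of result.positions) {
    // Each match carries its text plus a bounding box in screen coordinates.
    console.log(pos.text, pos.left, pos.top, pos.width, pos.height);
  }
}
```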
package/dist/AssistsXAsync.js
CHANGED
@@ -119,6 +119,28 @@ export class AssistsXAsync {
         const data = response.getDataOrDefault("");
         return data.images;
     }
+    /**
+     * Recognize text in a screenshot
+     * @param param0 Recognition parameters
+     * @returns Screenshot text recognition result
+     */
+    static async recognizeTextInScreenshot({ targetText, rotationDegrees, overlayHiddenScreenshotDelayMillis, restoreOverlay = true, region, timeout, }) {
+        const response = await this.asyncCall(CallMethod.recognizeTextInScreenshot, {
+            args: {
+                targetText,
+                rotationDegrees,
+                overlayHiddenScreenshotDelayMillis,
+                restoreOverlay,
+                region,
+            },
+            timeout,
+        });
+        return response.getDataOrDefault({
+            fullText: "",
+            processingTimeMillis: 0,
+            positions: [],
+        });
+    }
     static async scanQR(timeout) {
         const response = await this.asyncCall(CallMethod.scanQR, { timeout });
         const data = response.getDataOrDefault({ value: "" });
@@ -645,14 +667,14 @@ export class AssistsXAsync {
         });
         return response.getDataOrDefault(null);
     }
-    static async audioPlayFromFile(filePath, volume = undefined, timeout) {
+    static async audioPlayFromFile(filePath, { volume = undefined, useAbsoluteVolume = false, timeout = 30, }) {
         const response = await this.asyncCall(CallMethod.audioPlayFromFile, {
-            args: { filePath, volume },
+            args: { filePath, volume, useAbsoluteVolume },
             timeout,
         });
         return response.getDataOrDefault(null);
     }
-    static async audioStop(timeout) {
+    static async audioStop({ timeout = 30, }) {
         const response = await this.asyncCall(CallMethod.audioStop, {
             timeout,
         });
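The compiled implementation above also shows the call-site impact of the audio changes: `audioPlayFromFile` and `audioStop` now destructure a single options object with no default, so existing positional calls must be rewritten and `audioStop` needs at least an empty object. A hedged migration sketch, with an illustrative file path and volume:

```ts
import { AssistsXAsync } from "assistsx-js"; // assumed root re-export

async function playAlert() {
  // 0.0.2035 shape, for comparison (positional arguments):
  // await AssistsXAsync.audioPlayFromFile("/sdcard/Download/alert.mp3", 0.8, 30);
  // await AssistsXAsync.audioStop(30);

  // 0.0.2037 shape: one options object; useAbsoluteVolume defaults to false
  // and timeout to 30 when omitted.
  await AssistsXAsync.audioPlayFromFile("/sdcard/Download/alert.mp3", {
    volume: 0.8,
    useAbsoluteVolume: false,
  });
  await AssistsXAsync.audioStop({});
}
```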
package/dist/CallMethod.d.ts
CHANGED
@@ -33,6 +33,7 @@ export declare const CallMethod: {
     readonly setOverlayFlags: "setOverlayFlags";
     readonly scanQR: "scanQR";
     readonly loadWebViewOverlay: "loadWebViewOverlay";
+    readonly recognizeTextInScreenshot: "recognizeTextInScreenshot";
     readonly clickByGesture: "clickByGesture";
     readonly clickNodeByGesture: "clickNodeByGesture";
     readonly doubleClickNodeByGesture: "doubleClickNodeByGesture";
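Because each entry is declared `readonly` with a string-literal type, consumers can derive a union of dispatch keys from the map; the `MethodName` alias below is illustrative and not part of the package:

```ts
import { CallMethod } from "assistsx-js"; // assumed root re-export

// Union of all call-method strings, now including "recognizeTextInScreenshot".
type MethodName = (typeof CallMethod)[keyof typeof CallMethod];

const method: MethodName = CallMethod.recognizeTextInScreenshot;
```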
package/dist/CallMethod.js
CHANGED
@@ -34,6 +34,7 @@ export const CallMethod = {
     setOverlayFlags: "setOverlayFlags",
     scanQR: "scanQR",
     loadWebViewOverlay: "loadWebViewOverlay",
+    recognizeTextInScreenshot: "recognizeTextInScreenshot",
     clickByGesture: "clickByGesture",
     clickNodeByGesture: "clickNodeByGesture",
     doubleClickNodeByGesture: "doubleClickNodeByGesture",
package/package.json
CHANGED
package/src/AssistsXAsync.ts
CHANGED
@@ -17,6 +17,40 @@ import {
   HttpResponse,
 } from "./AssistsX";
 
+/**
+ * Position info from screenshot text recognition
+ */
+export interface RecognizeTextInScreenshotPosition {
+  text: string;
+  left: number;
+  top: number;
+  right: number;
+  bottom: number;
+  width: number;
+  height: number;
+}
+
+/**
+ * Screenshot text recognition result
+ */
+export interface RecognizeTextInScreenshotResult {
+  fullText: string;
+  processingTimeMillis: number;
+  positions: RecognizeTextInScreenshotPosition[];
+}
+
+/**
+ * Screenshot recognition region parameters
+ */
+export interface RecognizeTextRegion {
+  left?: number;
+  top?: number;
+  right?: number;
+  bottom?: number;
+  width?: number;
+  height?: number;
+}
+
 export class AssistsXAsync {
   /**
    * Execute an async call
@@ -167,6 +201,45 @@ export class AssistsXAsync {
     const data = response.getDataOrDefault("");
     return data.images;
   }
+  /**
+   * Recognize text in a screenshot
+   * @param param0 Recognition parameters
+   * @returns Screenshot text recognition result
+   */
+  public static async recognizeTextInScreenshot({
+    targetText,
+    rotationDegrees,
+    overlayHiddenScreenshotDelayMillis,
+    restoreOverlay = true,
+    region,
+    timeout,
+  }: {
+    targetText: string;
+    rotationDegrees?: number;
+    overlayHiddenScreenshotDelayMillis?: number;
+    restoreOverlay?: boolean;
+    region?: RecognizeTextRegion;
+    timeout?: number;
+  }): Promise<RecognizeTextInScreenshotResult> {
+    const response = await this.asyncCall(
+      CallMethod.recognizeTextInScreenshot,
+      {
+        args: {
+          targetText,
+          rotationDegrees,
+          overlayHiddenScreenshotDelayMillis,
+          restoreOverlay,
+          region,
+        },
+        timeout,
+      }
+    );
+    return response.getDataOrDefault({
+      fullText: "",
+      processingTimeMillis: 0,
+      positions: [],
+    });
+  }
   public static async scanQR(timeout?: number): Promise<string> {
     const response = await this.asyncCall(CallMethod.scanQR, { timeout });
     const data = response.getDataOrDefault({ value: "" });
@@ -927,18 +1000,27 @@ export class AssistsXAsync {
   }
   public static async audioPlayFromFile(
     filePath: string,
-    volume: number | undefined = undefined,
-    timeout?: number
+    {
+      volume = undefined,
+      useAbsoluteVolume = false,
+      timeout = 30,
+    }: {
+      volume?: number;
+      useAbsoluteVolume?: boolean;
+      timeout?: number;
+    }
   ): Promise<string | null | undefined> {
     const response = await this.asyncCall(CallMethod.audioPlayFromFile, {
-      args: { filePath, volume },
+      args: { filePath, volume, useAbsoluteVolume },
       timeout,
     });
     return response.getDataOrDefault(null);
   }
-  public static async audioStop(
-    timeout?: number
-  ): Promise<boolean | null | undefined> {
+  public static async audioStop({
+    timeout = 30,
+  }: {
+    timeout?: number;
+  }): Promise<boolean | null | undefined> {
     const response = await this.asyncCall(CallMethod.audioStop, {
       timeout,
     });
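Note that the source falls back to an empty result (`fullText: ""`, `positions: []`) via `getDataOrDefault` instead of rejecting, so callers should treat an empty `positions` array as "not found". A small sketch of that check, which also derives a tap point from a match's bounding box; the helper name is illustrative and the root re-export is assumed:

```ts
import { AssistsXAsync } from "assistsx-js"; // assumed root re-export

// Center of the first recognized occurrence of targetText, or null when the
// call fell back to the empty default result (positions: []).
async function centerOfText(targetText: string) {
  const result = await AssistsXAsync.recognizeTextInScreenshot({ targetText });
  const first = result.positions[0];
  if (!first) return null;
  return { x: first.left + first.width / 2, y: first.top + first.height / 2 };
}
```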
package/src/CallMethod.ts
CHANGED
@@ -34,6 +34,7 @@ export const CallMethod = {
   setOverlayFlags: "setOverlayFlags",
   scanQR: "scanQR",
   loadWebViewOverlay: "loadWebViewOverlay",
+  recognizeTextInScreenshot: "recognizeTextInScreenshot",
 
   clickByGesture: "clickByGesture",
   clickNodeByGesture: "clickNodeByGesture",