@hyperbrowser/sdk 0.7.0 → 0.9.0
This diff shows the published contents of the two package versions as they appear in the public npm registry, and is provided for informational purposes only.
- package/README.md +3 -3
- package/dist/client.d.ts +7 -31
- package/dist/client.js +10 -176
- package/dist/index.d.ts +3 -1
- package/dist/index.js +12 -5
- package/dist/services/base.d.ts +7 -0
- package/dist/services/base.js +62 -0
- package/dist/services/crawl.d.ts +21 -0
- package/dist/services/crawl.js +82 -0
- package/dist/services/scrape.d.ts +19 -0
- package/dist/services/scrape.js +62 -0
- package/dist/services/sessions.d.ts +29 -0
- package/dist/services/sessions.js +91 -0
- package/dist/types/constants.d.ts +2 -0
- package/dist/types/crawl.d.ts +13 -17
- package/dist/types/index.d.ts +5 -1
- package/dist/types/index.js +0 -15
- package/dist/types/scrape.d.ts +17 -17
- package/dist/types/session.d.ts +8 -0
- package/dist/utils.d.ts +1 -0
- package/dist/utils.js +5 -0
- package/package.json +1 -1

package/README.md
CHANGED

@@ -14,7 +14,7 @@ npm install @hyperbrowser/sdk
 
 ```typescript
 import { connect } from "puppeteer-core";
-import Hyperbrowser from "hyperbrowser";
+import { Hyperbrowser } from "@hyperbrowser/sdk";
 import dotenv from "dotenv";
 
 dotenv.config();
@@ -24,7 +24,7 @@ const client = new Hyperbrowser({
 });
 
 (async () => {
-  const session = await client.createSession();
+  const session = await client.sessions.create();
 
   const browser = await connect({
     browserWSEndpoint: session.wsEndpoint,
@@ -43,6 +43,6 @@ const client = new Hyperbrowser({
   await page.close();
   await browser.close();
   console.log("Session completed!");
-  await client.stopSession(session.id);
+  await client.sessions.stop(session.id);
 })().catch((error) => console.error(error.message));
 ```
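
Note: the README edits above condense the whole 0.7.0 → 0.9.0 migration: the package is imported under its scoped name, and the flat session methods move onto a `sessions` service. A minimal sketch of the new usage (the `HYPERBROWSER_API_KEY` variable name is illustrative, not taken from the diff):

```typescript
import { Hyperbrowser } from "@hyperbrowser/sdk"; // 0.7.0: import Hyperbrowser from "hyperbrowser"

const client = new Hyperbrowser({ apiKey: process.env.HYPERBROWSER_API_KEY as string });

(async () => {
  const session = await client.sessions.create(); // 0.7.0: client.createSession()
  console.log(session.wsEndpoint);
  await client.sessions.stop(session.id);         // 0.7.0: client.stopSession(session.id)
})().catch((error) => console.error(error.message));
```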

package/dist/client.d.ts
CHANGED

@@ -1,38 +1,14 @@
-import { Response } from "node-fetch";
 import { HyperbrowserConfig } from "./types/config";
-import { BasicResponse, CreateSessionParams, SessionDetail, SessionListParams, SessionListResponse } from "./types/session";
-import { StartScrapeJobParams, StartScrapeJobResponse, ScrapeJobResponse } from "./types/scrape";
-import { StartCrawlJobParams, StartCrawlJobResponse, GetCrawlJobParams, CrawlJobResponse } from "./types/crawl";
+import { SessionsService } from "./services/sessions";
+import { ScrapeService } from "./services/scrape";
+import { CrawlService } from "./services/crawl";
 export declare class HyperbrowserError extends Error {
     statusCode?: number | undefined;
-    response?: Response | undefined;
-    originalError?: Error | undefined;
-    constructor(message: string, statusCode?: number | undefined, response?: Response | undefined, originalError?: Error | undefined);
+    constructor(message: string, statusCode?: number | undefined);
 }
 export declare class HyperbrowserClient {
-    private readonly apiKey;
-    private readonly baseUrl;
+    readonly sessions: SessionsService;
+    readonly scrape: ScrapeService;
+    readonly crawl: CrawlService;
     constructor(config: HyperbrowserConfig);
-    private request;
-    /**
-     * Create a new browser session
-     * @param params Configuration parameters for the new session
-     */
-    createSession(params?: CreateSessionParams): Promise<SessionDetail>;
-    /**
-     * Get details of an existing session
-     */
-    getSession(id: string): Promise<SessionDetail>;
-    /**
-     * Stop a running session
-     */
-    stopSession(id: string): Promise<BasicResponse>;
-    /**
-     * List all sessions with optional filtering
-     */
-    listSessions(params?: SessionListParams): Promise<SessionListResponse>;
-    startScrapeJob(params: StartScrapeJobParams): Promise<StartScrapeJobResponse>;
-    getScrapeJob(id: string): Promise<ScrapeJobResponse>;
-    startCrawlJob(params: StartCrawlJobParams): Promise<StartCrawlJobResponse>;
-    getCrawlJob(id: string, params?: GetCrawlJobParams): Promise<CrawlJobResponse>;
 }
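
Note: the declaration diff shows the client collapsing from a dozen flat methods into three service namespaces. Assuming the mapping implied above, equivalent 0.9.0 calls look like this sketch:

```typescript
import { Hyperbrowser } from "@hyperbrowser/sdk";

async function demo() {
  const client = new Hyperbrowser({ apiKey: "hb_..." }); // placeholder key
  // createSession / getSession / stopSession / listSessions -> client.sessions.*
  // startScrapeJob / getScrapeJob                           -> client.scrape.start / client.scrape.get
  // startCrawlJob / getCrawlJob                             -> client.crawl.start / client.crawl.get
  const { jobId } = await client.scrape.start({ url: "https://example.com" });
  const job = await client.scrape.get(jobId);
  console.log(job.status);
}
```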

package/dist/client.js
CHANGED

@@ -1,193 +1,27 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.HyperbrowserClient = exports.HyperbrowserError = void 0;
-const node_fetch_1 = __importDefault(require("node-fetch"));
+const sessions_1 = require("./services/sessions");
+const scrape_1 = require("./services/scrape");
+const crawl_1 = require("./services/crawl");
 class HyperbrowserError extends Error {
-    constructor(message, statusCode, response, originalError) {
+    constructor(message, statusCode) {
         super(`[Hyperbrowser]: ${message}`);
        this.statusCode = statusCode;
-        this.response = response;
-        this.originalError = originalError;
         this.name = "HyperbrowserError";
     }
 }
 exports.HyperbrowserError = HyperbrowserError;
 class HyperbrowserClient {
     constructor(config) {
-        this.apiKey = config.apiKey;
-        this.baseUrl = config.baseUrl || "https://app.hyperbrowser.ai";
-        if (!this.apiKey) {
+        const apiKey = config.apiKey;
+        const baseUrl = config.baseUrl || "https://app.hyperbrowser.ai";
+        if (!apiKey) {
             throw new HyperbrowserError("API key is required");
         }
-    }
-    async request(path, init, params) {
-        try {
-            const url = new URL(`${this.baseUrl}/api${path}`);
-            if (params) {
-                Object.entries(params).forEach(([key, value]) => {
-                    if (value !== undefined) {
-                        url.searchParams.append(key, value.toString());
-                    }
-                });
-            }
-            const response = await (0, node_fetch_1.default)(url.toString(), {
-                ...init,
-                headers: {
-                    "x-api-key": this.apiKey,
-                    "Content-Type": "application/json",
-                    ...init?.headers,
-                },
-            });
-            if (!response.ok) {
-                let errorMessage;
-                try {
-                    const errorData = await response.json();
-                    errorMessage =
-                        errorData.message || errorData.error || `HTTP error! status: ${response.status}`;
-                }
-                catch {
-                    errorMessage = `HTTP error! status: ${response.status}`;
-                }
-                throw new HyperbrowserError(errorMessage, response.status, response);
-            }
-            if (response.headers.get("content-length") === "0") {
-                return {};
-            }
-            try {
-                return (await response.json());
-            }
-            catch (error) {
-                throw new HyperbrowserError("Failed to parse JSON response", response.status, response, error instanceof Error ? error : undefined);
-            }
-        }
-        catch (error) {
-            // If it's already a HyperbrowserError, rethrow it
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            // Convert other errors to HyperbrowserError
-            throw new HyperbrowserError(error instanceof Error ? error.message : "Unknown error occurred", undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    /**
-     * Create a new browser session
-     * @param params Configuration parameters for the new session
-     */
-    async createSession(params) {
-        try {
-            return await this.request("/session", {
-                method: "POST",
-                body: params ? JSON.stringify(params) : undefined,
-            });
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError("Failed to create session", undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    /**
-     * Get details of an existing session
-     */
-    async getSession(id) {
-        try {
-            return await this.request(`/session/${id}`);
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError(`Failed to get session ${id}`, undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    /**
-     * Stop a running session
-     */
-    async stopSession(id) {
-        try {
-            return await this.request(`/session/${id}/stop`, {
-                method: "PUT",
-            });
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError(`Failed to stop session ${id}`, undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    /**
-     * List all sessions with optional filtering
-     */
-    async listSessions(params = {}) {
-        try {
-            return await this.request("/sessions", undefined, {
-                status: params.status,
-                page: params.page,
-            });
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError("Failed to list sessions", undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    async startScrapeJob(params) {
-        try {
-            return await this.request("/scrape", {
-                method: "POST",
-                body: JSON.stringify(params),
-            });
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError("Failed to start scrape job", undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    async getScrapeJob(id) {
-        try {
-            return await this.request(`/scrape/${id}`);
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError(`Failed to get scrape job ${id}`, undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    async startCrawlJob(params) {
-        try {
-            return await this.request("/crawl", {
-                method: "POST",
-                body: JSON.stringify(params),
-            });
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError("Failed to start crawl job", undefined, undefined, error instanceof Error ? error : undefined);
-        }
-    }
-    async getCrawlJob(id, params) {
-        try {
-            return await this.request(`/crawl/${id}`, undefined, {
-                page: params?.page,
-            });
-        }
-        catch (error) {
-            if (error instanceof HyperbrowserError) {
-                throw error;
-            }
-            throw new HyperbrowserError(`Failed to get crawl job ${id}`, undefined, undefined, error instanceof Error ? error : undefined);
-        }
+        this.sessions = new sessions_1.SessionsService(apiKey, baseUrl);
+        this.scrape = new scrape_1.ScrapeService(apiKey, baseUrl);
+        this.crawl = new crawl_1.CrawlService(apiKey, baseUrl);
     }
 }
 exports.HyperbrowserClient = HyperbrowserClient;
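
Note: `HyperbrowserError` also slims down here: the `response` and `originalError` fields (and the matching constructor parameters) are gone, leaving `message` and `statusCode`. Code that inspected `error.response` needs to switch to the status code, roughly:

```typescript
import { Hyperbrowser, HyperbrowserError } from "@hyperbrowser/sdk";

async function show() {
  const client = new Hyperbrowser({ apiKey: "hb_..." }); // placeholder key
  try {
    await client.sessions.get("missing-id"); // illustrative ID
  } catch (error) {
    if (error instanceof HyperbrowserError) {
      // 0.9.0 exposes only message and statusCode;
      // error.response and error.originalError no longer exist.
      console.error(error.statusCode, error.message);
    }
  }
}
```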

package/dist/index.d.ts
CHANGED

@@ -1,3 +1,5 @@
-export { HyperbrowserClient as default } from "./client";
+import { HyperbrowserClient } from "./client";
 export * from "./types";
 export { HyperbrowserError } from "./client";
+export declare const Hyperbrowser: typeof HyperbrowserClient;
+export default HyperbrowserClient;

package/dist/index.js
CHANGED

@@ -14,11 +14,18 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.default = void 0;
-var client_1 = require("./client");
-Object.defineProperty(exports, "default", { enumerable: true, get: function () { return client_1.HyperbrowserClient; } });
+exports.Hyperbrowser = exports.HyperbrowserError = void 0;
+const client_1 = require("./client");
 __exportStar(require("./types"), exports);
 var client_2 = require("./client");
 Object.defineProperty(exports, "HyperbrowserError", { enumerable: true, get: function () { return client_2.HyperbrowserError; } });
-
-
+// Export HyperbrowserClient as Hyperbrowser for named imports
+exports.Hyperbrowser = client_1.HyperbrowserClient;
+exports.default = client_1.HyperbrowserClient;
+// For CommonJS compatibility
+if (typeof module !== "undefined" && module.exports) {
+    module.exports = client_1.HyperbrowserClient;
+    module.exports.Hyperbrowser = client_1.HyperbrowserClient;
+    module.exports.HyperbrowserClient = client_1.HyperbrowserClient;
+    module.exports.default = client_1.HyperbrowserClient;
+}
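
Note: with the shim above, the same class is reachable however it is imported. A sketch of the forms 0.9.0 supports:

```typescript
// ESM / TypeScript — both bindings resolve to the same class:
import HyperbrowserDefault from "@hyperbrowser/sdk"; // default export
import { Hyperbrowser } from "@hyperbrowser/sdk";    // named alias added in 0.9.0

// CommonJS, via the module.exports assignments above:
//   const Hyperbrowser = require("@hyperbrowser/sdk");
//   const { Hyperbrowser: Named } = require("@hyperbrowser/sdk");
```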

package/dist/services/base.d.ts
ADDED

@@ -0,0 +1,7 @@
+import { RequestInit } from "node-fetch";
+export declare class BaseService {
+    protected readonly apiKey: string;
+    protected readonly baseUrl: string;
+    constructor(apiKey: string, baseUrl: string);
+    protected request<T>(path: string, init?: RequestInit, params?: Record<string, string | number | undefined>): Promise<T>;
+}
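
Note: `BaseService` is what lets the per-endpoint services stay small: each inherits a single protected `request<T>` helper (implemented in services/base.js just below) that builds the `/api` URL, appends defined query parameters, sets the `x-api-key` header, and normalizes failures into `HyperbrowserError`. A hypothetical subclass, assuming a deep import of the compiled module (BaseService is not re-exported from the package root):

```typescript
import { BaseService } from "@hyperbrowser/sdk/dist/services/base";

interface Profile {
  id: string;
  name?: string;
}

// Hypothetical service; the /profile endpoint is illustrative only.
class ProfilesService extends BaseService {
  async get(id: string): Promise<Profile> {
    // Inherited helper handles auth headers, URL assembly, and errors.
    return await this.request<Profile>(`/profile/${id}`);
  }
}
```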

package/dist/services/base.js
ADDED

@@ -0,0 +1,62 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseService = void 0;
+const node_fetch_1 = __importDefault(require("node-fetch"));
+const client_1 = require("../client");
+class BaseService {
+    constructor(apiKey, baseUrl) {
+        this.apiKey = apiKey;
+        this.baseUrl = baseUrl;
+    }
+    async request(path, init, params) {
+        try {
+            const url = new URL(`${this.baseUrl}/api${path}`);
+            if (params) {
+                Object.entries(params).forEach(([key, value]) => {
+                    if (value !== undefined) {
+                        url.searchParams.append(key, value.toString());
+                    }
+                });
+            }
+            const response = await (0, node_fetch_1.default)(url.toString(), {
+                ...init,
+                headers: {
+                    "x-api-key": this.apiKey,
+                    "Content-Type": "application/json",
+                    ...init?.headers,
+                },
+            });
+            if (!response.ok) {
+                let errorMessage;
+                try {
+                    const errorData = await response.json();
+                    errorMessage =
+                        errorData.message || errorData.error || `HTTP error! status: ${response.status}`;
+                }
+                catch {
+                    errorMessage = `HTTP error! status: ${response.status}`;
+                }
+                throw new client_1.HyperbrowserError(errorMessage, response.status);
+            }
+            if (response.headers.get("content-length") === "0") {
+                return {};
+            }
+            try {
+                return (await response.json());
+            }
+            catch {
+                throw new client_1.HyperbrowserError("Failed to parse JSON response", response.status);
+            }
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(error instanceof Error ? error.message : "Unknown error occurred", undefined);
+        }
+    }
+}
+exports.BaseService = BaseService;

package/dist/services/crawl.d.ts
ADDED

@@ -0,0 +1,21 @@
+import { CrawlJobResponse, GetCrawlJobParams, StartCrawlJobParams, StartCrawlJobResponse } from "../types/crawl";
+import { BaseService } from "./base";
+export declare class CrawlService extends BaseService {
+    /**
+     * Start a new crawl job
+     * @param params The parameters for the crawl job
+     */
+    start(params: StartCrawlJobParams): Promise<StartCrawlJobResponse>;
+    /**
+     * Get the status of a crawl job
+     * @param id The ID of the crawl job to get
+     * @param params Optional parameters to filter the crawl job
+     */
+    get(id: string, params?: GetCrawlJobParams): Promise<CrawlJobResponse>;
+    /**
+     * Start a crawl job and wait for it to complete
+     * @param params The parameters for the crawl job
+     * @param returnAllPages Whether to return all pages in the crawl job response
+     */
+    startAndWait(params: StartCrawlJobParams, returnAllPages?: boolean): Promise<CrawlJobResponse>;
+}

package/dist/services/crawl.js
ADDED

@@ -0,0 +1,82 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CrawlService = void 0;
+const base_1 = require("./base");
+const utils_1 = require("../utils");
+const client_1 = require("../client");
+class CrawlService extends base_1.BaseService {
+    /**
+     * Start a new crawl job
+     * @param params The parameters for the crawl job
+     */
+    async start(params) {
+        try {
+            return await this.request("/crawl", {
+                method: "POST",
+                body: JSON.stringify(params),
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to start crawl job", undefined);
+        }
+    }
+    /**
+     * Get the status of a crawl job
+     * @param id The ID of the crawl job to get
+     * @param params Optional parameters to filter the crawl job
+     */
+    async get(id, params) {
+        try {
+            return await this.request(`/crawl/${id}`, undefined, {
+                page: params?.page,
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get crawl job ${id}`, undefined);
+        }
+    }
+    /**
+     * Start a crawl job and wait for it to complete
+     * @param params The parameters for the crawl job
+     * @param returnAllPages Whether to return all pages in the crawl job response
+     */
+    async startAndWait(params, returnAllPages = true) {
+        const job = await this.start(params);
+        const jobId = job.jobId;
+        if (!jobId) {
+            throw new client_1.HyperbrowserError("Failed to start crawl job, could not get job ID");
+        }
+        let jobResponse;
+        while (true) {
+            jobResponse = await this.get(jobId);
+            if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+                break;
+            }
+            await (0, utils_1.sleep)(2000);
+        }
+        if (!returnAllPages) {
+            return jobResponse;
+        }
+        while (jobResponse.currentPageBatch < jobResponse.totalPageBatches) {
+            const tmpJobResponse = await this.get(jobId, {
+                page: jobResponse.currentPageBatch + 1,
+            });
+            if (tmpJobResponse.data) {
+                jobResponse.data?.push(...tmpJobResponse.data);
+            }
+            jobResponse.currentPageBatch = tmpJobResponse.currentPageBatch;
+            jobResponse.totalCrawledPages = tmpJobResponse.totalCrawledPages;
+            jobResponse.totalPageBatches = tmpJobResponse.totalPageBatches;
+            jobResponse.batchSize = tmpJobResponse.batchSize;
+            await (0, utils_1.sleep)(500);
+        }
+        return jobResponse;
+    }
+}
+exports.CrawlService = CrawlService;
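
Note: `startAndWait` wraps the two primitive calls in a polling loop: it checks the job every 2 seconds until the status is `completed` or `failed`, then, because `returnAllPages` defaults to true, walks the remaining page batches at 500 ms intervals and merges each batch into `data`. It resolves rather than throws on a failed job, so callers should check `status`. A usage sketch:

```typescript
import { Hyperbrowser } from "@hyperbrowser/sdk";

async function crawlSite() {
  const client = new Hyperbrowser({ apiKey: "hb_..." }); // placeholder key
  const result = await client.crawl.startAndWait({
    url: "https://example.com",
    maxPages: 10,
  });
  // All page batches are merged into result.data by default.
  console.log(result.status, result.data?.length);
}
```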

package/dist/services/scrape.d.ts
ADDED

@@ -0,0 +1,19 @@
+import { ScrapeJobResponse, StartScrapeJobParams, StartScrapeJobResponse } from "../types/scrape";
+import { BaseService } from "./base";
+export declare class ScrapeService extends BaseService {
+    /**
+     * Start a new scrape job
+     * @param params The parameters for the scrape job
+     */
+    start(params: StartScrapeJobParams): Promise<StartScrapeJobResponse>;
+    /**
+     * Get the status of a scrape job
+     * @param id The ID of the scrape job to get
+     */
+    get(id: string): Promise<ScrapeJobResponse>;
+    /**
+     * Start a scrape job and wait for it to complete
+     * @param params The parameters for the scrape job
+     */
+    startAndWait(params: StartScrapeJobParams): Promise<ScrapeJobResponse>;
+}

package/dist/services/scrape.js
ADDED

@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ScrapeService = void 0;
+const base_1 = require("./base");
+const utils_1 = require("../utils");
+const client_1 = require("../client");
+class ScrapeService extends base_1.BaseService {
+    /**
+     * Start a new scrape job
+     * @param params The parameters for the scrape job
+     */
+    async start(params) {
+        try {
+            return await this.request("/scrape", {
+                method: "POST",
+                body: JSON.stringify(params),
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to start scrape job", undefined);
+        }
+    }
+    /**
+     * Get the status of a scrape job
+     * @param id The ID of the scrape job to get
+     */
+    async get(id) {
+        try {
+            return await this.request(`/scrape/${id}`);
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get scrape job ${id}`, undefined);
+        }
+    }
+    /**
+     * Start a scrape job and wait for it to complete
+     * @param params The parameters for the scrape job
+     */
+    async startAndWait(params) {
+        const job = await this.start(params);
+        const jobId = job.jobId;
+        if (!jobId) {
+            throw new client_1.HyperbrowserError("Failed to start scrape job, could not get job ID");
+        }
+        let jobResponse;
+        while (true) {
+            jobResponse = await this.get(jobId);
+            if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+                break;
+            }
+            await (0, utils_1.sleep)(2000);
+        }
+        return jobResponse;
+    }
+}
+exports.ScrapeService = ScrapeService;
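
Note: the scrape variant polls the same way (every 2 seconds until `completed` or `failed`) but has no batch pagination; the whole result arrives in `data`. A usage sketch:

```typescript
import { Hyperbrowser } from "@hyperbrowser/sdk";

async function scrapePage() {
  const client = new Hyperbrowser({ apiKey: "hb_..." }); // placeholder key
  const job = await client.scrape.startAndWait({
    url: "https://example.com",
    scrapeOptions: { formats: ["markdown", "links"] },
  });
  if (job.status === "completed") {
    console.log(job.data?.markdown);
  }
}
```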

package/dist/services/sessions.d.ts
ADDED

@@ -0,0 +1,29 @@
+import { BasicResponse, CreateSessionParams, SessionDetail, SessionListParams, SessionListResponse, SessionRecording } from "../types/session";
+import { BaseService } from "./base";
+export declare class SessionsService extends BaseService {
+    /**
+     * Create a new browser session
+     * @param params Configuration parameters for the new session
+     */
+    create(params?: CreateSessionParams): Promise<SessionDetail>;
+    /**
+     * Get details of an existing session
+     * @param id The ID of the session to get
+     */
+    get(id: string): Promise<SessionDetail>;
+    /**
+     * Stop a running session
+     * @param id The ID of the session to stop
+     */
+    stop(id: string): Promise<BasicResponse>;
+    /**
+     * List all sessions with optional filtering
+     * @param params Optional parameters to filter the sessions
+     */
+    list(params?: SessionListParams): Promise<SessionListResponse>;
+    /**
+     * Get the recording of a session
+     * @param id The ID of the session to get the recording from
+     */
+    getRecording(id: string): Promise<SessionRecording[]>;
+}

package/dist/services/sessions.js
ADDED

@@ -0,0 +1,91 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SessionsService = void 0;
+const base_1 = require("./base");
+const client_1 = require("../client");
+class SessionsService extends base_1.BaseService {
+    /**
+     * Create a new browser session
+     * @param params Configuration parameters for the new session
+     */
+    async create(params) {
+        try {
+            return await this.request("/session", {
+                method: "POST",
+                body: params ? JSON.stringify(params) : undefined,
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to create session", undefined);
+        }
+    }
+    /**
+     * Get details of an existing session
+     * @param id The ID of the session to get
+     */
+    async get(id) {
+        try {
+            return await this.request(`/session/${id}`);
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get session ${id}`, undefined);
+        }
+    }
+    /**
+     * Stop a running session
+     * @param id The ID of the session to stop
+     */
+    async stop(id) {
+        try {
+            return await this.request(`/session/${id}/stop`, {
+                method: "PUT",
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to stop session ${id}`, undefined);
+        }
+    }
+    /**
+     * List all sessions with optional filtering
+     * @param params Optional parameters to filter the sessions
+     */
+    async list(params = {}) {
+        try {
+            return await this.request("/sessions", undefined, {
+                status: params.status,
+                page: params.page,
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to list sessions", undefined);
+        }
+    }
+    /**
+     * Get the recording of a session
+     * @param id The ID of the session to get the recording from
+     */
+    async getRecording(id) {
+        try {
+            return await this.request(`/session/${id}/recording`);
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get recording for session ${id}`, undefined);
+        }
+    }
+}
+exports.SessionsService = SessionsService;
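
Note: the sessions service now also covers recordings (`getRecording`, paired with the `enableWebRecording` create flag added in the session types below). A lifecycle sketch:

```typescript
import { Hyperbrowser } from "@hyperbrowser/sdk";

async function recordSession() {
  const client = new Hyperbrowser({ apiKey: "hb_..." }); // placeholder key
  const session = await client.sessions.create({ enableWebRecording: true });
  // ...drive the browser over session.wsEndpoint, as in the README example...
  await client.sessions.stop(session.id);
  const events = await client.sessions.getRecording(session.id);
  console.log(`captured ${events.length} recording events`);
}
```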

package/dist/types/constants.d.ts
CHANGED

@@ -1,5 +1,7 @@
+export type ScrapeFormat = "markdown" | "html" | "links";
 export type ScrapeJobStatus = "pending" | "running" | "completed" | "failed";
 export type CrawlJobStatus = "pending" | "running" | "completed" | "failed";
+export type CrawlPageStatus = "completed" | "failed";
 export type Country = "AD" | "AE" | "AF" | "AL" | "AM" | "AO" | "AR" | "AT" | "AU" | "AW" | "AZ" | "BA" | "BD" | "BE" | "BG" | "BH" | "BJ" | "BO" | "BR" | "BS" | "BT" | "BY" | "BZ" | "CA" | "CF" | "CH" | "CI" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "EC" | "EE" | "EG" | "ES" | "ET" | "EU" | "FI" | "FJ" | "FR" | "GB" | "GE" | "GH" | "GM" | "GR" | "HK" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IN" | "IQ" | "IR" | "IS" | "IT" | "JM" | "JO" | "JP" | "KE" | "KH" | "KR" | "KW" | "KZ" | "LB" | "LI" | "LR" | "LT" | "LU" | "LV" | "MA" | "MC" | "MD" | "ME" | "MG" | "MK" | "ML" | "MM" | "MN" | "MR" | "MT" | "MU" | "MV" | "MX" | "MY" | "MZ" | "NG" | "NL" | "NO" | "NZ" | "OM" | "PA" | "PE" | "PH" | "PK" | "PL" | "PR" | "PT" | "PY" | "QA" | "RANDOM_COUNTRY" | "RO" | "RS" | "RU" | "SA" | "SC" | "SD" | "SE" | "SG" | "SI" | "SK" | "SN" | "SS" | "TD" | "TG" | "TH" | "TM" | "TN" | "TR" | "TT" | "TW" | "UA" | "UG" | "US" | "UY" | "UZ" | "VE" | "VG" | "VN" | "YE" | "ZA" | "ZM" | "ZW" | "ad" | "ae" | "af" | "al" | "am" | "ao" | "ar" | "at" | "au" | "aw" | "az" | "ba" | "bd" | "be" | "bg" | "bh" | "bj" | "bo" | "br" | "bs" | "bt" | "by" | "bz" | "ca" | "cf" | "ch" | "ci" | "cl" | "cm" | "cn" | "co" | "cr" | "cu" | "cy" | "cz" | "de" | "dj" | "dk" | "dm" | "ec" | "ee" | "eg" | "es" | "et" | "eu" | "fi" | "fj" | "fr" | "gb" | "ge" | "gh" | "gm" | "gr" | "hk" | "hn" | "hr" | "ht" | "hu" | "id" | "ie" | "il" | "in" | "iq" | "ir" | "is" | "it" | "jm" | "jo" | "jp" | "ke" | "kh" | "kr" | "kw" | "kz" | "lb" | "li" | "lr" | "lt" | "lu" | "lv" | "ma" | "mc" | "md" | "me" | "mg" | "mk" | "ml" | "mm" | "mn" | "mr" | "mt" | "mu" | "mv" | "mx" | "my" | "mz" | "ng" | "nl" | "no" | "nz" | "om" | "pa" | "pe" | "ph" | "pk" | "pl" | "pr" | "pt" | "py" | "qa" | "ro" | "rs" | "ru" | "sa" | "sc" | "sd" | "se" | "sg" | "si" | "sk" | "sn" | "ss" | "td" | "tg" | "th" | "tm" | "tn" | "tr" | "tt" | "tw" | "ua" | "ug" | "us" | "uy" | "uz" | "ve" | "vg" | "vn" | "ye" | "za" | "zm" | "zw";
 export type OperatingSystem = "windows" | "android" | "macos" | "linux" | "ios";
 export type Platform = "chrome" | "firefox" | "safari" | "edge";

package/dist/types/crawl.d.ts
CHANGED

@@ -1,12 +1,15 @@
-import { CrawlJobStatus } from "./constants";
+import { CrawlJobStatus, CrawlPageStatus } from "./constants";
+import { ScrapeOptions } from "./scrape";
+import { CreateSessionParams } from "./session";
 export interface StartCrawlJobParams {
     url: string;
     maxPages?: number;
     followLinks?: boolean;
+    ignoreSitemap?: boolean;
     excludePatterns?: string[];
     includePatterns?: string[];
-
-
+    sessionOptions?: CreateSessionParams;
+    scrapeOptions?: ScrapeOptions;
 }
 export interface StartCrawlJobResponse {
     jobId: string;
@@ -15,24 +18,17 @@ export interface GetCrawlJobParams {
     page?: number;
     batchSize?: number;
 }
-export interface CrawledPageMetadata {
-    title: string;
-    description: string;
-    robots: string;
-    ogTitle: string;
-    ogDescription: string;
-    ogUrl: string;
-    ogImage: string;
-    ogLocaleAlternate: string[];
-    ogSiteName: string;
-    sourceURL: string;
-}
 export interface CrawledPage {
     url: string;
-    metadata: CrawledPageMetadata;
-    markdown: string;
+    status: CrawlPageStatus;
+    error?: string | null;
+    metadata?: Record<string, string | string[]>;
+    markdown?: string;
+    html?: string;
+    links?: string[];
 }
 export interface CrawlJobResponse {
+    jobId: string;
     status: CrawlJobStatus;
     data?: CrawledPage[];
     error?: string;
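
Note: crawl parameters now nest the session and per-page scrape configuration instead of carrying their own flat options. A sketch of a fully populated `StartCrawlJobParams` (values illustrative):

```typescript
import { StartCrawlJobParams } from "@hyperbrowser/sdk";

const params: StartCrawlJobParams = {
  url: "https://example.com",
  maxPages: 25,
  ignoreSitemap: false,
  includePatterns: ["/blog/*"],
  sessionOptions: { adblock: true },        // any CreateSessionParams
  scrapeOptions: { formats: ["markdown"] }, // the shared ScrapeOptions type
};
```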

package/dist/types/index.d.ts
CHANGED

@@ -1 +1,5 @@
-export * from ".";
+export { HyperbrowserConfig } from "./config";
+export { StartCrawlJobParams, StartCrawlJobResponse, CrawledPage, CrawlJobResponse, GetCrawlJobParams, } from "./crawl";
+export { StartScrapeJobParams, StartScrapeJobResponse, ScrapeJobData, ScrapeJobResponse, } from "./scrape";
+export { BasicResponse, SessionStatus, Session, SessionDetail, SessionListParams, SessionListResponse, ScreenConfig, CreateSessionParams, } from "./session";
+export { ScrapeJobStatus, CrawlJobStatus, Country, ISO639_1, OperatingSystem, Platform, } from "./constants";

package/dist/types/index.js
CHANGED

@@ -1,17 +1,2 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-__exportStar(require("."), exports);

package/dist/types/scrape.d.ts
CHANGED

@@ -1,29 +1,29 @@
-import { ScrapeJobStatus } from "./constants";
+import { ScrapeFormat, ScrapeJobStatus } from "./constants";
+import { CreateSessionParams } from "./session";
+export interface ScrapeOptions {
+    formats?: ScrapeFormat[];
+    includeTags?: string[];
+    excludeTags?: string[];
+    onlyMainContent?: boolean;
+    waitFor?: number;
+    timeout?: number;
+}
 export interface StartScrapeJobParams {
     url: string;
-
-
+    sessionOptions?: CreateSessionParams;
+    scrapeOptions?: ScrapeOptions;
 }
 export interface StartScrapeJobResponse {
     jobId: string;
 }
-export interface ScrapeJobMetadata {
-    title: string;
-    description: string;
-    robots: string;
-    ogTitle: string;
-    ogDescription: string;
-    ogUrl: string;
-    ogImage: string;
-    ogLocaleAlternate: string[];
-    ogSiteName: string;
-    sourceURL: string;
-}
 export interface ScrapeJobData {
-    metadata: ScrapeJobMetadata;
-    markdown: string;
+    metadata?: Record<string, string | string[]>;
+    markdown?: string;
+    html?: string;
+    links?: string[];
 }
 export interface ScrapeJobResponse {
+    jobId: string;
     status: ScrapeJobStatus;
     data?: ScrapeJobData;
     error?: string;
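
Note: metadata changes shape here: 0.7.0 typed it as a fixed `ScrapeJobMetadata` interface (title, ogTitle, sourceURL, ...), while 0.9.0 ships an open `Record<string, string | string[]>`, so individual keys must be looked up and narrowed by hand:

```typescript
import { ScrapeJobResponse } from "@hyperbrowser/sdk";

function pageTitle(job: ScrapeJobResponse): string | undefined {
  const title = job.data?.metadata?.["title"]; // string | string[] | undefined
  return Array.isArray(title) ? title[0] : title;
}
```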

package/dist/types/session.d.ts
CHANGED

@@ -12,6 +12,7 @@ export interface Session {
     createdAt: string;
     updatedAt: string;
     sessionUrl: string;
+    token: string;
 }
 export interface SessionDetail extends Session {
     wsEndpoint?: string;
@@ -46,4 +47,11 @@ export interface CreateSessionParams {
     adblock?: boolean;
     trackers?: boolean;
     annoyances?: boolean;
+    enableWebRecording?: boolean;
+}
+export interface SessionRecording {
+    type: number;
+    data: unknown;
+    timestamp: number;
+    delay?: number;
 }
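
Note: each `SessionRecording` entry is a numeric event type, an opaque payload, a millisecond timestamp, and an optional delay; the shape resembles rrweb-style replay events, though the diff itself does not say so. A small helper over that shape:

```typescript
import { SessionRecording } from "@hyperbrowser/sdk";

// Rough wall-clock span of a recording, from first to last event.
function recordingDurationMs(events: SessionRecording[]): number {
  if (events.length < 2) return 0;
  return events[events.length - 1].timestamp - events[0].timestamp;
}
```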

package/dist/utils.d.ts
ADDED

@@ -0,0 +1 @@
+export declare const sleep: (ms: number) => Promise<unknown>;
package/dist/utils.js
ADDED
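
The body of the added utils.js is not captured in this dump. Given utils.d.ts above and the `sleep` calls in the services, it is presumably the compiled form of a one-line promise-based delay along these lines (inferred, not shown in the source diff):

```typescript
export const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
```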