@hyperbrowser/sdk 0.15.0 → 0.17.0
- package/dist/tools/anthropic.js +14 -0
- package/dist/tools/index.d.ts +12 -0
- package/dist/tools/index.js +31 -0
- package/dist/tools/openai.js +22 -0
- package/dist/tools/schema.d.ts +126 -0
- package/dist/tools/schema.js +87 -0
- package/package.json +27 -2
- package/dist/client.d.ts +0 -18
- package/dist/client.js +0 -32
- package/dist/index.d.ts +0 -5
- package/dist/index.js +0 -31
- package/dist/services/base.d.ts +0 -8
- package/dist/services/base.js +0 -68
- package/dist/services/crawl.d.ts +0 -21
- package/dist/services/crawl.js +0 -82
- package/dist/services/extensions.d.ts +0 -13
- package/dist/services/extensions.js +0 -74
- package/dist/services/profiles.d.ts +0 -19
- package/dist/services/profiles.js +0 -56
- package/dist/services/scrape.d.ts +0 -19
- package/dist/services/scrape.js +0 -62
- package/dist/services/sessions.d.ts +0 -29
- package/dist/services/sessions.js +0 -91
- package/dist/types/config.d.ts +0 -5
- package/dist/types/config.js +0 -2
- package/dist/types/constants.d.ts +0 -8
- package/dist/types/constants.js +0 -2
- package/dist/types/crawl.d.ts +0 -40
- package/dist/types/crawl.js +0 -2
- package/dist/types/extension.d.ts +0 -13
- package/dist/types/extension.js +0 -2
- package/dist/types/index.d.ts +0 -7
- package/dist/types/index.js +0 -2
- package/dist/types/profile.d.ts +0 -9
- package/dist/types/profile.js +0 -2
- package/dist/types/scrape.d.ts +0 -31
- package/dist/types/scrape.js +0 -2
- package/dist/types/session.d.ts +0 -64
- package/dist/types/session.js +0 -2
- package/dist/utils.d.ts +0 -1
- package/dist/utils.js +0 -5
package/dist/tools/anthropic.js
ADDED
@@ -0,0 +1,14 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.CRAWL_TOOL_ANTHROPIC = exports.SCRAPE_TOOL_ANTHROPIC = void 0;
+ const schema_1 = require("./schema");
+ exports.SCRAPE_TOOL_ANTHROPIC = {
+     input_schema: schema_1.SCRAPE_SCHEMA,
+     name: "scrape_webpage",
+     description: "Scrape content from a webpage and return the content in markdown format",
+ };
+ exports.CRAWL_TOOL_ANTHROPIC = {
+     input_schema: schema_1.CRAWL_SCHEMA,
+     name: "crawl_website",
+     description: "Crawl a website and return the content in markdown format",
+ };
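For context, a minimal sketch of how these Anthropic-format definitions could be used, assuming the separately installed @anthropic-ai/sdk package (the model id and prompt below are placeholders, not part of this release):

import Anthropic from "@anthropic-ai/sdk";
import { WebsiteScrapeTool } from "@hyperbrowser/sdk/tools";

async function main() {
  const anthropic = new Anthropic(); // reads ANTHROPIC_API_KEY from the environment
  // Anthropic tool definitions carry `name`, `description`, and `input_schema`.
  const message = await anthropic.messages.create({
    model: "claude-3-5-sonnet-latest", // placeholder model id
    max_tokens: 1024,
    tools: [WebsiteScrapeTool.anthropicToolDefinition],
    messages: [{ role: "user", content: "Summarize https://example.com" }],
  });
  console.log(message.content);
}

main();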
package/dist/tools/index.d.ts
ADDED
@@ -0,0 +1,12 @@
+ import { HyperbrowserClient } from "../client";
+ import { StartScrapeJobParams, StartCrawlJobParams } from "../types";
+ export declare class WebsiteScrapeTool {
+     static openaiToolDefinition: import("./openai").ChatCompletionTool;
+     static anthropicToolDefinition: import("./anthropic").Tool;
+     static runnable(hb: HyperbrowserClient, params: StartScrapeJobParams): Promise<string>;
+ }
+ export declare class WebsiteCrawlTool {
+     static openaiToolDefinition: import("./openai").ChatCompletionTool;
+     static anthropicToolDefinition: import("./anthropic").Tool;
+     static runnable(hb: HyperbrowserClient, params: StartCrawlJobParams): Promise<string>;
+ }
package/dist/tools/index.js
ADDED
@@ -0,0 +1,31 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.WebsiteCrawlTool = exports.WebsiteScrapeTool = void 0;
+ const openai_1 = require("./openai");
+ const anthropic_1 = require("./anthropic");
+ class WebsiteScrapeTool {
+     static async runnable(hb, params) {
+         const resp = await hb.scrape.startAndWait(params);
+         return resp.data?.markdown || "";
+     }
+ }
+ exports.WebsiteScrapeTool = WebsiteScrapeTool;
+ WebsiteScrapeTool.openaiToolDefinition = openai_1.SCRAPE_TOOL_OPENAI;
+ WebsiteScrapeTool.anthropicToolDefinition = anthropic_1.SCRAPE_TOOL_ANTHROPIC;
+ class WebsiteCrawlTool {
+     static async runnable(hb, params) {
+         const resp = await hb.crawl.startAndWait(params);
+         let markdown = "";
+         if (resp.data) {
+             for (const page of resp.data) {
+                 if (page.markdown) {
+                     markdown += `\n${"-".repeat(50)}\nUrl: ${page.url}\nMarkdown:\n${page.markdown}\n`;
+                 }
+             }
+         }
+         return markdown;
+     }
+ }
+ exports.WebsiteCrawlTool = WebsiteCrawlTool;
+ WebsiteCrawlTool.openaiToolDefinition = openai_1.CRAWL_TOOL_OPENAI;
+ WebsiteCrawlTool.anthropicToolDefinition = anthropic_1.CRAWL_TOOL_ANTHROPIC;
package/dist/tools/openai.js
ADDED
@@ -0,0 +1,22 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.CRAWL_TOOL_OPENAI = exports.SCRAPE_TOOL_OPENAI = void 0;
+ const schema_1 = require("./schema");
+ exports.SCRAPE_TOOL_OPENAI = {
+     type: "function",
+     function: {
+         name: "scrape_webpage",
+         description: "Scrape content from a webpage and return the content in markdown format",
+         parameters: schema_1.SCRAPE_SCHEMA,
+         strict: true,
+     },
+ };
+ exports.CRAWL_TOOL_OPENAI = {
+     type: "function",
+     function: {
+         name: "crawl_website",
+         description: "Crawl a website and return the content in markdown format",
+         parameters: schema_1.CRAWL_SCHEMA,
+         strict: true,
+     },
+ };
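A sketch of the intended round trip with OpenAI's chat completions API, assuming the separately installed openai package and API keys in the environment (the model id and variable names are illustrative):

import OpenAI from "openai";
import { Hyperbrowser } from "@hyperbrowser/sdk";
import { WebsiteScrapeTool } from "@hyperbrowser/sdk/tools";

async function main() {
  const openai = new OpenAI();
  const hb = new Hyperbrowser({ apiKey: process.env.HYPERBROWSER_API_KEY ?? "" });

  const completion = await openai.chat.completions.create({
    model: "gpt-4o", // placeholder model id
    tools: [WebsiteScrapeTool.openaiToolDefinition],
    messages: [{ role: "user", content: "What does https://example.com say?" }],
  });

  // If the model chose the tool, run the scrape job with the arguments it produced.
  const call = completion.choices[0].message.tool_calls?.[0];
  if (call) {
    const args = JSON.parse(call.function.arguments);
    const markdown = await WebsiteScrapeTool.runnable(hb, args);
    console.log(markdown); // feed back as a "tool" role message to continue the chat
  }
}

main();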
package/dist/tools/schema.d.ts
ADDED
@@ -0,0 +1,126 @@
+ export declare const SCRAPE_OPTIONS: {
+     type: string;
+     description: string;
+     properties: {
+         include_tags: {
+             type: string;
+             items: {
+                 type: string;
+             };
+             description: string;
+         };
+         exclude_tags: {
+             type: string;
+             items: {
+                 type: string;
+             };
+             description: string;
+         };
+         only_main_content: {
+             type: string;
+             description: string;
+         };
+     };
+     required: string[];
+     additionalProperties: boolean;
+ };
+ export declare const SCRAPE_SCHEMA: {
+     type: "object";
+     properties: {
+         url: {
+             type: string;
+             description: string;
+         };
+         scrape_options: {
+             type: string;
+             description: string;
+             properties: {
+                 include_tags: {
+                     type: string;
+                     items: {
+                         type: string;
+                     };
+                     description: string;
+                 };
+                 exclude_tags: {
+                     type: string;
+                     items: {
+                         type: string;
+                     };
+                     description: string;
+                 };
+                 only_main_content: {
+                     type: string;
+                     description: string;
+                 };
+             };
+             required: string[];
+             additionalProperties: boolean;
+         };
+     };
+     required: string[];
+     additionalProperties: boolean;
+ };
+ export declare const CRAWL_SCHEMA: {
+     type: "object";
+     properties: {
+         url: {
+             type: string;
+             description: string;
+         };
+         max_pages: {
+             type: string;
+             description: string;
+         };
+         follow_links: {
+             type: string;
+             description: string;
+         };
+         ignore_sitemap: {
+             type: string;
+             description: string;
+         };
+         exclude_patterns: {
+             type: string;
+             items: {
+                 type: string;
+             };
+             description: string;
+         };
+         include_patterns: {
+             type: string;
+             items: {
+                 type: string;
+             };
+             description: string;
+         };
+         scrape_options: {
+             type: string;
+             description: string;
+             properties: {
+                 include_tags: {
+                     type: string;
+                     items: {
+                         type: string;
+                     };
+                     description: string;
+                 };
+                 exclude_tags: {
+                     type: string;
+                     items: {
+                         type: string;
+                     };
+                     description: string;
+                 };
+                 only_main_content: {
+                     type: string;
+                     description: string;
+                 };
+             };
+             required: string[];
+             additionalProperties: boolean;
+         };
+     };
+     required: string[];
+     additionalProperties: boolean;
+ };
package/dist/tools/schema.js
ADDED
@@ -0,0 +1,87 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.CRAWL_SCHEMA = exports.SCRAPE_SCHEMA = exports.SCRAPE_OPTIONS = void 0;
+ exports.SCRAPE_OPTIONS = {
+     type: "object",
+     description: "The options for the scrape",
+     properties: {
+         include_tags: {
+             type: "array",
+             items: {
+                 type: "string",
+             },
+             description: "An array of HTML tags, classes, or IDs to include in the scraped content. Only elements matching these selectors will be returned.",
+         },
+         exclude_tags: {
+             type: "array",
+             items: {
+                 type: "string",
+             },
+             description: "An array of HTML tags, classes, or IDs to exclude from the scraped content. Elements matching these selectors will be omitted from the response.",
+         },
+         only_main_content: {
+             type: "boolean",
+             description: "Whether to only return the main content of the page. If true, only the main content of the page will be returned, excluding any headers, navigation menus,footers, or other non-main content.",
+         },
+     },
+     required: ["include_tags", "exclude_tags", "only_main_content"],
+     additionalProperties: false,
+ };
+ exports.SCRAPE_SCHEMA = {
+     type: "object",
+     properties: {
+         url: {
+             type: "string",
+             description: "The URL of the website to scrape",
+         },
+         scrape_options: exports.SCRAPE_OPTIONS,
+     },
+     required: ["url", "scrape_options"],
+     additionalProperties: false,
+ };
+ exports.CRAWL_SCHEMA = {
+     type: "object",
+     properties: {
+         url: {
+             type: "string",
+             description: "The URL of the website to crawl",
+         },
+         max_pages: {
+             type: "number",
+             description: "The maximum number of pages to crawl",
+         },
+         follow_links: {
+             type: "boolean",
+             description: "Whether to follow links on the page",
+         },
+         ignore_sitemap: {
+             type: "boolean",
+             description: "Whether to ignore the sitemap",
+         },
+         exclude_patterns: {
+             type: "array",
+             items: {
+                 type: "string",
+             },
+             description: "An array of regular expressions or wildcard patterns specifying which URLs should be excluded from the crawl. Any pages whose URLs' path match one of these patterns will be skipped. Example: ['/admin', '/careers/*']",
+         },
+         include_patterns: {
+             type: "array",
+             items: {
+                 type: "string",
+             },
+             description: "An array of regular expressions or wildcard patterns specifying which URLs should be included in the crawl. Only pages whose URLs' path match one of these path patterns will be visited. Example: ['/admin', '/careers/*']",
+         },
+         scrape_options: exports.SCRAPE_OPTIONS,
+     },
+     required: [
+         "url",
+         "max_pages",
+         "follow_links",
+         "ignore_sitemap",
+         "exclude_patterns",
+         "include_patterns",
+         "scrape_options",
+     ],
+     additionalProperties: false,
+ };
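Because the OpenAI definitions set `strict: true`, every property appears in `required` and `additionalProperties` is false, so the model must emit all fields. A conforming `crawl_website` argument payload would be shaped like this (values are illustrative):

const exampleCrawlArgs = {
  url: "https://example.com",
  max_pages: 10,
  follow_links: true,
  ignore_sitemap: false,
  exclude_patterns: ["/admin"],
  include_patterns: ["/blog/*"],
  scrape_options: {
    include_tags: ["article"],
    exclude_tags: ["nav"],
    only_main_content: true,
  },
};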
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@hyperbrowser/sdk",
-   "version": "0.15.0",
+   "version": "0.17.0",
    "description": "Node SDK for Hyperbrowser API",
    "author": "",
    "main": "dist/index.js",
@@ -22,7 +22,12 @@
    "keywords": [
      "hyperbrowser",
      "browser",
-     "automation"
+     "automation",
+     "webscraping",
+     "webcrawling",
+     "scraping",
+     "crawling",
+     "ai"
    ],
    "dependencies": {
      "form-data": "^4.0.1",
@@ -38,5 +43,25 @@
      "prettier": "^3.3.3",
      "ts-node": "^10.9.2",
      "typescript": "^5.6.3"
+   },
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "default": "./dist/index.js"
+     },
+     "./types": {
+       "types": "./dist/types/index.d.ts",
+       "default": "./dist/types/index.js"
+     },
+     "./tools": {
+       "types": "./dist/tools/index.d.ts",
+       "default": "./dist/tools/index.js"
+     }
+   },
+   "typesVersions": {
+     "*": {
+       "types": ["./dist/types/index.d.ts"],
+       "tools": ["./dist/tools/index.d.ts"]
+     }
    }
  }
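The new `exports` and `typesVersions` fields make the compiled tools and type declarations importable as subpaths; for instance (the named exports shown are the ones defined by the entry points in this diff):

import { Hyperbrowser, HyperbrowserError } from "@hyperbrowser/sdk";
import { WebsiteScrapeTool, WebsiteCrawlTool } from "@hyperbrowser/sdk/tools";
import type { StartCrawlJobParams, CrawlJobResponse } from "@hyperbrowser/sdk/types";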
package/dist/client.d.ts
DELETED
@@ -1,18 +0,0 @@
- import { HyperbrowserConfig } from "./types/config";
- import { SessionsService } from "./services/sessions";
- import { ScrapeService } from "./services/scrape";
- import { CrawlService } from "./services/crawl";
- import { ProfilesService } from "./services/profiles";
- import { ExtensionService } from "./services/extensions";
- export declare class HyperbrowserError extends Error {
-     statusCode?: number | undefined;
-     constructor(message: string, statusCode?: number | undefined);
- }
- export declare class HyperbrowserClient {
-     readonly sessions: SessionsService;
-     readonly scrape: ScrapeService;
-     readonly crawl: CrawlService;
-     readonly profiles: ProfilesService;
-     readonly extensions: ExtensionService;
-     constructor(config: HyperbrowserConfig);
- }
package/dist/client.js
DELETED
@@ -1,32 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.HyperbrowserClient = exports.HyperbrowserError = void 0;
- const sessions_1 = require("./services/sessions");
- const scrape_1 = require("./services/scrape");
- const crawl_1 = require("./services/crawl");
- const profiles_1 = require("./services/profiles");
- const extensions_1 = require("./services/extensions");
- class HyperbrowserError extends Error {
-     constructor(message, statusCode) {
-         super(`[Hyperbrowser]: ${message}`);
-         this.statusCode = statusCode;
-         this.name = "HyperbrowserError";
-     }
- }
- exports.HyperbrowserError = HyperbrowserError;
- class HyperbrowserClient {
-     constructor(config) {
-         const apiKey = config.apiKey;
-         const baseUrl = config.baseUrl || "https://app.hyperbrowser.ai";
-         const timeout = config.timeout || 30000;
-         if (!apiKey) {
-             throw new HyperbrowserError("API key is required");
-         }
-         this.sessions = new sessions_1.SessionsService(apiKey, baseUrl, timeout);
-         this.scrape = new scrape_1.ScrapeService(apiKey, baseUrl, timeout);
-         this.crawl = new crawl_1.CrawlService(apiKey, baseUrl, timeout);
-         this.profiles = new profiles_1.ProfilesService(apiKey, baseUrl, timeout);
-         this.extensions = new extensions_1.ExtensionService(apiKey, baseUrl, timeout);
-     }
- }
- exports.HyperbrowserClient = HyperbrowserClient;
package/dist/index.d.ts
DELETED
package/dist/index.js
DELETED
@@ -1,31 +0,0 @@
- "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-     if (k2 === undefined) k2 = k;
-     var desc = Object.getOwnPropertyDescriptor(m, k);
-     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-         desc = { enumerable: true, get: function() { return m[k]; } };
-     }
-     Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
-     if (k2 === undefined) k2 = k;
-     o[k2] = m[k];
- }));
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
-     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.Hyperbrowser = exports.HyperbrowserError = void 0;
- const client_1 = require("./client");
- __exportStar(require("./types"), exports);
- var client_2 = require("./client");
- Object.defineProperty(exports, "HyperbrowserError", { enumerable: true, get: function () { return client_2.HyperbrowserError; } });
- // Export HyperbrowserClient as Hyperbrowser for named imports
- exports.Hyperbrowser = client_1.HyperbrowserClient;
- exports.default = client_1.HyperbrowserClient;
- // For CommonJS compatibility
- if (typeof module !== "undefined" && module.exports) {
-     module.exports = client_1.HyperbrowserClient;
-     module.exports.Hyperbrowser = client_1.HyperbrowserClient;
-     module.exports.HyperbrowserClient = client_1.HyperbrowserClient;
-     module.exports.default = client_1.HyperbrowserClient;
- }
package/dist/services/base.d.ts
DELETED
@@ -1,8 +0,0 @@
- import { RequestInit } from "node-fetch";
- export declare class BaseService {
-     protected readonly apiKey: string;
-     protected readonly baseUrl: string;
-     protected readonly timeout: number;
-     constructor(apiKey: string, baseUrl: string, timeout?: number);
-     protected request<T>(path: string, init?: RequestInit, params?: Record<string, string | number | undefined>): Promise<T>;
- }
package/dist/services/base.js
DELETED
@@ -1,68 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.BaseService = void 0;
- const node_fetch_1 = __importDefault(require("node-fetch"));
- const client_1 = require("../client");
- class BaseService {
-     constructor(apiKey, baseUrl, timeout = 30000) {
-         this.apiKey = apiKey;
-         this.baseUrl = baseUrl;
-         this.timeout = timeout;
-     }
-     async request(path, init, params) {
-         try {
-             const url = new URL(`${this.baseUrl}/api${path}`);
-             if (params) {
-                 Object.entries(params).forEach(([key, value]) => {
-                     if (value !== undefined) {
-                         url.searchParams.append(key, value.toString());
-                     }
-                 });
-             }
-             const headerKeys = Object.keys(init?.headers || {});
-             const contentTypeKey = headerKeys.find((key) => key.toLowerCase() === "content-type");
-             const response = await (0, node_fetch_1.default)(url.toString(), {
-                 ...init,
-                 timeout: this.timeout,
-                 headers: {
-                     "x-api-key": this.apiKey,
-                     ...(contentTypeKey && init?.headers
-                         ? { "content-type": init.headers[contentTypeKey] }
-                         : { "content-type": "application/json" }),
-                     ...init?.headers,
-                 },
-             });
-             if (!response.ok) {
-                 let errorMessage;
-                 try {
-                     const errorData = await response.json();
-                     errorMessage =
-                         errorData.message || errorData.error || `HTTP error! status: ${response.status}`;
-                 }
-                 catch {
-                     errorMessage = `HTTP error! status: ${response.status}`;
-                 }
-                 throw new client_1.HyperbrowserError(errorMessage, response.status);
-             }
-             if (response.headers.get("content-length") === "0") {
-                 return {};
-             }
-             try {
-                 return (await response.json());
-             }
-             catch {
-                 throw new client_1.HyperbrowserError("Failed to parse JSON response", response.status);
-             }
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(error instanceof Error ? error.message : "Unknown error occurred", undefined);
-         }
-     }
- }
- exports.BaseService = BaseService;
package/dist/services/crawl.d.ts
DELETED
@@ -1,21 +0,0 @@
- import { CrawlJobResponse, GetCrawlJobParams, StartCrawlJobParams, StartCrawlJobResponse } from "../types/crawl";
- import { BaseService } from "./base";
- export declare class CrawlService extends BaseService {
-     /**
-      * Start a new crawl job
-      * @param params The parameters for the crawl job
-      */
-     start(params: StartCrawlJobParams): Promise<StartCrawlJobResponse>;
-     /**
-      * Get the status of a crawl job
-      * @param id The ID of the crawl job to get
-      * @param params Optional parameters to filter the crawl job
-      */
-     get(id: string, params?: GetCrawlJobParams): Promise<CrawlJobResponse>;
-     /**
-      * Start a crawl job and wait for it to complete
-      * @param params The parameters for the crawl job
-      * @param returnAllPages Whether to return all pages in the crawl job response
-      */
-     startAndWait(params: StartCrawlJobParams, returnAllPages?: boolean): Promise<CrawlJobResponse>;
- }
package/dist/services/crawl.js
DELETED
@@ -1,82 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.CrawlService = void 0;
- const base_1 = require("./base");
- const utils_1 = require("../utils");
- const client_1 = require("../client");
- class CrawlService extends base_1.BaseService {
-     /**
-      * Start a new crawl job
-      * @param params The parameters for the crawl job
-      */
-     async start(params) {
-         try {
-             return await this.request("/crawl", {
-                 method: "POST",
-                 body: JSON.stringify(params),
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError("Failed to start crawl job", undefined);
-         }
-     }
-     /**
-      * Get the status of a crawl job
-      * @param id The ID of the crawl job to get
-      * @param params Optional parameters to filter the crawl job
-      */
-     async get(id, params) {
-         try {
-             return await this.request(`/crawl/${id}`, undefined, {
-                 page: params?.page,
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to get crawl job ${id}`, undefined);
-         }
-     }
-     /**
-      * Start a crawl job and wait for it to complete
-      * @param params The parameters for the crawl job
-      * @param returnAllPages Whether to return all pages in the crawl job response
-      */
-     async startAndWait(params, returnAllPages = true) {
-         const job = await this.start(params);
-         const jobId = job.jobId;
-         if (!jobId) {
-             throw new client_1.HyperbrowserError("Failed to start crawl job, could not get job ID");
-         }
-         let jobResponse;
-         while (true) {
-             jobResponse = await this.get(jobId);
-             if (jobResponse.status === "completed" || jobResponse.status === "failed") {
-                 break;
-             }
-             await (0, utils_1.sleep)(2000);
-         }
-         if (!returnAllPages) {
-             return jobResponse;
-         }
-         while (jobResponse.currentPageBatch < jobResponse.totalPageBatches) {
-             const tmpJobResponse = await this.get(jobId, {
-                 page: jobResponse.currentPageBatch + 1,
-             });
-             if (tmpJobResponse.data) {
-                 jobResponse.data?.push(...tmpJobResponse.data);
-             }
-             jobResponse.currentPageBatch = tmpJobResponse.currentPageBatch;
-             jobResponse.totalCrawledPages = tmpJobResponse.totalCrawledPages;
-             jobResponse.totalPageBatches = tmpJobResponse.totalPageBatches;
-             jobResponse.batchSize = tmpJobResponse.batchSize;
-             await (0, utils_1.sleep)(500);
-         }
-         return jobResponse;
-     }
- }
- exports.CrawlService = CrawlService;
package/dist/services/extensions.d.ts
DELETED
@@ -1,13 +0,0 @@
- import { CreateExtensionParams, CreateExtensionResponse, ListExtensionsResponse } from "../types/extension";
- import { BaseService } from "./base";
- export declare class ExtensionService extends BaseService {
-     /**
-      * Upload an extension to hyperbrowser
-      * @param params Configuration parameters for the new extension
-      */
-     create(params: CreateExtensionParams): Promise<CreateExtensionResponse>;
-     /**
-      * List all uploaded extensions for the account
-      */
-     list(): Promise<ListExtensionsResponse>;
- }
package/dist/services/extensions.js
DELETED
@@ -1,74 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ExtensionService = void 0;
- const client_1 = require("../client");
- const base_1 = require("./base");
- const form_data_1 = __importDefault(require("form-data"));
- const promises_1 = __importDefault(require("node:fs/promises"));
- const node_path_1 = __importDefault(require("node:path"));
- async function checkFileExists(filePath) {
-     try {
-         await promises_1.default.access(filePath, promises_1.default.constants.R_OK);
-         const extension = node_path_1.default.extname(filePath);
-         if (extension !== ".zip") {
-             throw new client_1.HyperbrowserError("Extension file provided is not zipped", undefined);
-         }
-     }
-     catch (err) {
-         if (err instanceof client_1.HyperbrowserError) {
-             throw err;
-         }
-         throw new client_1.HyperbrowserError("Could not find extension file", undefined);
-     }
- }
- class ExtensionService extends base_1.BaseService {
-     /**
-      * Upload an extension to hyperbrowser
-      * @param params Configuration parameters for the new extension
-      */
-     async create(params) {
-         try {
-             await checkFileExists(params.filePath);
-             const form = new form_data_1.default();
-             form.append("file", await promises_1.default.readFile(params.filePath), {
-                 filename: node_path_1.default.basename(params.filePath),
-                 contentType: "application/zip",
-             });
-             if (params.name) {
-                 form.append("name", params.name);
-             }
-             const response = await this.request("/extensions/add", {
-                 method: "POST",
-                 body: form,
-                 headers: form.getHeaders(),
-             });
-             return response;
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError("Failed to upload extension", undefined);
-         }
-     }
-     /**
-      * List all uploaded extensions for the account
-      */
-     async list() {
-         try {
-             return await this.request("/extensions/list", { method: "GET" });
-         }
-         catch (err) {
-             if (err instanceof client_1.HyperbrowserError) {
-                 throw err;
-             }
-             else {
-                 throw new client_1.HyperbrowserError("Could not list extensions", undefined);
-             }
-         }
-     }
- }
- exports.ExtensionService = ExtensionService;
package/dist/services/profiles.d.ts
DELETED
@@ -1,19 +0,0 @@
- import { BaseService } from "./base";
- import { ProfileResponse, CreateProfileResponse } from "../types/profile";
- import { BasicResponse } from "../types";
- export declare class ProfilesService extends BaseService {
-     /**
-      * Create a new profile
-      */
-     create(): Promise<CreateProfileResponse>;
-     /**
-      * Get details of an existing profile
-      * @param id The ID of the profile to get
-      */
-     get(id: string): Promise<ProfileResponse>;
-     /**
-      * Delete an existing profile
-      * @param id The ID of the profile to delete
-      */
-     delete(id: string): Promise<BasicResponse>;
- }
package/dist/services/profiles.js
DELETED
@@ -1,56 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ProfilesService = void 0;
- const base_1 = require("./base");
- const client_1 = require("../client");
- class ProfilesService extends base_1.BaseService {
-     /**
-      * Create a new profile
-      */
-     async create() {
-         try {
-             return await this.request("/profile", {
-                 method: "POST",
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError("Failed to create profile", undefined);
-         }
-     }
-     /**
-      * Get details of an existing profile
-      * @param id The ID of the profile to get
-      */
-     async get(id) {
-         try {
-             return await this.request(`/profile/${id}`);
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to get profile ${id}`, undefined);
-         }
-     }
-     /**
-      * Delete an existing profile
-      * @param id The ID of the profile to delete
-      */
-     async delete(id) {
-         try {
-             return await this.request(`/profile/${id}`, {
-                 method: "DELETE",
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to delete profile ${id}`, undefined);
-         }
-     }
- }
- exports.ProfilesService = ProfilesService;
package/dist/services/scrape.d.ts
DELETED
@@ -1,19 +0,0 @@
- import { ScrapeJobResponse, StartScrapeJobParams, StartScrapeJobResponse } from "../types/scrape";
- import { BaseService } from "./base";
- export declare class ScrapeService extends BaseService {
-     /**
-      * Start a new scrape job
-      * @param params The parameters for the scrape job
-      */
-     start(params: StartScrapeJobParams): Promise<StartScrapeJobResponse>;
-     /**
-      * Get the status of a scrape job
-      * @param id The ID of the scrape job to get
-      */
-     get(id: string): Promise<ScrapeJobResponse>;
-     /**
-      * Start a scrape job and wait for it to complete
-      * @param params The parameters for the scrape job
-      */
-     startAndWait(params: StartScrapeJobParams): Promise<ScrapeJobResponse>;
- }
package/dist/services/scrape.js
DELETED
@@ -1,62 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ScrapeService = void 0;
- const base_1 = require("./base");
- const utils_1 = require("../utils");
- const client_1 = require("../client");
- class ScrapeService extends base_1.BaseService {
-     /**
-      * Start a new scrape job
-      * @param params The parameters for the scrape job
-      */
-     async start(params) {
-         try {
-             return await this.request("/scrape", {
-                 method: "POST",
-                 body: JSON.stringify(params),
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError("Failed to start scrape job", undefined);
-         }
-     }
-     /**
-      * Get the status of a scrape job
-      * @param id The ID of the scrape job to get
-      */
-     async get(id) {
-         try {
-             return await this.request(`/scrape/${id}`);
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to get scrape job ${id}`, undefined);
-         }
-     }
-     /**
-      * Start a scrape job and wait for it to complete
-      * @param params The parameters for the scrape job
-      */
-     async startAndWait(params) {
-         const job = await this.start(params);
-         const jobId = job.jobId;
-         if (!jobId) {
-             throw new client_1.HyperbrowserError("Failed to start scrape job, could not get job ID");
-         }
-         let jobResponse;
-         while (true) {
-             jobResponse = await this.get(jobId);
-             if (jobResponse.status === "completed" || jobResponse.status === "failed") {
-                 break;
-             }
-             await (0, utils_1.sleep)(2000);
-         }
-         return jobResponse;
-     }
- }
- exports.ScrapeService = ScrapeService;
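The loop above polls every two seconds until the job reports `completed` or `failed`; typical caller-side usage looked like this (reusing the `hb` client from the earlier sketch, with a placeholder URL):

const job = await hb.scrape.startAndWait({ url: "https://example.com" });
if (job.status === "completed") {
  console.log(job.data?.markdown);
} else {
  console.error(job.error);
}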
package/dist/services/sessions.d.ts
DELETED
@@ -1,29 +0,0 @@
- import { BasicResponse, CreateSessionParams, SessionDetail, SessionListParams, SessionListResponse, SessionRecording } from "../types/session";
- import { BaseService } from "./base";
- export declare class SessionsService extends BaseService {
-     /**
-      * Create a new browser session
-      * @param params Configuration parameters for the new session
-      */
-     create(params?: CreateSessionParams): Promise<SessionDetail>;
-     /**
-      * Get details of an existing session
-      * @param id The ID of the session to get
-      */
-     get(id: string): Promise<SessionDetail>;
-     /**
-      * Stop a running session
-      * @param id The ID of the session to stop
-      */
-     stop(id: string): Promise<BasicResponse>;
-     /**
-      * List all sessions with optional filtering
-      * @param params Optional parameters to filter the sessions
-      */
-     list(params?: SessionListParams): Promise<SessionListResponse>;
-     /**
-      * Get the recording of a session
-      * @param id The ID of the session to get the recording from
-      */
-     getRecording(id: string): Promise<SessionRecording[]>;
- }
package/dist/services/sessions.js
DELETED
@@ -1,91 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.SessionsService = void 0;
- const base_1 = require("./base");
- const client_1 = require("../client");
- class SessionsService extends base_1.BaseService {
-     /**
-      * Create a new browser session
-      * @param params Configuration parameters for the new session
-      */
-     async create(params) {
-         try {
-             return await this.request("/session", {
-                 method: "POST",
-                 body: params ? JSON.stringify(params) : undefined,
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError("Failed to create session", undefined);
-         }
-     }
-     /**
-      * Get details of an existing session
-      * @param id The ID of the session to get
-      */
-     async get(id) {
-         try {
-             return await this.request(`/session/${id}`);
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to get session ${id}`, undefined);
-         }
-     }
-     /**
-      * Stop a running session
-      * @param id The ID of the session to stop
-      */
-     async stop(id) {
-         try {
-             return await this.request(`/session/${id}/stop`, {
-                 method: "PUT",
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to stop session ${id}`, undefined);
-         }
-     }
-     /**
-      * List all sessions with optional filtering
-      * @param params Optional parameters to filter the sessions
-      */
-     async list(params = {}) {
-         try {
-             return await this.request("/sessions", undefined, {
-                 status: params.status,
-                 page: params.page,
-             });
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError("Failed to list sessions", undefined);
-         }
-     }
-     /**
-      * Get the recording of a session
-      * @param id The ID of the session to get the recording from
-      */
-     async getRecording(id) {
-         try {
-             return await this.request(`/session/${id}/recording`);
-         }
-         catch (error) {
-             if (error instanceof client_1.HyperbrowserError) {
-                 throw error;
-             }
-             throw new client_1.HyperbrowserError(`Failed to get recording for session ${id}`, undefined);
-         }
-     }
- }
- exports.SessionsService = SessionsService;
package/dist/types/config.d.ts
DELETED
package/dist/types/config.js
DELETED
package/dist/types/constants.d.ts
DELETED
@@ -1,8 +0,0 @@
- export type ScrapeFormat = "markdown" | "html" | "links" | "screenshot";
- export type ScrapeJobStatus = "pending" | "running" | "completed" | "failed";
- export type CrawlJobStatus = "pending" | "running" | "completed" | "failed";
- export type CrawlPageStatus = "completed" | "failed";
- export type Country = "AD" | "AE" | "AF" | "AL" | "AM" | "AO" | "AR" | "AT" | "AU" | "AW" | "AZ" | "BA" | "BD" | "BE" | "BG" | "BH" | "BJ" | "BO" | "BR" | "BS" | "BT" | "BY" | "BZ" | "CA" | "CF" | "CH" | "CI" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "EC" | "EE" | "EG" | "ES" | "ET" | "EU" | "FI" | "FJ" | "FR" | "GB" | "GE" | "GH" | "GM" | "GR" | "HK" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IN" | "IQ" | "IR" | "IS" | "IT" | "JM" | "JO" | "JP" | "KE" | "KH" | "KR" | "KW" | "KZ" | "LB" | "LI" | "LR" | "LT" | "LU" | "LV" | "MA" | "MC" | "MD" | "ME" | "MG" | "MK" | "ML" | "MM" | "MN" | "MR" | "MT" | "MU" | "MV" | "MX" | "MY" | "MZ" | "NG" | "NL" | "NO" | "NZ" | "OM" | "PA" | "PE" | "PH" | "PK" | "PL" | "PR" | "PT" | "PY" | "QA" | "RANDOM_COUNTRY" | "RO" | "RS" | "RU" | "SA" | "SC" | "SD" | "SE" | "SG" | "SI" | "SK" | "SN" | "SS" | "TD" | "TG" | "TH" | "TM" | "TN" | "TR" | "TT" | "TW" | "UA" | "UG" | "US" | "UY" | "UZ" | "VE" | "VG" | "VN" | "YE" | "ZA" | "ZM" | "ZW" | "ad" | "ae" | "af" | "al" | "am" | "ao" | "ar" | "at" | "au" | "aw" | "az" | "ba" | "bd" | "be" | "bg" | "bh" | "bj" | "bo" | "br" | "bs" | "bt" | "by" | "bz" | "ca" | "cf" | "ch" | "ci" | "cl" | "cm" | "cn" | "co" | "cr" | "cu" | "cy" | "cz" | "de" | "dj" | "dk" | "dm" | "ec" | "ee" | "eg" | "es" | "et" | "eu" | "fi" | "fj" | "fr" | "gb" | "ge" | "gh" | "gm" | "gr" | "hk" | "hn" | "hr" | "ht" | "hu" | "id" | "ie" | "il" | "in" | "iq" | "ir" | "is" | "it" | "jm" | "jo" | "jp" | "ke" | "kh" | "kr" | "kw" | "kz" | "lb" | "li" | "lr" | "lt" | "lu" | "lv" | "ma" | "mc" | "md" | "me" | "mg" | "mk" | "ml" | "mm" | "mn" | "mr" | "mt" | "mu" | "mv" | "mx" | "my" | "mz" | "ng" | "nl" | "no" | "nz" | "om" | "pa" | "pe" | "ph" | "pk" | "pl" | "pr" | "pt" | "py" | "qa" | "ro" | "rs" | "ru" | "sa" | "sc" | "sd" | "se" | "sg" | "si" | "sk" | "sn" | "ss" | "td" | "tg" | "th" | "tm" | "tn" | "tr" | "tt" | "tw" | "ua" | "ug" | "us" | "uy" | "uz" | "ve" | "vg" | "vn" | "ye" | "za" | "zm" | "zw";
- export type OperatingSystem = "windows" | "android" | "macos" | "linux" | "ios";
- export type Platform = "chrome" | "firefox" | "safari" | "edge";
- export type ISO639_1 = "aa" | "ab" | "ae" | "af" | "ak" | "am" | "an" | "ar" | "as" | "av" | "ay" | "az" | "ba" | "be" | "bg" | "bh" | "bi" | "bm" | "bn" | "bo" | "br" | "bs" | "ca" | "ce" | "ch" | "co" | "cr" | "cs" | "cu" | "cv" | "cy" | "da" | "de" | "dv" | "dz" | "ee" | "el" | "en" | "eo" | "es" | "et" | "eu" | "fa" | "ff" | "fi" | "fj" | "fo" | "fr" | "fy" | "ga" | "gd" | "gl" | "gn" | "gu" | "gv" | "ha" | "he" | "hi" | "ho" | "hr" | "ht" | "hu" | "hy" | "hz" | "ia" | "id" | "ie" | "ig" | "ii" | "ik" | "io" | "is" | "it" | "iu" | "ja" | "jv" | "ka" | "kg" | "ki" | "kj" | "kk" | "kl" | "km" | "kn" | "ko" | "kr" | "ks" | "ku" | "kv" | "kw" | "ky" | "la" | "lb" | "lg" | "li" | "ln" | "lo" | "lt" | "lu" | "lv" | "mg" | "mh" | "mi" | "mk" | "ml" | "mn" | "mo" | "mr" | "ms" | "mt" | "my" | "na" | "nb" | "nd" | "ne" | "ng" | "nl" | "nn" | "no" | "nr" | "nv" | "ny" | "oc" | "oj" | "om" | "or" | "os" | "pa" | "pi" | "pl" | "ps" | "pt" | "qu" | "rm" | "rn" | "ro" | "ru" | "rw" | "sa" | "sc" | "sd" | "se" | "sg" | "si" | "sk" | "sl" | "sm" | "sn" | "so" | "sq" | "sr" | "ss" | "st" | "su" | "sv" | "sw" | "ta" | "te" | "tg" | "th" | "ti" | "tk" | "tl" | "tn" | "to" | "tr" | "ts" | "tt" | "tw" | "ty" | "ug" | "uk" | "ur" | "uz" | "ve" | "vi" | "vo" | "wa" | "wo" | "xh" | "yi" | "yo" | "za" | "zh" | "zu";
package/dist/types/constants.js
DELETED
package/dist/types/crawl.d.ts
DELETED
@@ -1,40 +0,0 @@
- import { CrawlJobStatus, CrawlPageStatus } from "./constants";
- import { ScrapeOptions } from "./scrape";
- import { CreateSessionParams } from "./session";
- export interface StartCrawlJobParams {
-     url: string;
-     maxPages?: number;
-     followLinks?: boolean;
-     ignoreSitemap?: boolean;
-     excludePatterns?: string[];
-     includePatterns?: string[];
-     sessionOptions?: CreateSessionParams;
-     scrapeOptions?: ScrapeOptions;
- }
- export interface StartCrawlJobResponse {
-     jobId: string;
- }
- export interface GetCrawlJobParams {
-     page?: number;
-     batchSize?: number;
- }
- export interface CrawledPage {
-     url: string;
-     status: CrawlPageStatus;
-     error?: string | null;
-     metadata?: Record<string, string | string[]>;
-     markdown?: string;
-     html?: string;
-     links?: string[];
-     screenshot?: string;
- }
- export interface CrawlJobResponse {
-     jobId: string;
-     status: CrawlJobStatus;
-     data?: CrawledPage[];
-     error?: string;
-     totalCrawledPages: number;
-     totalPageBatches: number;
-     currentPageBatch: number;
-     batchSize: number;
- }
package/dist/types/crawl.js
DELETED
package/dist/types/extension.d.ts
DELETED
@@ -1,13 +0,0 @@
- export interface CreateExtensionParams {
-     filePath: string;
-     name?: string;
- }
- interface ExtensionResponse {
-     name: string;
-     id: string;
-     createdAt: string;
-     updatedAt: string;
- }
- export type CreateExtensionResponse = ExtensionResponse;
- export type ListExtensionsResponse = Array<ExtensionResponse>;
- export {};
package/dist/types/extension.js
DELETED
package/dist/types/index.d.ts
DELETED
@@ -1,7 +0,0 @@
- export { HyperbrowserConfig } from "./config";
- export { StartCrawlJobParams, StartCrawlJobResponse, CrawledPage, CrawlJobResponse, GetCrawlJobParams, } from "./crawl";
- export { StartScrapeJobParams, StartScrapeJobResponse, ScrapeJobData, ScrapeJobResponse, } from "./scrape";
- export { BasicResponse, SessionStatus, Session, SessionDetail, SessionListParams, SessionListResponse, ScreenConfig, CreateSessionParams, } from "./session";
- export { ProfileResponse, CreateProfileResponse } from "./profile";
- export { CreateExtensionParams, CreateExtensionResponse, ListExtensionsResponse, } from "./extension";
- export { ScrapeJobStatus, CrawlJobStatus, Country, ISO639_1, OperatingSystem, Platform, } from "./constants";
package/dist/types/index.js
DELETED
package/dist/types/profile.d.ts
DELETED
package/dist/types/profile.js
DELETED
package/dist/types/scrape.d.ts
DELETED
@@ -1,31 +0,0 @@
- import { ScrapeFormat, ScrapeJobStatus } from "./constants";
- import { CreateSessionParams } from "./session";
- export interface ScrapeOptions {
-     formats?: ScrapeFormat[];
-     includeTags?: string[];
-     excludeTags?: string[];
-     onlyMainContent?: boolean;
-     waitFor?: number;
-     timeout?: number;
- }
- export interface StartScrapeJobParams {
-     url: string;
-     sessionOptions?: CreateSessionParams;
-     scrapeOptions?: ScrapeOptions;
- }
- export interface StartScrapeJobResponse {
-     jobId: string;
- }
- export interface ScrapeJobData {
-     metadata?: Record<string, string | string[]>;
-     markdown?: string;
-     html?: string;
-     links?: string[];
-     screenshot?: string;
- }
- export interface ScrapeJobResponse {
-     jobId: string;
-     status: ScrapeJobStatus;
-     data?: ScrapeJobData;
-     error?: string;
- }
package/dist/types/scrape.js
DELETED
package/dist/types/session.d.ts
DELETED
@@ -1,64 +0,0 @@
- import { Country, ISO639_1, OperatingSystem, Platform } from "./constants";
- export type SessionStatus = "active" | "closed" | "error";
- export interface BasicResponse {
-     success: boolean;
- }
- export interface Session {
-     id: string;
-     teamId: string;
-     status: SessionStatus;
-     startTime?: number;
-     endTime?: number;
-     createdAt: string;
-     updatedAt: string;
-     sessionUrl: string;
-     liveUrl?: string;
-     token: string;
- }
- export interface SessionDetail extends Session {
-     wsEndpoint?: string;
- }
- export interface SessionListParams {
-     status?: SessionStatus;
-     page?: number;
- }
- export interface SessionListResponse {
-     sessions: Session[];
-     totalCount: number;
-     page: number;
-     perPage: number;
- }
- export interface ScreenConfig {
-     width: number;
-     height: number;
- }
- export interface CreateSessionProfile {
-     id?: string;
-     persistChanges?: boolean;
- }
- export interface CreateSessionParams {
-     useStealth?: boolean;
-     useProxy?: boolean;
-     proxyServer?: string;
-     proxyServerPassword?: string;
-     proxyServerUsername?: string;
-     proxyCountry?: Country;
-     operatingSystems?: OperatingSystem[];
-     device?: ("desktop" | "mobile")[];
-     platform?: Platform[];
-     locales?: ISO639_1[];
-     screen?: ScreenConfig;
-     solveCaptchas?: boolean;
-     adblock?: boolean;
-     trackers?: boolean;
-     annoyances?: boolean;
-     enableWebRecording?: boolean;
-     profile?: CreateSessionProfile;
-     extensionIds?: Array<string>;
- }
- export interface SessionRecording {
-     type: number;
-     data: unknown;
-     timestamp: number;
-     delay?: number;
- }
package/dist/types/session.js
DELETED
package/dist/utils.d.ts
DELETED
@@ -1 +0,0 @@
- export declare const sleep: (ms: number) => Promise<unknown>;