@hyperbrowser/sdk 0.18.0 → 0.20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.d.ts +20 -0
- package/dist/client.js +34 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +16 -0
- package/dist/services/base.d.ts +8 -0
- package/dist/services/base.js +68 -0
- package/dist/services/crawl.d.ts +21 -0
- package/dist/services/crawl.js +82 -0
- package/dist/services/extensions.d.ts +13 -0
- package/dist/services/extensions.js +74 -0
- package/dist/services/extract.d.ts +20 -0
- package/dist/services/extract.js +78 -0
- package/dist/services/profiles.d.ts +19 -0
- package/dist/services/profiles.js +56 -0
- package/dist/services/scrape.d.ts +19 -0
- package/dist/services/scrape.js +62 -0
- package/dist/services/sessions.d.ts +29 -0
- package/dist/services/sessions.js +91 -0
- package/dist/tools/anthropic.d.ts +35 -0
- package/dist/tools/anthropic.js +14 -0
- package/dist/tools/index.d.ts +12 -0
- package/dist/tools/index.js +31 -0
- package/dist/tools/openai.d.ts +40 -0
- package/dist/tools/openai.js +22 -0
- package/dist/tools/schema.d.ts +126 -0
- package/dist/tools/schema.js +87 -0
- package/dist/types/config.d.ts +5 -0
- package/dist/types/config.js +2 -0
- package/dist/types/constants.d.ts +9 -0
- package/dist/types/constants.js +2 -0
- package/dist/types/crawl.d.ts +40 -0
- package/dist/types/crawl.js +2 -0
- package/dist/types/extension.d.ts +13 -0
- package/dist/types/extension.js +2 -0
- package/dist/types/extract.d.ts +18 -0
- package/dist/types/extract.js +2 -0
- package/dist/types/index.d.ts +7 -0
- package/dist/types/index.js +2 -0
- package/dist/types/profile.d.ts +9 -0
- package/dist/types/profile.js +2 -0
- package/dist/types/scrape.d.ts +31 -0
- package/dist/types/scrape.js +2 -0
- package/dist/types/session.d.ts +64 -0
- package/dist/types/session.js +2 -0
- package/dist/utils.d.ts +1 -0
- package/dist/utils.js +5 -0
- package/package.json +10 -4
package/dist/client.d.ts
ADDED
@@ -0,0 +1,20 @@
+import { HyperbrowserConfig } from "./types/config";
+import { SessionsService } from "./services/sessions";
+import { ScrapeService } from "./services/scrape";
+import { CrawlService } from "./services/crawl";
+import { ProfilesService } from "./services/profiles";
+import { ExtensionService } from "./services/extensions";
+import { ExtractService } from "./services/extract";
+export declare class HyperbrowserError extends Error {
+    statusCode?: number | undefined;
+    constructor(message: string, statusCode?: number | undefined);
+}
+export declare class HyperbrowserClient {
+    readonly sessions: SessionsService;
+    readonly scrape: ScrapeService;
+    readonly crawl: CrawlService;
+    readonly extract: ExtractService;
+    readonly profiles: ProfilesService;
+    readonly extensions: ExtensionService;
+    constructor(config: HyperbrowserConfig);
+}
package/dist/client.js
ADDED
@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HyperbrowserClient = exports.HyperbrowserError = void 0;
+const sessions_1 = require("./services/sessions");
+const scrape_1 = require("./services/scrape");
+const crawl_1 = require("./services/crawl");
+const profiles_1 = require("./services/profiles");
+const extensions_1 = require("./services/extensions");
+const extract_1 = require("./services/extract");
+class HyperbrowserError extends Error {
+    constructor(message, statusCode) {
+        super(`[Hyperbrowser]: ${message}`);
+        this.statusCode = statusCode;
+        this.name = "HyperbrowserError";
+    }
+}
+exports.HyperbrowserError = HyperbrowserError;
+class HyperbrowserClient {
+    constructor(config) {
+        const apiKey = config.apiKey || process.env["HYPERBROWSER_API_KEY"];
+        const baseUrl = config.baseUrl || "https://app.hyperbrowser.ai";
+        const timeout = config.timeout || 30000;
+        if (!apiKey) {
+            throw new HyperbrowserError("API key is required - either pass it in config or set HYPERBROWSER_API_KEY environment variable");
+        }
+        this.sessions = new sessions_1.SessionsService(apiKey, baseUrl, timeout);
+        this.scrape = new scrape_1.ScrapeService(apiKey, baseUrl, timeout);
+        this.crawl = new crawl_1.CrawlService(apiKey, baseUrl, timeout);
+        this.extract = new extract_1.ExtractService(apiKey, baseUrl, timeout);
+        this.profiles = new profiles_1.ProfilesService(apiKey, baseUrl, timeout);
+        this.extensions = new extensions_1.ExtensionService(apiKey, baseUrl, timeout);
+    }
+}
+exports.HyperbrowserClient = HyperbrowserClient;
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
@@ -0,0 +1,16 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Hyperbrowser = exports.HyperbrowserError = void 0;
+const client_1 = require("./client");
+var client_2 = require("./client");
+Object.defineProperty(exports, "HyperbrowserError", { enumerable: true, get: function () { return client_2.HyperbrowserError; } });
+// Export HyperbrowserClient as Hyperbrowser for named imports
+exports.Hyperbrowser = client_1.HyperbrowserClient;
+exports.default = client_1.HyperbrowserClient;
+// For CommonJS compatibility
+if (typeof module !== "undefined" && module.exports) {
+    module.exports = client_1.HyperbrowserClient;
+    module.exports.Hyperbrowser = client_1.HyperbrowserClient;
+    module.exports.HyperbrowserClient = client_1.HyperbrowserClient;
+    module.exports.default = client_1.HyperbrowserClient;
+}
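
A minimal usage sketch based on the exports above: index.js wires up a named Hyperbrowser export, a default export, and a CommonJS shim, and the config fields mirror what client.js reads. Values shown are placeholders.

import { Hyperbrowser } from "@hyperbrowser/sdk";
// CommonJS alternative: const Hyperbrowser = require("@hyperbrowser/sdk");

// apiKey falls back to the HYPERBROWSER_API_KEY env var; baseUrl defaults to
// "https://app.hyperbrowser.ai" and timeout to 30000 ms (see client.js above).
const client = new Hyperbrowser({ apiKey: process.env.HYPERBROWSER_API_KEY });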
package/dist/services/base.d.ts
ADDED
@@ -0,0 +1,8 @@
+import { RequestInit } from "node-fetch";
+export declare class BaseService {
+    protected readonly apiKey: string;
+    protected readonly baseUrl: string;
+    protected readonly timeout: number;
+    constructor(apiKey: string, baseUrl: string, timeout?: number);
+    protected request<T>(path: string, init?: RequestInit, params?: Record<string, string | number | undefined>): Promise<T>;
+}
package/dist/services/base.js
ADDED
@@ -0,0 +1,68 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseService = void 0;
+const node_fetch_1 = __importDefault(require("node-fetch"));
+const client_1 = require("../client");
+class BaseService {
+    constructor(apiKey, baseUrl, timeout = 30000) {
+        this.apiKey = apiKey;
+        this.baseUrl = baseUrl;
+        this.timeout = timeout;
+    }
+    async request(path, init, params) {
+        try {
+            const url = new URL(`${this.baseUrl}/api${path}`);
+            if (params) {
+                Object.entries(params).forEach(([key, value]) => {
+                    if (value !== undefined) {
+                        url.searchParams.append(key, value.toString());
+                    }
+                });
+            }
+            const headerKeys = Object.keys(init?.headers || {});
+            const contentTypeKey = headerKeys.find((key) => key.toLowerCase() === "content-type");
+            const response = await (0, node_fetch_1.default)(url.toString(), {
+                ...init,
+                timeout: this.timeout,
+                headers: {
+                    "x-api-key": this.apiKey,
+                    ...(contentTypeKey && init?.headers
+                        ? { "content-type": init.headers[contentTypeKey] }
+                        : { "content-type": "application/json" }),
+                    ...init?.headers,
+                },
+            });
+            if (!response.ok) {
+                let errorMessage;
+                try {
+                    const errorData = await response.json();
+                    errorMessage =
+                        errorData.message || errorData.error || `HTTP error! status: ${response.status}`;
+                }
+                catch {
+                    errorMessage = `HTTP error! status: ${response.status}`;
+                }
+                throw new client_1.HyperbrowserError(errorMessage, response.status);
+            }
+            if (response.headers.get("content-length") === "0") {
+                return {};
+            }
+            try {
+                return (await response.json());
+            }
+            catch {
+                throw new client_1.HyperbrowserError("Failed to parse JSON response", response.status);
+            }
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(error instanceof Error ? error.message : "Unknown error occurred", undefined);
+        }
+    }
+}
+exports.BaseService = BaseService;
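
For reference, request() resolves every path against `${baseUrl}/api` and always sends the x-api-key header. Below is a rough hand-written equivalent of a GET issued through it; a sketch only, assuming node-fetch v2 semantics (which the timeout option used above implies).

import fetch from "node-fetch";

// Roughly what BaseService.request("/extensions/list", { method: "GET" }) sends, per base.js above.
async function rawGet(apiKey: string) {
  const res = await fetch("https://app.hyperbrowser.ai/api/extensions/list", {
    timeout: 30000, // node-fetch v2 option, mirroring the SDK default
    headers: { "x-api-key": apiKey, "content-type": "application/json" },
  });
  if (!res.ok) throw new Error(`HTTP error! status: ${res.status}`);
  return res.json();
}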
package/dist/services/crawl.d.ts
ADDED
@@ -0,0 +1,21 @@
+import { CrawlJobResponse, GetCrawlJobParams, StartCrawlJobParams, StartCrawlJobResponse } from "../types/crawl";
+import { BaseService } from "./base";
+export declare class CrawlService extends BaseService {
+    /**
+     * Start a new crawl job
+     * @param params The parameters for the crawl job
+     */
+    start(params: StartCrawlJobParams): Promise<StartCrawlJobResponse>;
+    /**
+     * Get the status of a crawl job
+     * @param id The ID of the crawl job to get
+     * @param params Optional parameters to filter the crawl job
+     */
+    get(id: string, params?: GetCrawlJobParams): Promise<CrawlJobResponse>;
+    /**
+     * Start a crawl job and wait for it to complete
+     * @param params The parameters for the crawl job
+     * @param returnAllPages Whether to return all pages in the crawl job response
+     */
+    startAndWait(params: StartCrawlJobParams, returnAllPages?: boolean): Promise<CrawlJobResponse>;
+}
package/dist/services/crawl.js
ADDED
@@ -0,0 +1,82 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CrawlService = void 0;
+const base_1 = require("./base");
+const utils_1 = require("../utils");
+const client_1 = require("../client");
+class CrawlService extends base_1.BaseService {
+    /**
+     * Start a new crawl job
+     * @param params The parameters for the crawl job
+     */
+    async start(params) {
+        try {
+            return await this.request("/crawl", {
+                method: "POST",
+                body: JSON.stringify(params),
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to start crawl job", undefined);
+        }
+    }
+    /**
+     * Get the status of a crawl job
+     * @param id The ID of the crawl job to get
+     * @param params Optional parameters to filter the crawl job
+     */
+    async get(id, params) {
+        try {
+            return await this.request(`/crawl/${id}`, undefined, {
+                page: params?.page,
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get crawl job ${id}`, undefined);
+        }
+    }
+    /**
+     * Start a crawl job and wait for it to complete
+     * @param params The parameters for the crawl job
+     * @param returnAllPages Whether to return all pages in the crawl job response
+     */
+    async startAndWait(params, returnAllPages = true) {
+        const job = await this.start(params);
+        const jobId = job.jobId;
+        if (!jobId) {
+            throw new client_1.HyperbrowserError("Failed to start crawl job, could not get job ID");
+        }
+        let jobResponse;
+        while (true) {
+            jobResponse = await this.get(jobId);
+            if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+                break;
+            }
+            await (0, utils_1.sleep)(2000);
+        }
+        if (!returnAllPages) {
+            return jobResponse;
+        }
+        while (jobResponse.currentPageBatch < jobResponse.totalPageBatches) {
+            const tmpJobResponse = await this.get(jobId, {
+                page: jobResponse.currentPageBatch + 1,
+            });
+            if (tmpJobResponse.data) {
+                jobResponse.data?.push(...tmpJobResponse.data);
+            }
+            jobResponse.currentPageBatch = tmpJobResponse.currentPageBatch;
+            jobResponse.totalCrawledPages = tmpJobResponse.totalCrawledPages;
+            jobResponse.totalPageBatches = tmpJobResponse.totalPageBatches;
+            jobResponse.batchSize = tmpJobResponse.batchSize;
+            await (0, utils_1.sleep)(500);
+        }
+        return jobResponse;
+    }
+}
+exports.CrawlService = CrawlService;
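
A sketch of the polling helper in use, with a client instance as constructed earlier. The exact shape of StartCrawlJobParams lives in types/crawl.d.ts, which is not shown in this section, so the url and maxPages fields below are assumptions.

// startAndWait polls every 2 s until the job is "completed" or "failed",
// then pages through the remaining batches when returnAllPages is true.
const crawl = await client.crawl.startAndWait(
  { url: "https://example.com", maxPages: 10 }, // field names assumed
  true // returnAllPages
);
console.log(crawl.status, crawl.data?.length);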
package/dist/services/extensions.d.ts
ADDED
@@ -0,0 +1,13 @@
+import { CreateExtensionParams, CreateExtensionResponse, ListExtensionsResponse } from "../types/extension";
+import { BaseService } from "./base";
+export declare class ExtensionService extends BaseService {
+    /**
+     * Upload an extension to hyperbrowser
+     * @param params Configuration parameters for the new extension
+     */
+    create(params: CreateExtensionParams): Promise<CreateExtensionResponse>;
+    /**
+     * List all uploaded extensions for the account
+     */
+    list(): Promise<ListExtensionsResponse>;
+}
package/dist/services/extensions.js
ADDED
@@ -0,0 +1,74 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ExtensionService = void 0;
+const client_1 = require("../client");
+const base_1 = require("./base");
+const form_data_1 = __importDefault(require("form-data"));
+const promises_1 = __importDefault(require("node:fs/promises"));
+const node_path_1 = __importDefault(require("node:path"));
+async function checkFileExists(filePath) {
+    try {
+        await promises_1.default.access(filePath, promises_1.default.constants.R_OK);
+        const extension = node_path_1.default.extname(filePath);
+        if (extension !== ".zip") {
+            throw new client_1.HyperbrowserError("Extension file provided is not zipped", undefined);
+        }
+    }
+    catch (err) {
+        if (err instanceof client_1.HyperbrowserError) {
+            throw err;
+        }
+        throw new client_1.HyperbrowserError("Could not find extension file", undefined);
+    }
+}
+class ExtensionService extends base_1.BaseService {
+    /**
+     * Upload an extension to hyperbrowser
+     * @param params Configuration parameters for the new extension
+     */
+    async create(params) {
+        try {
+            await checkFileExists(params.filePath);
+            const form = new form_data_1.default();
+            form.append("file", await promises_1.default.readFile(params.filePath), {
+                filename: node_path_1.default.basename(params.filePath),
+                contentType: "application/zip",
+            });
+            if (params.name) {
+                form.append("name", params.name);
+            }
+            const response = await this.request("/extensions/add", {
+                method: "POST",
+                body: form,
+                headers: form.getHeaders(),
+            });
+            return response;
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to upload extension", undefined);
+        }
+    }
+    /**
+     * List all uploaded extensions for the account
+     */
+    async list() {
+        try {
+            return await this.request("/extensions/list", { method: "GET" });
+        }
+        catch (err) {
+            if (err instanceof client_1.HyperbrowserError) {
+                throw err;
+            }
+            else {
+                throw new client_1.HyperbrowserError("Could not list extensions", undefined);
+            }
+        }
+    }
+}
+exports.ExtensionService = ExtensionService;
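
Usage sketch for the upload path above: filePath and name are the fields create() reads, and the file must be a readable .zip, which is sent as multipart form data.

// create() verifies the path is readable and ends in ".zip" before uploading it.
const ext = await client.extensions.create({
  filePath: "./my-extension.zip",
  name: "my-extension", // optional; appended to the form only when present
});
const extensions = await client.extensions.list();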
package/dist/services/extract.d.ts
ADDED
@@ -0,0 +1,20 @@
+import { BaseService } from "./base";
+import { ExtractJobResponse, StartExtractJobResponse } from "../types/extract";
+import { StartExtractJobParams } from "../types/extract";
+export declare class ExtractService extends BaseService {
+    /**
+     * Start a new extract job
+     * @param params The parameters for the extract job
+     */
+    start(params: StartExtractJobParams): Promise<StartExtractJobResponse>;
+    /**
+     * Get the status of an extract job
+     * @param id The ID of the extract job to get
+     */
+    get(id: string): Promise<ExtractJobResponse>;
+    /**
+     * Start an extract job and wait for it to complete
+     * @param params The parameters for the extract job
+     */
+    startAndWait(params: StartExtractJobParams): Promise<ExtractJobResponse>;
+}
package/dist/services/extract.js
ADDED
@@ -0,0 +1,78 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ExtractService = void 0;
+const zod_to_json_schema_1 = require("zod-to-json-schema");
+const base_1 = require("./base");
+const utils_1 = require("../utils");
+const client_1 = require("../client");
+const isZodSchema = (schema) => {
+    return (schema &&
+        typeof schema === "object" &&
+        "_def" in schema &&
+        "parse" in schema &&
+        typeof schema.parse === "function");
+};
+class ExtractService extends base_1.BaseService {
+    /**
+     * Start a new extract job
+     * @param params The parameters for the extract job
+     */
+    async start(params) {
+        try {
+            if (!params.schema && !params.prompt) {
+                throw new client_1.HyperbrowserError("Either schema or prompt must be provided");
+            }
+            if (params.schema) {
+                if (isZodSchema(params.schema)) {
+                    params.schema = (0, zod_to_json_schema_1.zodToJsonSchema)(params.schema);
+                }
+            }
+            return await this.request("/extract", {
+                method: "POST",
+                body: JSON.stringify(params),
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to start extract job", undefined);
+        }
+    }
+    /**
+     * Get the status of an extract job
+     * @param id The ID of the extract job to get
+     */
+    async get(id) {
+        try {
+            return await this.request(`/extract/${id}`);
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get extract job ${id}`, undefined);
+        }
+    }
+    /**
+     * Start an extract job and wait for it to complete
+     * @param params The parameters for the extract job
+     */
+    async startAndWait(params) {
+        const job = await this.start(params);
+        const jobId = job.jobId;
+        if (!jobId) {
+            throw new client_1.HyperbrowserError("Failed to start extract job, could not get job ID");
+        }
+        let jobResponse;
+        while (true) {
+            jobResponse = await this.get(jobId);
+            if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+                break;
+            }
+            await (0, utils_1.sleep)(2000);
+        }
+        return jobResponse;
+    }
+}
+exports.ExtractService = ExtractService;
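
start() requires either a prompt or a schema and converts Zod schemas to JSON Schema via zod-to-json-schema before sending. A sketch follows; the urls field is an assumption, since StartExtractJobParams is defined in types/extract.d.ts, which is not shown here.

import { z } from "zod";

const extraction = await client.extract.startAndWait({
  urls: ["https://example.com"], // field name assumed
  schema: z.object({ title: z.string(), price: z.number() }), // converted with zodToJsonSchema
  // prompt: "...", // alternatively, a prompt can be supplied instead of a schema
});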
package/dist/services/profiles.d.ts
ADDED
@@ -0,0 +1,19 @@
+import { BaseService } from "./base";
+import { ProfileResponse, CreateProfileResponse } from "../types/profile";
+import { BasicResponse } from "../types";
+export declare class ProfilesService extends BaseService {
+    /**
+     * Create a new profile
+     */
+    create(): Promise<CreateProfileResponse>;
+    /**
+     * Get details of an existing profile
+     * @param id The ID of the profile to get
+     */
+    get(id: string): Promise<ProfileResponse>;
+    /**
+     * Delete an existing profile
+     * @param id The ID of the profile to delete
+     */
+    delete(id: string): Promise<BasicResponse>;
+}
package/dist/services/profiles.js
ADDED
@@ -0,0 +1,56 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ProfilesService = void 0;
+const base_1 = require("./base");
+const client_1 = require("../client");
+class ProfilesService extends base_1.BaseService {
+    /**
+     * Create a new profile
+     */
+    async create() {
+        try {
+            return await this.request("/profile", {
+                method: "POST",
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to create profile", undefined);
+        }
+    }
+    /**
+     * Get details of an existing profile
+     * @param id The ID of the profile to get
+     */
+    async get(id) {
+        try {
+            return await this.request(`/profile/${id}`);
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get profile ${id}`, undefined);
+        }
+    }
+    /**
+     * Delete an existing profile
+     * @param id The ID of the profile to delete
+     */
+    async delete(id) {
+        try {
+            return await this.request(`/profile/${id}`, {
+                method: "DELETE",
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to delete profile ${id}`, undefined);
+        }
+    }
+}
+exports.ProfilesService = ProfilesService;
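
Profile lifecycle sketch. CreateProfileResponse is defined in types/profile.d.ts (not shown), so the id field below is an assumption.

const created = await client.profiles.create();
const profile = await client.profiles.get(created.id); // id field assumed
await client.profiles.delete(created.id);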
package/dist/services/scrape.d.ts
ADDED
@@ -0,0 +1,19 @@
+import { ScrapeJobResponse, StartScrapeJobParams, StartScrapeJobResponse } from "../types/scrape";
+import { BaseService } from "./base";
+export declare class ScrapeService extends BaseService {
+    /**
+     * Start a new scrape job
+     * @param params The parameters for the scrape job
+     */
+    start(params: StartScrapeJobParams): Promise<StartScrapeJobResponse>;
+    /**
+     * Get the status of a scrape job
+     * @param id The ID of the scrape job to get
+     */
+    get(id: string): Promise<ScrapeJobResponse>;
+    /**
+     * Start a scrape job and wait for it to complete
+     * @param params The parameters for the scrape job
+     */
+    startAndWait(params: StartScrapeJobParams): Promise<ScrapeJobResponse>;
+}
package/dist/services/scrape.js
ADDED
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ScrapeService = void 0;
+const base_1 = require("./base");
+const utils_1 = require("../utils");
+const client_1 = require("../client");
+class ScrapeService extends base_1.BaseService {
+    /**
+     * Start a new scrape job
+     * @param params The parameters for the scrape job
+     */
+    async start(params) {
+        try {
+            return await this.request("/scrape", {
+                method: "POST",
+                body: JSON.stringify(params),
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to start scrape job", undefined);
+        }
+    }
+    /**
+     * Get the status of a scrape job
+     * @param id The ID of the scrape job to get
+     */
+    async get(id) {
+        try {
+            return await this.request(`/scrape/${id}`);
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get scrape job ${id}`, undefined);
+        }
+    }
+    /**
+     * Start a scrape job and wait for it to complete
+     * @param params The parameters for the scrape job
+     */
+    async startAndWait(params) {
+        const job = await this.start(params);
+        const jobId = job.jobId;
+        if (!jobId) {
+            throw new client_1.HyperbrowserError("Failed to start scrape job, could not get job ID");
+        }
+        let jobResponse;
+        while (true) {
+            jobResponse = await this.get(jobId);
+            if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+                break;
+            }
+            await (0, utils_1.sleep)(2000);
+        }
+        return jobResponse;
+    }
+}
+exports.ScrapeService = ScrapeService;
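
Same start/poll pattern as the crawl service. StartScrapeJobParams comes from types/scrape.d.ts (not shown), so the url field is an assumption.

const scrape = await client.scrape.startAndWait({ url: "https://example.com" }); // field name assumed
if (scrape.status === "completed") {
  console.log(scrape.data); // shape defined by ScrapeJobResponse in types/scrape.d.ts
}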
package/dist/services/sessions.d.ts
ADDED
@@ -0,0 +1,29 @@
+import { BasicResponse, CreateSessionParams, SessionDetail, SessionListParams, SessionListResponse, SessionRecording } from "../types/session";
+import { BaseService } from "./base";
+export declare class SessionsService extends BaseService {
+    /**
+     * Create a new browser session
+     * @param params Configuration parameters for the new session
+     */
+    create(params?: CreateSessionParams): Promise<SessionDetail>;
+    /**
+     * Get details of an existing session
+     * @param id The ID of the session to get
+     */
+    get(id: string): Promise<SessionDetail>;
+    /**
+     * Stop a running session
+     * @param id The ID of the session to stop
+     */
+    stop(id: string): Promise<BasicResponse>;
+    /**
+     * List all sessions with optional filtering
+     * @param params Optional parameters to filter the sessions
+     */
+    list(params?: SessionListParams): Promise<SessionListResponse>;
+    /**
+     * Get the recording of a session
+     * @param id The ID of the session to get the recording from
+     */
+    getRecording(id: string): Promise<SessionRecording[]>;
+}
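
A session lifecycle sketch against the declarations above. SessionDetail is defined in types/session.d.ts (not shown), so the id field below is an assumption.

const session = await client.sessions.create(); // CreateSessionParams is optional
try {
  // ...connect a browser or run work against the session here...
} finally {
  await client.sessions.stop(session.id); // id field assumed
}
const recording = await client.sessions.getRecording(session.id);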