@hyperbrowser/sdk 0.23.0 → 0.25.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1,6 +1,27 @@
-import { ScrapeJobResponse, StartScrapeJobParams, StartScrapeJobResponse } from "../types/scrape";
+import { BatchScrapeJobResponse, GetBatchScrapeJobParams, ScrapeJobResponse, StartBatchScrapeJobParams, StartBatchScrapeJobResponse, StartScrapeJobParams, StartScrapeJobResponse } from "../types/scrape";
 import { BaseService } from "./base";
+export declare class BatchScrapeService extends BaseService {
+    /**
+     * Start a new batch scrape job
+     * @param params The parameters for the batch scrape job
+     */
+    start(params: StartBatchScrapeJobParams): Promise<StartBatchScrapeJobResponse>;
+    /**
+     * Get the status of a batch scrape job
+     * @param id The ID of the batch scrape job to get
+     * @param params Optional parameters to filter the batch scrape job
+     */
+    get(id: string, params?: GetBatchScrapeJobParams): Promise<BatchScrapeJobResponse>;
+    /**
+     * Start a batch scrape job and wait for it to complete
+     * @param params The parameters for the batch scrape job
+     * @param returnAllPages Whether to return all pages in the batch scrape job response
+     */
+    startAndWait(params: StartBatchScrapeJobParams, returnAllPages?: boolean): Promise<BatchScrapeJobResponse>;
+}
 export declare class ScrapeService extends BaseService {
+    readonly batch: BatchScrapeService;
+    constructor(apiKey: string, baseUrl: string, timeout: number);
     /**
      * Start a new scrape job
      * @param params The parameters for the scrape job
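
These declarations hang a nested `batch` service off `ScrapeService`. For a sense of how the new surface reads from the caller's side, here is a minimal sketch; it assumes the SDK's `Hyperbrowser` client (configured via `HyperbrowserConfig`) exposes this service as `client.scrape`, wiring that is not shown in this diff:

```ts
import { Hyperbrowser } from "@hyperbrowser/sdk";

const client = new Hyperbrowser({ apiKey: process.env.HYPERBROWSER_API_KEY! });

// Start a batch job over several URLs and block until it finishes.
// startAndWait defaults returnAllPages to true, so `data` should hold
// every scraped page, not just the first page batch.
const job = await client.scrape.batch.startAndWait({
  urls: ["https://example.com", "https://example.org"],
});
console.log(job.status, job.totalScrapedPages);
```
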
@@ -1,10 +1,111 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ScrapeService = void 0;
+exports.ScrapeService = exports.BatchScrapeService = void 0;
 const base_1 = require("./base");
 const utils_1 = require("../utils");
 const client_1 = require("../client");
+class BatchScrapeService extends base_1.BaseService {
+    /**
+     * Start a new batch scrape job
+     * @param params The parameters for the batch scrape job
+     */
+    async start(params) {
+        try {
+            return await this.request("/scrape/batch", {
+                method: "POST",
+                body: JSON.stringify(params),
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError("Failed to start batch scrape job", undefined);
+        }
+    }
+    /**
+     * Get the status of a batch scrape job
+     * @param id The ID of the batch scrape job to get
+     * @param params Optional parameters to filter the batch scrape job
+     */
+    async get(id, params) {
+        try {
+            return await this.request(`/scrape/batch/${id}`, undefined, {
+                page: params?.page,
+            });
+        }
+        catch (error) {
+            if (error instanceof client_1.HyperbrowserError) {
+                throw error;
+            }
+            throw new client_1.HyperbrowserError(`Failed to get batch scrape job ${id}`, undefined);
+        }
+    }
+    /**
+     * Start a batch scrape job and wait for it to complete
+     * @param params The parameters for the batch scrape job
+     * @param returnAllPages Whether to return all pages in the batch scrape job response
+     */
+    async startAndWait(params, returnAllPages = true) {
+        const job = await this.start(params);
+        const jobId = job.jobId;
+        if (!jobId) {
+            throw new client_1.HyperbrowserError("Failed to start batch scrape job, could not get job ID");
+        }
+        let jobResponse;
+        let failures = 0;
+        while (true) {
+            try {
+                jobResponse = await this.get(jobId);
+                if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+                    break;
+                }
+                failures = 0;
+            }
+            catch (error) {
+                failures++;
+                if (failures >= 5) {
+                    throw new client_1.HyperbrowserError(`Failed to poll batch scrape job ${jobId} after 5 attempts: ${error}`);
+                }
+            }
+            await (0, utils_1.sleep)(2000);
+        }
+        if (!returnAllPages) {
+            return jobResponse;
+        }
+        failures = 0;
+        while (jobResponse.currentPageBatch < jobResponse.totalPageBatches) {
+            try {
+                const tmpJobResponse = await this.get(jobId, {
+                    page: jobResponse.currentPageBatch + 1,
+                    batchSize: 100,
+                });
+                if (tmpJobResponse.data) {
+                    jobResponse.data?.push(...tmpJobResponse.data);
+                }
+                jobResponse.currentPageBatch = tmpJobResponse.currentPageBatch;
+                jobResponse.totalScrapedPages = tmpJobResponse.totalScrapedPages;
+                jobResponse.totalPageBatches = tmpJobResponse.totalPageBatches;
+                jobResponse.batchSize = tmpJobResponse.batchSize;
+                failures = 0;
+            }
+            catch (error) {
+                failures++;
+                if (failures >= 5) {
+                    throw new client_1.HyperbrowserError(`Failed to get batch page ${jobResponse.currentPageBatch + 1} for job ${jobId} after 5 attempts: ${error}`);
+                }
+            }
+            await (0, utils_1.sleep)(500);
+        }
+        return jobResponse;
+    }
+}
+exports.BatchScrapeService = BatchScrapeService;
 class ScrapeService extends base_1.BaseService {
+    constructor(apiKey, baseUrl, timeout) {
+        super(apiKey, baseUrl, timeout);
+        this.batch = new BatchScrapeService(apiKey, baseUrl, timeout);
+    }
     /**
      * Start a new scrape job
      * @param params The parameters for the scrape job
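
As implemented, `startAndWait` polls `get` every 2 seconds until the job reports `completed` or `failed`, tolerating up to 5 consecutive polling errors, and then (unless `returnAllPages` is false) fetches the remaining page batches one at a time with a 500 ms pause between requests. Callers who want progress output can reproduce the polling half themselves with `start` and `get`; a sketch, reusing the hypothetical `client` from the earlier example:

```ts
// Start the job, then poll its status on the same 2-second cadence
// that startAndWait uses internally, logging progress along the way.
const { jobId } = await client.scrape.batch.start({
  urls: ["https://example.com", "https://example.org"],
});

while (true) {
  const snapshot = await client.scrape.batch.get(jobId);
  console.log(`${snapshot.status}: ${snapshot.totalScrapedPages} pages scraped`);
  if (snapshot.status === "completed" || snapshot.status === "failed") {
    break;
  }
  await new Promise((resolve) => setTimeout(resolve, 2000));
}
```
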
@@ -2,7 +2,9 @@ export type ScrapeFormat = "markdown" | "html" | "links" | "screenshot";
 export type ScrapeJobStatus = "pending" | "running" | "completed" | "failed";
 export type ExtractJobStatus = "pending" | "running" | "completed" | "failed";
 export type CrawlJobStatus = "pending" | "running" | "completed" | "failed";
+export type ScrapePageStatus = "completed" | "failed";
 export type CrawlPageStatus = "completed" | "failed";
+export type ScrapeWaitUntil = "load" | "domcontentloaded" | "networkidle";
 export type Country = "AD" | "AE" | "AF" | "AL" | "AM" | "AO" | "AR" | "AT" | "AU" | "AW" | "AZ" | "BA" | "BD" | "BE" | "BG" | "BH" | "BJ" | "BO" | "BR" | "BS" | "BT" | "BY" | "BZ" | "CA" | "CF" | "CH" | "CI" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "EC" | "EE" | "EG" | "ES" | "ET" | "EU" | "FI" | "FJ" | "FR" | "GB" | "GE" | "GH" | "GM" | "GR" | "HK" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IN" | "IQ" | "IR" | "IS" | "IT" | "JM" | "JO" | "JP" | "KE" | "KH" | "KR" | "KW" | "KZ" | "LB" | "LI" | "LR" | "LT" | "LU" | "LV" | "MA" | "MC" | "MD" | "ME" | "MG" | "MK" | "ML" | "MM" | "MN" | "MR" | "MT" | "MU" | "MV" | "MX" | "MY" | "MZ" | "NG" | "NL" | "NO" | "NZ" | "OM" | "PA" | "PE" | "PH" | "PK" | "PL" | "PR" | "PT" | "PY" | "QA" | "RANDOM_COUNTRY" | "RO" | "RS" | "RU" | "SA" | "SC" | "SD" | "SE" | "SG" | "SI" | "SK" | "SN" | "SS" | "TD" | "TG" | "TH" | "TM" | "TN" | "TR" | "TT" | "TW" | "UA" | "UG" | "US" | "UY" | "UZ" | "VE" | "VG" | "VN" | "YE" | "ZA" | "ZM" | "ZW" | "ad" | "ae" | "af" | "al" | "am" | "ao" | "ar" | "at" | "au" | "aw" | "az" | "ba" | "bd" | "be" | "bg" | "bh" | "bj" | "bo" | "br" | "bs" | "bt" | "by" | "bz" | "ca" | "cf" | "ch" | "ci" | "cl" | "cm" | "cn" | "co" | "cr" | "cu" | "cy" | "cz" | "de" | "dj" | "dk" | "dm" | "ec" | "ee" | "eg" | "es" | "et" | "eu" | "fi" | "fj" | "fr" | "gb" | "ge" | "gh" | "gm" | "gr" | "hk" | "hn" | "hr" | "ht" | "hu" | "id" | "ie" | "il" | "in" | "iq" | "ir" | "is" | "it" | "jm" | "jo" | "jp" | "ke" | "kh" | "kr" | "kw" | "kz" | "lb" | "li" | "lr" | "lt" | "lu" | "lv" | "ma" | "mc" | "md" | "me" | "mg" | "mk" | "ml" | "mm" | "mn" | "mr" | "mt" | "mu" | "mv" | "mx" | "my" | "mz" | "ng" | "nl" | "no" | "nz" | "om" | "pa" | "pe" | "ph" | "pk" | "pl" | "pr" | "pt" | "py" | "qa" | "ro" | "rs" | "ru" | "sa" | "sc" | "sd" | "se" | "sg" | "si" | "sk" | "sn" | "ss" | "td" | "tg" | "th" | "tm" | "tn" | "tr" | "tt" | "tw" | "ua" | "ug" | "us" | "uy" | "uz" | "ve" | "vg" | "vn" | "ye" | "za" | "zm" | "zw";
 export type OperatingSystem = "windows" | "android" | "macos" | "linux" | "ios";
 export type Platform = "chrome" | "firefox" | "safari" | "edge";
@@ -1,7 +1,7 @@
 export { HyperbrowserConfig } from "./config";
 export { StartCrawlJobParams, StartCrawlJobResponse, CrawledPage, CrawlJobResponse, GetCrawlJobParams, } from "./crawl";
-export { StartScrapeJobParams, StartScrapeJobResponse, ScrapeJobData, ScrapeJobResponse, } from "./scrape";
+export { StartScrapeJobParams, StartScrapeJobResponse, ScrapeJobData, ScrapeJobResponse, ScrapeOptions, } from "./scrape";
 export { BasicResponse, SessionStatus, Session, SessionDetail, SessionListParams, SessionListResponse, ScreenConfig, CreateSessionParams, } from "./session";
 export { ProfileResponse, CreateProfileResponse, ProfileListParams, ProfileListResponse, } from "./profile";
 export { CreateExtensionParams, CreateExtensionResponse, ListExtensionsResponse, } from "./extension";
-export { ScrapeJobStatus, CrawlJobStatus, Country, ISO639_1, OperatingSystem, Platform, } from "./constants";
+export { ScrapeJobStatus, CrawlJobStatus, Country, ISO639_1, OperatingSystem, Platform, ScrapeFormat, ScrapeWaitUntil, ScrapePageStatus, CrawlPageStatus, } from "./constants";
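
With the widened re-exports, downstream TypeScript can name the option and status types directly instead of re-deriving them. A sketch, assuming a `@hyperbrowser/sdk/types` subpath export (hypothetical; in practice, import from wherever the package root surfaces this types index):

```ts
import type { ScrapeOptions, ScrapeWaitUntil } from "@hyperbrowser/sdk/types";

// "networkidle" narrows to ScrapeWaitUntil; a typo here now fails to compile.
const waitUntil: ScrapeWaitUntil = "networkidle";

const options: ScrapeOptions = {
  formats: ["markdown", "links"],
  waitUntil,
};
```
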
@@ -1,4 +1,4 @@
-import { ScrapeFormat, ScrapeJobStatus } from "./constants";
+import { ScrapeFormat, ScrapeJobStatus, ScrapePageStatus, ScrapeWaitUntil } from "./constants";
 import { CreateSessionParams } from "./session";
 export interface ScrapeOptions {
     formats?: ScrapeFormat[];
@@ -7,6 +7,7 @@ export interface ScrapeOptions {
     onlyMainContent?: boolean;
     waitFor?: number;
     timeout?: number;
+    waitUntil?: ScrapeWaitUntil;
 }
 export interface StartScrapeJobParams {
     url: string;
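
The new `waitUntil` option selects which navigation lifecycle event the scraper waits for before capturing ("load", "domcontentloaded", or "networkidle"). A sketch of threading it through a single scrape job, assuming `StartScrapeJobParams` carries a `scrapeOptions` field like its batch counterpart, that `ScrapeService` retains a start-and-wait helper, and the same hypothetical `client` as before:

```ts
// Wait for the network to go idle before capturing, which helps on
// JavaScript-heavy pages that keep rendering after the load event.
const result = await client.scrape.startAndWait({
  url: "https://example.com",
  scrapeOptions: {
    formats: ["markdown"],
    waitUntil: "networkidle",
    timeout: 30000,
  },
});
```
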
@@ -29,3 +30,35 @@ export interface ScrapeJobResponse {
     data?: ScrapeJobData;
     error?: string;
 }
+export interface StartBatchScrapeJobParams {
+    urls: string[];
+    sessionOptions?: CreateSessionParams;
+    scrapeOptions?: ScrapeOptions;
+}
+export interface ScrapedPage {
+    url: string;
+    status: ScrapePageStatus;
+    error?: string | null;
+    metadata?: Record<string, string | string[]>;
+    markdown?: string;
+    html?: string;
+    links?: string[];
+    screenshot?: string;
+}
+export interface GetBatchScrapeJobParams {
+    page?: number;
+    batchSize?: number;
+}
+export interface StartBatchScrapeJobResponse {
+    jobId: string;
+}
+export interface BatchScrapeJobResponse {
+    jobId: string;
+    status: ScrapeJobStatus;
+    data?: ScrapedPage[];
+    error?: string;
+    totalScrapedPages: number;
+    totalPageBatches: number;
+    currentPageBatch: number;
+    batchSize: number;
+}
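
Since `get` returns one page batch at a time, `BatchScrapeJobResponse` carries the cursor fields (`currentPageBatch`, `totalPageBatches`, `batchSize`) that `startAndWait` uses to stitch results together. A sketch of walking the batches by hand and partitioning pages by their `ScrapePageStatus`; `collectPages` is a hypothetical helper built on the same `client` as above, and the types import assumes the hypothetical subpath from the earlier example:

```ts
import type { ScrapedPage } from "@hyperbrowser/sdk/types";

async function collectPages(jobId: string) {
  const ok: ScrapedPage[] = [];
  const failed: ScrapedPage[] = [];
  let page = 1;
  while (true) {
    // Each call returns one batch plus the cursor describing how many remain.
    const batch = await client.scrape.batch.get(jobId, { page, batchSize: 100 });
    for (const scraped of batch.data ?? []) {
      (scraped.status === "completed" ? ok : failed).push(scraped);
    }
    if (batch.currentPageBatch >= batch.totalPageBatches) {
      break;
    }
    page = batch.currentPageBatch + 1;
  }
  return { ok, failed };
}
```
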
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@hyperbrowser/sdk",
-  "version": "0.23.0",
+  "version": "0.25.0",
   "description": "Node SDK for Hyperbrowser API",
   "author": "",
   "main": "dist/index.js",