hyperbrowser 0.4.0__tar.gz → 0.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hyperbrowser might be problematic; see the registry's advisory page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hyperbrowser
3
- Version: 0.4.0
3
+ Version: 0.5.0
4
4
  Summary: Python SDK for hyperbrowser
5
5
  Home-page: https://github.com/hyperbrowserai/python-sdk
6
6
  License: MIT
@@ -1,4 +1,10 @@
1
1
  from typing import Optional
2
+
3
+ from hyperbrowser.models.scrape import (
4
+ ScrapeJobResponse,
5
+ StartScrapeJobParams,
6
+ StartScrapeJobResponse,
7
+ )
2
8
  from ..transport.async_transport import AsyncTransport
3
9
  from .base import HyperbrowserBase
4
10
  from ..models.session import (
@@ -45,6 +51,19 @@ class AsyncHyperbrowser(HyperbrowserBase):
45
51
  )
46
52
  return SessionListResponse(**response.data)
47
53
 
54
+ async def start_scrape_job(
55
+ self, params: StartScrapeJobParams
56
+ ) -> StartScrapeJobResponse:
57
+ response = await self.transport.post(
58
+ self._build_url("/scrape"),
59
+ data=params.model_dump(exclude_none=True, by_alias=True),
60
+ )
61
+ return StartScrapeJobResponse(**response.data)
62
+
63
+ async def get_scrape_job(self, job_id: str) -> ScrapeJobResponse:
64
+ response = await self.transport.get(self._build_url(f"/api/scrape/{job_id}"))
65
+ return ScrapeJobResponse(**response.data)
66
+
48
67
  async def close(self) -> None:
49
68
  await self.transport.close()
50
69
 
@@ -1,4 +1,10 @@
1
1
  from typing import Optional
2
+
3
+ from hyperbrowser.models.scrape import (
4
+ ScrapeJobResponse,
5
+ StartScrapeJobParams,
6
+ StartScrapeJobResponse,
7
+ )
2
8
  from ..transport.sync import SyncTransport
3
9
  from .base import HyperbrowserBase
4
10
  from ..models.session import (
@@ -43,5 +49,16 @@ class Hyperbrowser(HyperbrowserBase):
43
49
  )
44
50
  return SessionListResponse(**response.data)
45
51
 
52
+ def start_scrape_job(self, params: StartScrapeJobParams) -> StartScrapeJobResponse:
53
+ response = self.transport.post(
54
+ self._build_url("/scrape"),
55
+ data=params.model_dump(exclude_none=True, by_alias=True),
56
+ )
57
+ return StartScrapeJobResponse(**response.data)
58
+
59
+ def get_scrape_job(self, job_id: str) -> ScrapeJobResponse:
60
+ response = self.transport.get(self._build_url(f"/api/scrape/{job_id}"))
61
+ return ScrapeJobResponse(**response.data)
62
+
46
63
  def close(self) -> None:
47
64
  self.transport.close()
@@ -0,0 +1,72 @@
1
+ from typing import Literal, Optional
2
+ from pydantic import BaseModel, ConfigDict, Field
3
+
4
# Lifecycle states a scrape job can report, as returned by the API.
ScrapeJobStatus = Literal["pending", "running", "completed", "failed"]
5
+
6
+
7
class StartScrapeJobParams(BaseModel):
    """
    Parameters for creating a new scrape job.
    """

    # Fixed: `populate_by_alias` is not a recognized pydantic v2 ConfigDict
    # key; the intended setting is `populate_by_name`, which allows fields
    # to be populated by their Python name as well as their alias.
    model_config = ConfigDict(
        populate_by_name=True,
    )

    # Target URL to scrape.
    url: str
17
+
18
+
19
class StartScrapeJobResponse(BaseModel):
    """
    Response from creating a scrape job.
    """

    # Fixed: `populate_by_alias` is not a recognized pydantic v2 ConfigDict
    # key; `populate_by_name` is the intended setting and lets this model be
    # constructed from either `job_id` or the wire alias `jobId`.
    model_config = ConfigDict(
        populate_by_name=True,
    )

    # Server-assigned job identifier ("jobId" on the wire).
    job_id: str = Field(alias="jobId")
29
+
30
+
31
class ScrapeJobMetadata(BaseModel):
    """
    Metadata for the scraped site.
    """

    # Fixed: `populate_by_alias` is not a recognized pydantic v2 ConfigDict
    # key; `populate_by_name` is the intended setting so fields can be
    # populated via their Python names as well as the camelCase aliases.
    model_config = ConfigDict(
        populate_by_name=True,
    )

    # Basic document metadata.
    title: str
    description: str
    robots: str
    # Open Graph properties (camelCase on the wire).
    og_title: str = Field(alias="ogTitle")
    og_description: str = Field(alias="ogDescription")
    og_url: str = Field(alias="ogUrl")
    og_image: str = Field(alias="ogImage")
    og_locale_alternate: list[str] = Field(alias="ogLocaleAlternate")
    og_site_name: str = Field(alias="ogSiteName")
    # URL the content was scraped from; note the all-caps "URL" in the alias.
    source_url: str = Field(alias="sourceURL")
50
+
51
+
52
class ScrapeJobData(BaseModel):
    """
    Data from a scraped site.
    """

    # Page metadata (title, description, Open Graph fields, source URL).
    metadata: ScrapeJobMetadata
    # Scraped page content rendered as markdown.
    markdown: str
59
+
60
+
61
class ScrapeJobResponse(BaseModel):
    """
    Response from getting a scrape job.
    """

    # Fixed: `populate_by_alias` is not a recognized pydantic v2 ConfigDict
    # key; `populate_by_name` is the intended setting.
    model_config = ConfigDict(
        populate_by_name=True,
    )

    # Current lifecycle state of the job.
    status: ScrapeJobStatus
    # Error message, when the API reports one.
    error: Optional[str] = None
    # Scraped result; presumably present only once the job completed — TODO confirm.
    data: Optional[ScrapeJobData] = None
@@ -96,10 +96,8 @@ class ScreenConfig(BaseModel):
96
96
  Screen configuration parameters for browser session.
97
97
  """
98
98
 
99
- max_width: int = Field(default=1280, le=4096, serialization_alias="maxWidth")
100
- max_height: int = Field(default=720, le=4096, serialization_alias="maxHeight")
101
- min_width: int = Field(default=800, ge=360, serialization_alias="minWidth")
102
- min_height: int = Field(default=480, ge=360, serialization_alias="minHeight")
99
+ width: int = Field(default=1280, le=3840, ge=640, serialization_alias="width")
100
+ height: int = Field(default=720, le=2160, ge=360, serialization_alias="height")
103
101
 
104
102
 
105
103
  class CreateSessionParams(BaseModel):
@@ -111,6 +109,8 @@ class CreateSessionParams(BaseModel):
111
109
  populate_by_alias=True,
112
110
  )
113
111
 
112
+ use_stealth: bool = Field(default=False, serialization_alias="useStealth")
113
+ use_proxy: bool = Field(default=False, serialization_alias="useProxy")
114
114
  proxy_server: Optional[str] = Field(default=None, serialization_alias="proxyServer")
115
115
  proxy_server_password: Optional[str] = Field(
116
116
  default=None, serialization_alias="proxyServerPassword"
@@ -128,3 +128,7 @@ class CreateSessionParams(BaseModel):
128
128
  platform: Optional[List[Platform]] = Field(default=None)
129
129
  locales: List[ISO639_1] = Field(default=["en"])
130
130
  screen: Optional[ScreenConfig] = Field(default=None)
131
+ solve_captchas: bool = Field(default=False, serialization_alias="solveCaptchas")
132
+ adblock: bool = Field(default=False, serialization_alias="adblock")
133
+ trackers: bool = Field(default=False, serialization_alias="trackers")
134
+ annoyances: bool = Field(default=False, serialization_alias="annoyances")
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "hyperbrowser"
3
- version = "0.4.0"
3
+ version = "0.5.0"
4
4
  description = "Python SDK for hyperbrowser"
5
5
  authors = ["Nikhil Shahi <nshahi1998@gmail.com>"]
6
6
  license = "MIT"
File without changes
File without changes