hyperbrowser 0.16.0__tar.gz → 0.17.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of hyperbrowser might be problematic.

Files changed (33)
  1. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/PKG-INFO +1 -1
  2. hyperbrowser-0.17.0/hyperbrowser/tools/__init__.py +62 -0
  3. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/tools/openai.py +2 -0
  4. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/tools/schema.py +14 -3
  5. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/pyproject.toml +1 -1
  6. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/LICENSE +0 -0
  7. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/README.md +0 -0
  8. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/__init__.py +0 -0
  9. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/async_client.py +0 -0
  10. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/base.py +0 -0
  11. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/async_manager/crawl.py +0 -0
  12. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/async_manager/extension.py +0 -0
  13. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/async_manager/profile.py +0 -0
  14. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/async_manager/scrape.py +0 -0
  15. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/async_manager/session.py +0 -0
  16. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/sync_manager/crawl.py +0 -0
  17. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/sync_manager/extension.py +0 -0
  18. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/sync_manager/profile.py +0 -0
  19. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/sync_manager/scrape.py +0 -0
  20. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/managers/sync_manager/session.py +0 -0
  21. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/client/sync.py +0 -0
  22. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/config.py +0 -0
  23. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/exceptions.py +0 -0
  24. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/models/consts.py +0 -0
  25. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/models/crawl.py +0 -0
  26. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/models/extension.py +0 -0
  27. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/models/profile.py +0 -0
  28. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/models/scrape.py +0 -0
  29. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/models/session.py +0 -0
  30. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/tools/anthropic.py +0 -0
  31. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/transport/async_transport.py +0 -0
  32. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/transport/base.py +0 -0
  33. {hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/transport/sync.py +0 -0
{hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: hyperbrowser
- Version: 0.16.0
+ Version: 0.17.0
  Summary: Python SDK for hyperbrowser
  Home-page: https://github.com/hyperbrowserai/python-sdk
  License: MIT
hyperbrowser-0.17.0/hyperbrowser/tools/__init__.py (new file)
@@ -0,0 +1,62 @@
+ from hyperbrowser.models.crawl import StartCrawlJobParams
+ from hyperbrowser.models.scrape import StartScrapeJobParams
+ from hyperbrowser import Hyperbrowser, AsyncHyperbrowser
+
+ from .openai import (
+     SCRAPE_TOOL_OPENAI,
+     CRAWL_TOOL_OPENAI,
+ )
+ from .anthropic import (
+     SCRAPE_TOOL_ANTHROPIC,
+     CRAWL_TOOL_ANTHROPIC,
+ )
+
+
+ class WebsiteScrapeTool:
+     openai_tool_definition = SCRAPE_TOOL_OPENAI
+     anthropic_tool_definition = SCRAPE_TOOL_ANTHROPIC
+
+     @staticmethod
+     def runnable(hb: Hyperbrowser, params: dict) -> str:
+         resp = hb.scrape.start_and_wait(params=StartScrapeJobParams(**params))
+         return resp.data.markdown if resp.data and resp.data.markdown else ""
+
+     @staticmethod
+     async def async_runnable(hb: AsyncHyperbrowser, params: dict) -> str:
+         resp = await hb.scrape.start_and_wait(params=StartScrapeJobParams(**params))
+         return resp.data.markdown if resp.data and resp.data.markdown else ""
+
+
+ class WebsiteCrawlTool:
+     openai_tool_definition = CRAWL_TOOL_OPENAI
+     anthropic_tool_definition = CRAWL_TOOL_ANTHROPIC
+
+     @staticmethod
+     def runnable(hb: Hyperbrowser, params: dict) -> str:
+         resp = hb.crawl.start_and_wait(params=StartCrawlJobParams(**params))
+         markdown = ""
+         if resp.data:
+             for page in resp.data:
+                 if page.markdown:
+                     markdown += (
+                         f"\n{'-'*50}\nUrl: {page.url}\nMarkdown:\n{page.markdown}\n"
+                     )
+         return markdown
+
+     @staticmethod
+     async def async_runnable(hb: AsyncHyperbrowser, params: dict) -> str:
+         resp = await hb.crawl.start_and_wait(params=StartCrawlJobParams(**params))
+         markdown = ""
+         if resp.data:
+             for page in resp.data:
+                 if page.markdown:
+                     markdown += (
+                         f"\n{'-'*50}\nUrl: {page.url}\nMarkdown:\n{page.markdown}\n"
+                     )
+         return markdown
+
+
+ __all__ = [
+     "WebsiteScrapeTool",
+     "WebsiteCrawlTool",
+ ]
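
The new tools/__init__.py packages the existing OpenAI and Anthropic tool definitions together with runnables that turn a model's tool-call arguments into SDK job params and return markdown. Below is a minimal sketch of wiring WebsiteScrapeTool into an OpenAI tool-calling round trip; the model name, prompt, environment variables, and client construction are illustrative assumptions, not part of this release.

# Hedged sketch: dispatch an OpenAI tool call to WebsiteScrapeTool.
# Assumes OPENAI_API_KEY and HYPERBROWSER_API_KEY are set; model name is illustrative.
import json
import os

from openai import OpenAI
from hyperbrowser import Hyperbrowser
from hyperbrowser.tools import WebsiteScrapeTool

client = OpenAI()
hb = Hyperbrowser(api_key=os.environ["HYPERBROWSER_API_KEY"])  # assumed constructor kwarg

resp = client.chat.completions.create(
    model="gpt-4o",  # illustrative
    messages=[{"role": "user", "content": "Scrape https://example.com and summarize it"}],
    tools=[WebsiteScrapeTool.openai_tool_definition],
)

message = resp.choices[0].message
if message.tool_calls:
    call = message.tool_calls[0]
    if call.function.name == "scrape_webpage":
        # Arguments arrive as a JSON string shaped by SCRAPE_SCHEMA.
        markdown = WebsiteScrapeTool.runnable(hb, json.loads(call.function.arguments))
        print(markdown[:500])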
{hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/tools/openai.py
@@ -54,6 +54,7 @@ SCRAPE_TOOL_OPENAI: ChatCompletionToolParam = {
          "name": "scrape_webpage",
          "description": "Scrape content from a webpage and return the content in markdown format",
          "parameters": SCRAPE_SCHEMA,
+         "strict": True,
      },
  }

@@ -63,5 +64,6 @@ CRAWL_TOOL_OPENAI: ChatCompletionToolParam = {
          "name": "crawl_website",
          "description": "Crawl a website and return the content in markdown format",
          "parameters": CRAWL_SCHEMA,
+         "strict": True,
      },
  }
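
Adding "strict": True opts both definitions into OpenAI's strict function calling, where the arguments the model returns are constrained to match the JSON schema exactly. Strict mode is also what drives the schema changes in hyperbrowser/tools/schema.py below: every property must appear in "required", "additionalProperties" must be False, and defaults are not allowed. A rough, abbreviated sketch of the resulting shape (the real parameters object is SCRAPE_SCHEMA; the trimmed schema here is only for illustration):

# Abbreviated illustration of a strict OpenAI tool definition; not the SDK's actual dict.
STRICT_TOOL_SKETCH = {
    "type": "function",
    "function": {
        "name": "scrape_webpage",
        "description": "Scrape content from a webpage and return the content in markdown format",
        "parameters": {
            "type": "object",
            "properties": {"url": {"type": "string"}},
            "required": ["url"],            # strict mode: every property listed
            "additionalProperties": False,  # strict mode: no extra keys
        },
        "strict": True,
    },
}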
{hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/hyperbrowser/tools/schema.py
@@ -21,6 +21,8 @@ SCRAPE_OPTIONS = {
              "description": "Whether to only return the main content of the page. If true, only the main content of the page will be returned, excluding any headers, navigation menus,footers, or other non-main content.",
          },
      },
+     "required": ["include_tags", "exclude_tags", "only_main_content"],
+     "additionalProperties": False,
  }

  SCRAPE_SCHEMA = {
@@ -32,7 +34,8 @@ SCRAPE_SCHEMA = {
          },
          "scrape_options": SCRAPE_OPTIONS,
      },
-     "required": ["url"],
+     "required": ["url", "scrape_options"],
+     "additionalProperties": False,
  }

  CRAWL_SCHEMA = {
@@ -44,7 +47,6 @@ CRAWL_SCHEMA = {
          },
          "max_pages": {
              "type": "number",
-             "default": 10,
              "description": "The maximum number of pages to crawl",
          },
          "follow_links": {
@@ -71,5 +73,14 @@ CRAWL_SCHEMA = {
          },
          "scrape_options": SCRAPE_OPTIONS,
      },
-     "required": ["url"],
+     "required": [
+         "url",
+         "max_pages",
+         "follow_links",
+         "ignore_sitemap",
+         "exclude_patterns",
+         "include_patterns",
+         "scrape_options",
+     ],
+     "additionalProperties": False,
  }
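
With the default removed and every property required, a strict-mode model has to emit a complete arguments object for crawl_website. A hedged sketch of such a payload, reusing hb from the earlier sketch; the values and the inferred list/boolean types are illustrative, not taken from this diff:

# Illustrative crawl_website arguments under strict mode; keys mirror CRAWL_SCHEMA's
# required list, values are invented for the example.
from hyperbrowser.tools import WebsiteCrawlTool

crawl_args = {
    "url": "https://example.com",
    "max_pages": 10,
    "follow_links": True,
    "ignore_sitemap": False,
    "exclude_patterns": [],
    "include_patterns": [],
    "scrape_options": {
        "include_tags": [],
        "exclude_tags": [],
        "only_main_content": True,
    },
}

# WebsiteCrawlTool.runnable forwards this as StartCrawlJobParams(**crawl_args)
# and concatenates the markdown of every crawled page.
markdown = WebsiteCrawlTool.runnable(hb, crawl_args)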
{hyperbrowser-0.16.0 → hyperbrowser-0.17.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "hyperbrowser"
- version = "0.16.0"
+ version = "0.17.0"
  description = "Python SDK for hyperbrowser"
  authors = ["Nikhil Shahi <nshahi1998@gmail.com>"]
  license = "MIT"