firecrawl-py 2.6.0__py3-none-any.whl → 2.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of firecrawl-py might be problematic.

Files changed (52)
  1. build/lib/firecrawl/__init__.py +1 -1
  2. build/lib/firecrawl/firecrawl.py +11 -10
  3. firecrawl/__init__.py +1 -1
  4. firecrawl/firecrawl.py +11 -10
  5. {firecrawl_py-2.6.0.dist-info → firecrawl_py-2.7.1.dist-info}/METADATA +1 -1
  6. firecrawl_py-2.7.1.dist-info/RECORD +19 -0
  7. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +0 -79
  8. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  9. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +0 -170
  10. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  11. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -440
  12. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +0 -4466
  13. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +0 -98
  14. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +0 -79
  15. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  16. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +0 -170
  17. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  18. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -440
  19. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +0 -4466
  20. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +0 -98
  21. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +0 -79
  22. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  23. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +0 -170
  24. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  25. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -440
  26. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +0 -4466
  27. build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +0 -98
  28. build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +0 -79
  29. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  30. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +0 -170
  31. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  32. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -440
  33. build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +0 -4466
  34. build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +0 -98
  35. build/lib/build/lib/build/lib/firecrawl/__init__.py +0 -79
  36. build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  37. build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +0 -170
  38. build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  39. build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -440
  40. build/lib/build/lib/build/lib/firecrawl/firecrawl.py +0 -4466
  41. build/lib/build/lib/build/lib/tests/test_change_tracking.py +0 -98
  42. build/lib/build/lib/firecrawl/__init__.py +0 -79
  43. build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  44. build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +0 -170
  45. build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  46. build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -440
  47. build/lib/build/lib/firecrawl/firecrawl.py +0 -4466
  48. build/lib/build/lib/tests/test_change_tracking.py +0 -98
  49. firecrawl_py-2.6.0.dist-info/RECORD +0 -61
  50. {firecrawl_py-2.6.0.dist-info → firecrawl_py-2.7.1.dist-info}/LICENSE +0 -0
  51. {firecrawl_py-2.6.0.dist-info → firecrawl_py-2.7.1.dist-info}/WHEEL +0 -0
  52. {firecrawl_py-2.6.0.dist-info → firecrawl_py-2.7.1.dist-info}/top_level.txt +0 -0
build/lib/firecrawl/__init__.py CHANGED
@@ -13,7 +13,7 @@ import os
 
 from .firecrawl import FirecrawlApp, AsyncFirecrawlApp, JsonConfig, ScrapeOptions, ChangeTrackingOptions # noqa
 
-__version__ = "2.6.0"
+__version__ = "2.7.1"
 
 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")

build/lib/firecrawl/firecrawl.py CHANGED
@@ -155,7 +155,7 @@ class ScrapeOptions(pydantic.BaseModel):
     skipTlsVerification: Optional[bool] = None
     removeBase64Images: Optional[bool] = None
     blockAds: Optional[bool] = None
-    proxy: Optional[Literal["basic", "stealth"]] = None
+    proxy: Optional[Literal["basic", "stealth", "auto"]] = None
     changeTrackingOptions: Optional[ChangeTrackingOptions] = None
 
 class WaitAction(pydantic.BaseModel):
@@ -459,7 +459,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -481,7 +481,7 @@ class FirecrawlApp:
             skip_tls_verification (Optional[bool]): Skip TLS verification
             remove_base64_images (Optional[bool]): Remove base64 images
             block_ads (Optional[bool]): Block ads
-            proxy (Optional[Literal["basic", "stealth"]]): Proxy type (basic/stealth)
+            proxy (Optional[Literal["basic", "stealth", "auto"]]): Proxy type (basic/stealth)
             extract (Optional[JsonConfig]): Content extraction settings
             json_options (Optional[JsonConfig]): JSON extraction settings
             actions (Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]]): Actions to perform
@@ -1191,7 +1191,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -1325,7 +1325,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -1457,7 +1457,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -2553,6 +2553,7 @@ class CrawlWatcher:
         """
         async with websockets.connect(
             self.ws_url,
+            max_size=None,
             additional_headers=[("Authorization", f"Bearer {self.app.api_key}")]
         ) as websocket:
             await self._listen(websocket)
@@ -2851,7 +2852,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -2872,7 +2873,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
             skip_tls_verification (Optional[bool]): Skip TLS verification
             remove_base64_images (Optional[bool]): Remove base64 images
             block_ads (Optional[bool]): Block ads
-            proxy (Optional[Literal["basic", "stealth"]]): Proxy type (basic/stealth)
+            proxy (Optional[Literal["basic", "stealth", "auto"]]): Proxy type (basic/stealth)
             extract (Optional[JsonConfig]): Content extraction settings
             json_options (Optional[JsonConfig]): JSON extraction settings
             actions (Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]]): Actions to perform
@@ -2980,7 +2981,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -3119,7 +3120,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
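Functionally, this file's diff amounts to two things: the proxy literal gains "auto", and CrawlWatcher now passes max_size=None to websockets.connect. In the websockets library, max_size bounds the size of incoming messages (1 MiB by default), and None removes that bound, so oversized crawl updates no longer close the socket. A minimal standalone sketch of the same call pattern, with a placeholder URL and token rather than anything taken from the SDK:

import asyncio
import websockets  # third-party "websockets" package, as used in the diff above

async def watch(ws_url: str, api_key: str) -> None:
    # max_size=None lifts the library's default ~1 MiB cap on incoming messages;
    # additional_headers carries the bearer token, mirroring the SDK's call.
    async with websockets.connect(
        ws_url,
        max_size=None,
        additional_headers=[("Authorization", f"Bearer {api_key}")],
    ) as websocket:
        async for message in websocket:
            print(message)

# asyncio.run(watch("wss://example.invalid/crawl/JOB_ID", "fc-PLACEHOLDER"))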
firecrawl/__init__.py CHANGED
@@ -13,7 +13,7 @@ import os
 
 from .firecrawl import FirecrawlApp, AsyncFirecrawlApp, JsonConfig, ScrapeOptions, ChangeTrackingOptions # noqa
 
-__version__ = "2.6.0"
+__version__ = "2.7.1"
 
 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")
firecrawl/firecrawl.py CHANGED
@@ -155,7 +155,7 @@ class ScrapeOptions(pydantic.BaseModel):
     skipTlsVerification: Optional[bool] = None
     removeBase64Images: Optional[bool] = None
     blockAds: Optional[bool] = None
-    proxy: Optional[Literal["basic", "stealth"]] = None
+    proxy: Optional[Literal["basic", "stealth", "auto"]] = None
     changeTrackingOptions: Optional[ChangeTrackingOptions] = None
 
 class WaitAction(pydantic.BaseModel):
@@ -459,7 +459,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -481,7 +481,7 @@ class FirecrawlApp:
             skip_tls_verification (Optional[bool]): Skip TLS verification
             remove_base64_images (Optional[bool]): Remove base64 images
             block_ads (Optional[bool]): Block ads
-            proxy (Optional[Literal["basic", "stealth"]]): Proxy type (basic/stealth)
+            proxy (Optional[Literal["basic", "stealth", "auto"]]): Proxy type (basic/stealth)
             extract (Optional[JsonConfig]): Content extraction settings
             json_options (Optional[JsonConfig]): JSON extraction settings
             actions (Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]]): Actions to perform
@@ -1191,7 +1191,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -1325,7 +1325,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -1457,7 +1457,7 @@ class FirecrawlApp:
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -2553,6 +2553,7 @@ class CrawlWatcher:
         """
         async with websockets.connect(
             self.ws_url,
+            max_size=None,
             additional_headers=[("Authorization", f"Bearer {self.app.api_key}")]
         ) as websocket:
             await self._listen(websocket)
@@ -2851,7 +2852,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -2872,7 +2873,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
             skip_tls_verification (Optional[bool]): Skip TLS verification
             remove_base64_images (Optional[bool]): Remove base64 images
             block_ads (Optional[bool]): Block ads
-            proxy (Optional[Literal["basic", "stealth"]]): Proxy type (basic/stealth)
+            proxy (Optional[Literal["basic", "stealth", "auto"]]): Proxy type (basic/stealth)
             extract (Optional[JsonConfig]): Content extraction settings
             json_options (Optional[JsonConfig]): JSON extraction settings
             actions (Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]]): Actions to perform
@@ -2980,7 +2981,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
@@ -3119,7 +3120,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         skip_tls_verification: Optional[bool] = None,
         remove_base64_images: Optional[bool] = None,
         block_ads: Optional[bool] = None,
-        proxy: Optional[Literal["basic", "stealth"]] = None,
+        proxy: Optional[Literal["basic", "stealth", "auto"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
         actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
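The user-visible effect of the widened literal is that "auto" is now a valid proxy value, both on the ScrapeOptions model and on the scrape keyword arguments changed above. A minimal sketch, assuming the API key sits in FIRECRAWL_API_KEY, the target URL is a placeholder, and scrape_url is among the methods whose signatures are widened above:

import os

from firecrawl import FirecrawlApp, ScrapeOptions

app = FirecrawlApp(api_key=os.environ["FIRECRAWL_API_KEY"])

# "auto" joins "basic" and "stealth" as an accepted value in 2.7.1;
# pydantic validates it against the widened Literal.
options = ScrapeOptions(proxy="auto")
print(options.proxy)

# The same value can be passed directly as a keyword argument.
result = app.scrape_url("https://example.com", proxy="auto")
print(result)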
{firecrawl_py-2.6.0.dist-info → firecrawl_py-2.7.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: firecrawl-py
-Version: 2.6.0
+Version: 2.7.1
 Summary: Python SDK for Firecrawl API
 Home-page: https://github.com/mendableai/firecrawl
 Author: Mendable.ai
firecrawl_py-2.7.1.dist-info/RECORD ADDED
@@ -0,0 +1,19 @@
+build/lib/firecrawl/__init__.py,sha256=jAxgyVgi4Aq94lwkcicF2_Ba2Y9u51-KfskU9d7ynRQ,2612
+build/lib/firecrawl/firecrawl.py,sha256=fsKXa1cHcIIsGUbFAENLlKYZaW349CHQ6O353Uq6ypg,190196
+build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+firecrawl/__init__.py,sha256=jAxgyVgi4Aq94lwkcicF2_Ba2Y9u51-KfskU9d7ynRQ,2612
+firecrawl/firecrawl.py,sha256=fsKXa1cHcIIsGUbFAENLlKYZaW349CHQ6O353Uq6ypg,190196
+firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+firecrawl_py-2.7.1.dist-info/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
+firecrawl_py-2.7.1.dist-info/METADATA,sha256=Y987r2PX3wds2_T6J47titJBzWVKNLBvLvd_d9CVQKw,7168
+firecrawl_py-2.7.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+firecrawl_py-2.7.1.dist-info/top_level.txt,sha256=ytN_R30g2U2qZYFyIm710Z8QeK9FO1Uwa-WPGHXyqjE,27
+firecrawl_py-2.7.1.dist-info/RECORD,,
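Each RECORD entry has the form path,sha256=<digest>,size, where the digest is the file's SHA-256 hash, urlsafe-base64 encoded with the trailing padding stripped (the wheel format from PEP 427). Given the "potentially problematic" flag and the unusual build/lib contents, a short sketch for recomputing an entry from an unpacked wheel (the path is a placeholder):

import base64
import hashlib
from pathlib import Path

def record_digest(path: Path) -> str:
    # Wheel RECORD files store sha256 digests urlsafe-base64 encoded,
    # with the trailing "=" padding removed.
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

# Example against an unpacked wheel directory (placeholder path):
# p = Path("firecrawl/__init__.py")
# print(record_digest(p), p.stat().st_size)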
@@ -1,79 +0,0 @@
-"""
-This is the Firecrawl package.
-
-This package provides a Python SDK for interacting with the Firecrawl API.
-It includes methods to scrape URLs, perform searches, initiate and monitor crawl jobs,
-and check the status of these jobs.
-
-For more information visit https://github.com/firecrawl/
-"""
-
-import logging
-import os
-
-from .firecrawl import FirecrawlApp, AsyncFirecrawlApp, JsonConfig, ScrapeOptions, ChangeTrackingOptions # noqa
-
-__version__ = "2.6.0"
-
-# Define the logger for the Firecrawl project
-logger: logging.Logger = logging.getLogger("firecrawl")
-
-
-def _configure_logger() -> None:
-    """
-    Configure the firecrawl logger for console output.
-
-    The function attaches a handler for console output with a specific format and date
-    format to the firecrawl logger.
-    """
-    try:
-        # Create the formatter
-        formatter = logging.Formatter(
-            "[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
-            datefmt="%Y-%m-%d %H:%M:%S",
-        )
-
-        # Create the console handler and set the formatter
-        console_handler = logging.StreamHandler()
-        console_handler.setFormatter(formatter)
-
-        # Add the console handler to the firecrawl logger
-        logger.addHandler(console_handler)
-    except Exception as e:
-        logger.error("Failed to configure logging: %s", e)
-
-
-def setup_logging() -> None:
-    """Set up logging based on the FIRECRAWL_LOGGING_LEVEL environment variable."""
-    # Check if the firecrawl logger already has a handler
-    if logger.hasHandlers():
-        return # To prevent duplicate logging
-
-    # Check if the FIRECRAWL_LOGGING_LEVEL environment variable is set
-    if not (env := os.getenv("FIRECRAWL_LOGGING_LEVEL", "").upper()):
-        # Attach a no-op handler to prevent warnings about no handlers
-        logger.addHandler(logging.NullHandler())
-        return
-
-    # Attach the console handler to the firecrawl logger
-    _configure_logger()
-
-    # Set the logging level based on the FIRECRAWL_LOGGING_LEVEL environment variable
-    if env == "DEBUG":
-        logger.setLevel(logging.DEBUG)
-    elif env == "INFO":
-        logger.setLevel(logging.INFO)
-    elif env == "WARNING":
-        logger.setLevel(logging.WARNING)
-    elif env == "ERROR":
-        logger.setLevel(logging.ERROR)
-    elif env == "CRITICAL":
-        logger.setLevel(logging.CRITICAL)
-    else:
-        logger.setLevel(logging.INFO)
-        logger.warning("Unknown logging level: %s, defaulting to INFO", env)
-
-
-# Initialize logging configuration when the module is imported
-setup_logging()
-logger.debug("Debugging logger setup")
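Apart from the version string, the deleted module is the same logging bootstrap that the retained firecrawl/__init__.py still ships; only the redundant nested build/lib copies go away. As the code above shows, the level is read from FIRECRAWL_LOGGING_LEVEL once, at import time, so the variable has to be set before the first import. A minimal sketch:

import os

# setup_logging() runs when the package is imported, so set the variable first;
# DEBUG, INFO, WARNING, ERROR, and CRITICAL are the recognised values.
os.environ["FIRECRAWL_LOGGING_LEVEL"] = "DEBUG"

import firecrawl  # the "firecrawl" logger now emits DEBUG-level console output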
@@ -1,170 +0,0 @@
-import importlib.util
-import pytest
-import time
-import os
-from uuid import uuid4
-from dotenv import load_dotenv
-
-load_dotenv()
-
-API_URL = "http://127.0.0.1:3002"
-ABSOLUTE_FIRECRAWL_PATH = "firecrawl/firecrawl.py"
-TEST_API_KEY = os.getenv('TEST_API_KEY')
-
-print(f"ABSOLUTE_FIRECRAWL_PATH: {ABSOLUTE_FIRECRAWL_PATH}")
-
-spec = importlib.util.spec_from_file_location("FirecrawlApp", ABSOLUTE_FIRECRAWL_PATH)
-firecrawl = importlib.util.module_from_spec(spec)
-spec.loader.exec_module(firecrawl)
-FirecrawlApp = firecrawl.FirecrawlApp
-
-def test_no_api_key():
-    with pytest.raises(Exception) as excinfo:
-        invalid_app = FirecrawlApp(api_url=API_URL, version='v0')
-    assert "No API key provided" in str(excinfo.value)
-
-def test_scrape_url_invalid_api_key():
-    invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key", version='v0')
-    with pytest.raises(Exception) as excinfo:
-        invalid_app.scrape_url('https://firecrawl.dev')
-    assert "Unexpected error during scrape URL: Status code 401. Unauthorized: Invalid token" in str(excinfo.value)
-
-# def test_blocklisted_url():
-#     blocklisted_url = "https://facebook.com/fake-test"
-#     app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-#     with pytest.raises(Exception) as excinfo:
-#         app.scrape_url(blocklisted_url)
-#     assert "Unexpected error during scrape URL: Status code 403. Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." in str(excinfo.value)
-
-def test_successful_response_with_valid_preview_token():
-    app = FirecrawlApp(api_url=API_URL, api_key=os.getenv('PREVIEW_TOKEN'), version='v0')
-    response = app.scrape_url('https://roastmywebsite.ai')
-    assert response is not None
-    assert 'content' in response
-    assert "_Roast_" in response['content']
-
-def test_scrape_url_e2e():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.scrape_url('https://roastmywebsite.ai')
-    print(response)
-
-    assert response is not None
-    assert 'content' in response
-    assert 'markdown' in response
-    assert 'metadata' in response
-    assert 'html' not in response
-    assert "_Roast_" in response['content']
-
-def test_successful_response_with_valid_api_key_and_include_html():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.scrape_url('https://roastmywebsite.ai', {'pageOptions': {'includeHtml': True}})
-    assert response is not None
-    assert 'content' in response
-    assert 'markdown' in response
-    assert 'html' in response
-    assert 'metadata' in response
-    assert "_Roast_" in response['content']
-    assert "_Roast_" in response['markdown']
-    assert "<h1" in response['html']
-
-def test_successful_response_for_valid_scrape_with_pdf_file():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.scrape_url('https://arxiv.org/pdf/astro-ph/9301001.pdf')
-    assert response is not None
-    assert 'content' in response
-    assert 'metadata' in response
-    assert 'We present spectrophotometric observations of the Broad Line Radio Galaxy' in response['content']
-
-def test_successful_response_for_valid_scrape_with_pdf_file_without_explicit_extension():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.scrape_url('https://arxiv.org/pdf/astro-ph/9301001')
-    time.sleep(6) # wait for 6 seconds
-    assert response is not None
-    assert 'content' in response
-    assert 'metadata' in response
-    assert 'We present spectrophotometric observations of the Broad Line Radio Galaxy' in response['content']
-
-def test_crawl_url_invalid_api_key():
-    invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key", version='v0')
-    with pytest.raises(Exception) as excinfo:
-        invalid_app.crawl_url('https://firecrawl.dev')
-    assert "Unexpected error during start crawl job: Status code 401. Unauthorized: Invalid token" in str(excinfo.value)
-
-# def test_should_return_error_for_blocklisted_url():
-#     app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-#     blocklisted_url = "https://twitter.com/fake-test"
-#     with pytest.raises(Exception) as excinfo:
-#         app.crawl_url(blocklisted_url)
-#     assert "Unexpected error during start crawl job: Status code 403. Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it." in str(excinfo.value)
-
-def test_crawl_url_wait_for_completion_e2e():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.crawl_url('https://roastmywebsite.ai', {'crawlerOptions': {'excludes': ['blog/*']}}, True)
-    assert response is not None
-    assert len(response) > 0
-    assert 'content' in response[0]
-    assert "_Roast_" in response[0]['content']
-
-def test_crawl_url_with_idempotency_key_e2e():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    uniqueIdempotencyKey = str(uuid4())
-    response = app.crawl_url('https://roastmywebsite.ai', {'crawlerOptions': {'excludes': ['blog/*']}}, True, 2, uniqueIdempotencyKey)
-    assert response is not None
-    assert len(response) > 0
-    assert 'content' in response[0]
-    assert "_Roast_" in response[0]['content']
-
-    with pytest.raises(Exception) as excinfo:
-        app.crawl_url('https://firecrawl.dev', {'crawlerOptions': {'excludes': ['blog/*']}}, True, 2, uniqueIdempotencyKey)
-    assert "Conflict: Failed to start crawl job due to a conflict. Idempotency key already used" in str(excinfo.value)
-
-def test_check_crawl_status_e2e():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.crawl_url('https://firecrawl.dev', {'crawlerOptions': {'excludes': ['blog/*']}}, False)
-    assert response is not None
-    assert 'jobId' in response
-
-    time.sleep(30) # wait for 30 seconds
-    status_response = app.check_crawl_status(response['jobId'])
-    assert status_response is not None
-    assert 'status' in status_response
-    assert status_response['status'] == 'completed'
-    assert 'data' in status_response
-    assert len(status_response['data']) > 0
-
-def test_search_e2e():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.search("test query")
-    assert response is not None
-    assert 'content' in response[0]
-    assert len(response) > 2
-
-def test_search_invalid_api_key():
-    invalid_app = FirecrawlApp(api_url=API_URL, api_key="invalid_api_key", version='v0')
-    with pytest.raises(Exception) as excinfo:
-        invalid_app.search("test query")
-    assert "Unexpected error during search: Status code 401. Unauthorized: Invalid token" in str(excinfo.value)
-
-def test_llm_extraction():
-    app = FirecrawlApp(api_url=API_URL, api_key=TEST_API_KEY, version='v0')
-    response = app.scrape_url("https://firecrawl.dev", {
-        'extractorOptions': {
-            'mode': 'llm-extraction',
-            'extractionPrompt': "Based on the information on the page, find what the company's mission is and whether it supports SSO, and whether it is open source",
-            'extractionSchema': {
-                'type': 'object',
-                'properties': {
-                    'company_mission': {'type': 'string'},
-                    'supports_sso': {'type': 'boolean'},
-                    'is_open_source': {'type': 'boolean'}
-                },
-                'required': ['company_mission', 'supports_sso', 'is_open_source']
-            }
-        }
-    })
-    assert response is not None
-    assert 'llm_extraction' in response
-    llm_extraction = response['llm_extraction']
-    assert 'company_mission' in llm_extraction
-    assert isinstance(llm_extraction['supports_sso'], bool)
-    assert isinstance(llm_extraction['is_open_source'], bool)