firecrawl 2.5.3-py3-none-any.whl → 2.5.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of firecrawl might be problematic.

Files changed (35)
  1. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +79 -0
  2. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  3. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +170 -0
  4. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  5. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +440 -0
  6. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +4454 -0
  7. build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +98 -0
  8. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +79 -0
  9. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  10. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +170 -0
  11. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  12. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +440 -0
  13. build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +4454 -0
  14. build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +98 -0
  15. build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py +79 -0
  16. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
  17. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py +170 -0
  18. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
  19. build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py +440 -0
  20. build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py +4454 -0
  21. build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py +98 -0
  22. build/lib/build/lib/build/lib/firecrawl/__init__.py +1 -1
  23. build/lib/build/lib/build/lib/firecrawl/firecrawl.py +16 -1
  24. build/lib/build/lib/firecrawl/__init__.py +1 -1
  25. build/lib/build/lib/firecrawl/firecrawl.py +16 -1
  26. build/lib/firecrawl/__init__.py +1 -1
  27. build/lib/firecrawl/firecrawl.py +16 -1
  28. firecrawl/__init__.py +1 -1
  29. firecrawl/firecrawl.py +16 -1
  30. {firecrawl-2.5.3.dist-info → firecrawl-2.5.4.dist-info}/METADATA +1 -1
  31. firecrawl-2.5.4.dist-info/RECORD +54 -0
  32. firecrawl-2.5.3.dist-info/RECORD +0 -33
  33. {firecrawl-2.5.3.dist-info → firecrawl-2.5.4.dist-info}/LICENSE +0 -0
  34. {firecrawl-2.5.3.dist-info → firecrawl-2.5.4.dist-info}/WHEEL +0 -0
  35. {firecrawl-2.5.3.dist-info → firecrawl-2.5.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,98 @@
+import unittest
+from unittest.mock import patch, MagicMock
+import json
+import os
+from firecrawl import FirecrawlApp
+
+class TestChangeTracking(unittest.TestCase):
+    @patch('requests.post')
+    def test_change_tracking_format(self, mock_post):
+        mock_response = MagicMock()
+        mock_response.status_code = 200
+        mock_response.json.return_value = {
+            'success': True,
+            'data': {
+                'markdown': 'Test markdown content',
+                'changeTracking': {
+                    'previousScrapeAt': '2023-01-01T00:00:00Z',
+                    'changeStatus': 'changed',
+                    'visibility': 'visible'
+                }
+            }
+        }
+        mock_post.return_value = mock_response
+
+        app = FirecrawlApp(api_key=os.environ.get('TEST_API_KEY', 'dummy-api-key-for-testing'))
+        result = app.scrape_url('https://example.com', {
+            'formats': ['markdown', 'changeTracking']
+        })
+
+        args, kwargs = mock_post.call_args
+        self.assertEqual(kwargs['json']['formats'], ['markdown', 'changeTracking'])
+
+        self.assertEqual(result['changeTracking']['previousScrapeAt'], '2023-01-01T00:00:00Z')
+        self.assertEqual(result['changeTracking']['changeStatus'], 'changed')
+        self.assertEqual(result['changeTracking']['visibility'], 'visible')
+
+    @patch('requests.post')
+    def test_change_tracking_options(self, mock_post):
+        mock_response = MagicMock()
+        mock_response.status_code = 200
+        mock_response.json.return_value = {
+            'success': True,
+            'data': {
+                'markdown': 'Test markdown content',
+                'changeTracking': {
+                    'previousScrapeAt': '2023-01-01T00:00:00Z',
+                    'changeStatus': 'changed',
+                    'visibility': 'visible',
+                    'diff': {
+                        'text': '@@ -1,1 +1,1 @@\n-old content\n+new content',
+                        'json': {
+                            'files': [{
+                                'from': None,
+                                'to': None,
+                                'chunks': [{
+                                    'content': '@@ -1,1 +1,1 @@',
+                                    'changes': [{
+                                        'type': 'del',
+                                        'content': '-old content',
+                                        'del': True,
+                                        'ln': 1
+                                    }, {
+                                        'type': 'add',
+                                        'content': '+new content',
+                                        'add': True,
+                                        'ln': 1
+                                    }]
+                                }]
+                            }]
+                        }
+                    },
+                    'json': {
+                        'title': {
+                            'previous': 'Old Title',
+                            'current': 'New Title'
+                        }
+                    }
+                }
+            }
+        }
+        mock_post.return_value = mock_response
+
+        app = FirecrawlApp(api_key=os.environ.get('TEST_API_KEY', 'dummy-api-key-for-testing'))
+        result = app.scrape_url('https://example.com', {
+            'formats': ['markdown', 'changeTracking'],
+            'changeTrackingOptions': {
+                'modes': ['git-diff', 'json'],
+                'schema': {'type': 'object', 'properties': {'title': {'type': 'string'}}}
+            }
+        })
+
+        args, kwargs = mock_post.call_args
+        self.assertEqual(kwargs['json']['formats'], ['markdown', 'changeTracking'])
+        self.assertEqual(kwargs['json']['changeTrackingOptions']['modes'], ['git-diff', 'json'])
+
+        self.assertEqual(result['changeTracking']['diff']['text'], '@@ -1,1 +1,1 @@\n-old content\n+new content')
+        self.assertEqual(result['changeTracking']['json']['title']['previous'], 'Old Title')
+        self.assertEqual(result['changeTracking']['json']['title']['current'], 'New Title')
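The bundled test above exercises the changeTracking scrape format against mocked requests. As a rough live-usage sketch (not taken from the package: it assumes a real key in a FIRECRAWL_API_KEY environment variable and simply mirrors the call shape used in the test), the same request could look like:

    # Hedged sketch: mirrors the bundled test's call shape, outside its mocks.
    import os
    from firecrawl import FirecrawlApp

    app = FirecrawlApp(api_key=os.environ["FIRECRAWL_API_KEY"])  # assumed env var
    result = app.scrape_url('https://example.com', {
        'formats': ['markdown', 'changeTracking'],
        'changeTrackingOptions': {
            'modes': ['git-diff', 'json'],
            'schema': {'type': 'object', 'properties': {'title': {'type': 'string'}}}
        }
    })
    # On a repeat scrape, changeTracking may carry previousScrapeAt, changeStatus,
    # visibility, and (with these modes) diff/json payloads, as asserted in the test above.
    print(result)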
firecrawl/__init__.py CHANGED
@@ -13,7 +13,7 @@ import os
 
 from .firecrawl import FirecrawlApp, AsyncFirecrawlApp, JsonConfig, ScrapeOptions, ChangeTrackingOptions # noqa
 
-__version__ = "2.5.3"
+__version__ = "2.5.4"
 
 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")
firecrawl/firecrawl.py CHANGED
@@ -161,7 +161,7 @@ class ScrapeOptions(pydantic.BaseModel):
 class WaitAction(pydantic.BaseModel):
     """Wait action to perform during scraping."""
     type: Literal["wait"]
-    milliseconds: int
+    milliseconds: Optional[int] = None
     selector: Optional[str] = None
 
 class ScreenshotAction(pydantic.BaseModel):
@@ -259,6 +259,7 @@ class CrawlParams(pydantic.BaseModel):
     deduplicateSimilarURLs: Optional[bool] = None
     ignoreQueryParameters: Optional[bool] = None
     regexOnFullURL: Optional[bool] = None
+    delay: Optional[int] = None # Delay in seconds between scrapes
 
 class CrawlResponse(pydantic.BaseModel):
     """Response from crawling operations."""
@@ -681,6 +682,7 @@ class FirecrawlApp:
         deduplicate_similar_urls: Optional[bool] = None,
         ignore_query_parameters: Optional[bool] = None,
         regex_on_full_url: Optional[bool] = None,
+        delay: Optional[int] = None,
         poll_interval: Optional[int] = 2,
         idempotency_key: Optional[str] = None,
         **kwargs
@@ -703,6 +705,7 @@ class FirecrawlApp:
             deduplicate_similar_urls (Optional[bool]): Remove similar URLs
             ignore_query_parameters (Optional[bool]): Ignore URL parameters
             regex_on_full_url (Optional[bool]): Apply regex to full URLs
+            delay (Optional[int]): Delay in seconds between scrapes
             poll_interval (Optional[int]): Seconds between status checks (default: 2)
             idempotency_key (Optional[str]): Unique key to prevent duplicate requests
             **kwargs: Additional parameters to pass to the API
@@ -748,6 +751,8 @@ class FirecrawlApp:
             crawl_params['ignoreQueryParameters'] = ignore_query_parameters
         if regex_on_full_url is not None:
             crawl_params['regexOnFullURL'] = regex_on_full_url
+        if delay is not None:
+            crawl_params['delay'] = delay
 
         # Add any additional kwargs
         crawl_params.update(kwargs)
@@ -788,6 +793,7 @@ class FirecrawlApp:
         deduplicate_similar_urls: Optional[bool] = None,
         ignore_query_parameters: Optional[bool] = None,
         regex_on_full_url: Optional[bool] = None,
+        delay: Optional[int] = None,
         idempotency_key: Optional[str] = None,
         **kwargs
     ) -> CrawlResponse:
@@ -854,6 +860,8 @@ class FirecrawlApp:
             crawl_params['ignoreQueryParameters'] = ignore_query_parameters
         if regex_on_full_url is not None:
             crawl_params['regexOnFullURL'] = regex_on_full_url
+        if delay is not None:
+            crawl_params['delay'] = delay
 
         # Add any additional kwargs
         crawl_params.update(kwargs)
@@ -3240,6 +3248,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         deduplicate_similar_urls: Optional[bool] = None,
         ignore_query_parameters: Optional[bool] = None,
         regex_on_full_url: Optional[bool] = None,
+        delay: Optional[int] = None,
         poll_interval: Optional[int] = 2,
         idempotency_key: Optional[str] = None,
         **kwargs
@@ -3262,6 +3271,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
             deduplicate_similar_urls (Optional[bool]): Remove similar URLs
             ignore_query_parameters (Optional[bool]): Ignore URL parameters
             regex_on_full_url (Optional[bool]): Apply regex to full URLs
+            delay (Optional[int]): Delay in seconds between scrapes
             poll_interval (Optional[int]): Seconds between status checks (default: 2)
             idempotency_key (Optional[str]): Unique key to prevent duplicate requests
             **kwargs: Additional parameters to pass to the API
@@ -3307,6 +3317,8 @@ class AsyncFirecrawlApp(FirecrawlApp):
             crawl_params['ignoreQueryParameters'] = ignore_query_parameters
         if regex_on_full_url is not None:
             crawl_params['regexOnFullURL'] = regex_on_full_url
+        if delay is not None:
+            crawl_params['delay'] = delay
 
         # Add any additional kwargs
         crawl_params.update(kwargs)
@@ -3348,6 +3360,7 @@ class AsyncFirecrawlApp(FirecrawlApp):
         deduplicate_similar_urls: Optional[bool] = None,
         ignore_query_parameters: Optional[bool] = None,
         regex_on_full_url: Optional[bool] = None,
+        delay: Optional[int] = None,
         poll_interval: Optional[int] = 2,
         idempotency_key: Optional[str] = None,
         **kwargs
@@ -3412,6 +3425,8 @@ class AsyncFirecrawlApp(FirecrawlApp):
             crawl_params['ignoreQueryParameters'] = ignore_query_parameters
         if regex_on_full_url is not None:
             crawl_params['regexOnFullURL'] = regex_on_full_url
+        if delay is not None:
+            crawl_params['delay'] = delay
 
         # Add any additional kwargs
         crawl_params.update(kwargs)
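The functional change in firecrawl.py is the new crawl delay option, threaded through the crawl methods of FirecrawlApp and AsyncFirecrawlApp shown in the hunks above. A minimal sketch of how it might be passed (hypothetical values and a placeholder key; only parameters that appear in the diff are used):

    # Hedged sketch of the delay option added in 2.5.4; the key is a placeholder.
    from firecrawl import FirecrawlApp

    app = FirecrawlApp(api_key="fc-YOUR-API-KEY")
    crawl_result = app.crawl_url(
        "https://example.com",
        delay=2,          # seconds between scrapes, forwarded as crawl_params['delay']
        poll_interval=2,  # seconds between crawl status checks
    )
    print(crawl_result)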
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: firecrawl
-Version: 2.5.3
+Version: 2.5.4
 Summary: Python SDK for Firecrawl API
 Home-page: https://github.com/mendableai/firecrawl
 Author: Mendable.ai
@@ -0,0 +1,54 @@
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+build/lib/build/lib/build/lib/build/lib/firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+build/lib/build/lib/build/lib/build/lib/firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/build/lib/build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+build/lib/build/lib/build/lib/firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+build/lib/build/lib/build/lib/firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+build/lib/build/lib/firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+build/lib/build/lib/firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+build/lib/firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+build/lib/firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+firecrawl/__init__.py,sha256=L_9rdTon_1D8WYQjyd72zXNnRPvNJBp5PhobDZjz_PI,2612
+firecrawl/firecrawl.py,sha256=k9BFLuGhVCuTcyjYmZ8u5dDez952y08gWCmDuRfTjeQ,189520
+firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
+firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
+tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
+firecrawl-2.5.4.dist-info/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
+firecrawl-2.5.4.dist-info/METADATA,sha256=68zJrP9Dk3Xlof1v0oNNUJg8-_lRJ_Cpw6rS17ObaVA,7165
+firecrawl-2.5.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+firecrawl-2.5.4.dist-info/top_level.txt,sha256=ytN_R30g2U2qZYFyIm710Z8QeK9FO1Uwa-WPGHXyqjE,27
+firecrawl-2.5.4.dist-info/RECORD,,
@@ -1,33 +0,0 @@
1
- build/lib/build/lib/build/lib/firecrawl/__init__.py,sha256=5EQyhHOLTxh0BoUDjtEL5e_0pGgY9tC3XvRMQaHrGdU,2612
2
- build/lib/build/lib/build/lib/firecrawl/firecrawl.py,sha256=SGlpyEryoJlvj3ZThNGhHmfY7YPEFJlg36HxMtw38RM,188863
3
- build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- build/lib/build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
5
- build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
- build/lib/build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
7
- build/lib/build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
8
- build/lib/build/lib/firecrawl/__init__.py,sha256=5EQyhHOLTxh0BoUDjtEL5e_0pGgY9tC3XvRMQaHrGdU,2612
9
- build/lib/build/lib/firecrawl/firecrawl.py,sha256=SGlpyEryoJlvj3ZThNGhHmfY7YPEFJlg36HxMtw38RM,188863
10
- build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
- build/lib/build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
12
- build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
13
- build/lib/build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
14
- build/lib/build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
15
- build/lib/firecrawl/__init__.py,sha256=5EQyhHOLTxh0BoUDjtEL5e_0pGgY9tC3XvRMQaHrGdU,2612
16
- build/lib/firecrawl/firecrawl.py,sha256=SGlpyEryoJlvj3ZThNGhHmfY7YPEFJlg36HxMtw38RM,188863
17
- build/lib/firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
- build/lib/firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
19
- build/lib/firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
- build/lib/firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
21
- build/lib/tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
22
- firecrawl/__init__.py,sha256=5EQyhHOLTxh0BoUDjtEL5e_0pGgY9tC3XvRMQaHrGdU,2612
23
- firecrawl/firecrawl.py,sha256=SGlpyEryoJlvj3ZThNGhHmfY7YPEFJlg36HxMtw38RM,188863
24
- firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
25
- firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
26
- firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
- firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
28
- tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
29
- firecrawl-2.5.3.dist-info/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
30
- firecrawl-2.5.3.dist-info/METADATA,sha256=3Bxqn0U7hv4C0JB4fqv2qu9PWDqXMrnUmA72yxQSOj0,7165
31
- firecrawl-2.5.3.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
32
- firecrawl-2.5.3.dist-info/top_level.txt,sha256=ytN_R30g2U2qZYFyIm710Z8QeK9FO1Uwa-WPGHXyqjE,27
33
- firecrawl-2.5.3.dist-info/RECORD,,