firecrawl 1.11.1__tar.gz → 1.13.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of firecrawl might be problematic.
- {firecrawl-1.11.1 → firecrawl-1.13.0}/PKG-INFO +1 -1
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl/__init__.py +1 -1
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl/firecrawl.py +290 -8
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl.egg-info/PKG-INFO +1 -1
- {firecrawl-1.11.1 → firecrawl-1.13.0}/LICENSE +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/README.md +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl/__tests__/e2e_withAuth/__init__.py +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl/__tests__/e2e_withAuth/test.py +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl/__tests__/v1/e2e_withAuth/__init__.py +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl/__tests__/v1/e2e_withAuth/test.py +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl.egg-info/SOURCES.txt +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl.egg-info/dependency_links.txt +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl.egg-info/requires.txt +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/firecrawl.egg-info/top_level.txt +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/pyproject.toml +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/setup.cfg +0 -0
- {firecrawl-1.11.1 → firecrawl-1.13.0}/setup.py +0 -0
All of the substantive changes land in firecrawl/firecrawl.py:

```diff
@@ -12,7 +12,7 @@ Classes:
 import logging
 import os
 import time
-from typing import Any, Dict, Optional, List, Union
+from typing import Any, Dict, Optional, List, Union, Callable
 import json
 
 import requests
```
```diff
@@ -33,6 +33,46 @@ class SearchParams(pydantic.BaseModel):
     timeout: Optional[int] = 60000
     scrapeOptions: Optional[Dict[str, Any]] = None
 
+class GenerateLLMsTextParams(pydantic.BaseModel):
+    """
+    Parameters for the LLMs.txt generation operation.
+    """
+    maxUrls: Optional[int] = 10
+    showFullText: Optional[bool] = False
+    __experimental_stream: Optional[bool] = None
+
+class DeepResearchParams(pydantic.BaseModel):
+    """
+    Parameters for the deep research operation.
+    """
+    maxDepth: Optional[int] = 7
+    timeLimit: Optional[int] = 270
+    maxUrls: Optional[int] = 20
+    __experimental_streamSteps: Optional[bool] = None
+
+class DeepResearchResponse(pydantic.BaseModel):
+    """
+    Response from the deep research operation.
+    """
+    success: bool
+    id: str
+    error: Optional[str] = None
+
+class DeepResearchStatusResponse(pydantic.BaseModel):
+    """
+    Status response from the deep research operation.
+    """
+    success: bool
+    data: Optional[Dict[str, Any]] = None
+    status: str
+    error: Optional[str] = None
+    expiresAt: str
+    currentDepth: int
+    maxDepth: int
+    activities: List[Dict[str, Any]]
+    sources: List[Dict[str, Any]]
+    summaries: List[str]
+
 class FirecrawlApp:
     class SearchResponse(pydantic.BaseModel):
         """
```
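The four new models follow the same pattern as SearchParams above: optional camelCase fields whose defaults mirror the API. A minimal construction sketch; the import path assumes the module shown in this diff (whether the package re-exports these names from the top-level `firecrawl` package is not visible here):

```python
from firecrawl.firecrawl import GenerateLLMsTextParams, DeepResearchParams

# Unset fields keep the declared defaults (maxUrls=10, showFullText=False
# for LLMs.txt; maxDepth=7, timeLimit=270, maxUrls=20 for deep research).
llms_params = GenerateLLMsTextParams(maxUrls=25, showFullText=True)
research_params = DeepResearchParams(maxDepth=5, timeLimit=120)

# The SDK serializes these with pydantic v1's .dict(), as seen later in the diff.
print(llms_params.dict(exclude_none=True))  # {'maxUrls': 25, 'showFullText': True}
print(research_params.dict(exclude_none=True))
```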
```diff
@@ -54,6 +94,8 @@ class FirecrawlApp:
         enable_web_search: Optional[bool] = False
         # Just for backwards compatibility
         enableWebSearch: Optional[bool] = False
+        show_sources: Optional[bool] = False
+
 
 
 
```
```diff
@@ -135,6 +177,7 @@ class FirecrawlApp:
             f'{self.api_url}{endpoint}',
             headers=headers,
             json=scrape_params,
+            timeout=(scrape_params["timeout"] + 5000 if "timeout" in scrape_params else None),
         )
         if response.status_code == 200:
             try:
```
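This gives the underlying requests call a client-side timeout derived from the scrape payload, so a hung connection can no longer block forever. One caveat worth flagging: Firecrawl's timeout field is expressed in milliseconds (SearchParams above defaults to 60000), while requests interprets its timeout argument in seconds, so timeout + 5000 yields a far larger client budget than "scrape timeout plus five seconds". A sketch of the seconds-based arithmetic, as an illustration only (the helper name is hypothetical, not part of the SDK):

```python
import requests

def post_with_budget(url: str, payload: dict, headers: dict) -> requests.Response:
    # payload["timeout"] is in milliseconds; requests wants seconds.
    budget_ms = payload.get("timeout")
    client_timeout = budget_ms / 1000 + 5 if budget_ms is not None else None
    return requests.post(url, headers=headers, json=payload, timeout=client_timeout)
```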
The next four hunks repair the batch-scrape type hints. The pre-change lines are truncated in this diff view; they are reconstructed below as the PEP 585 `list[str]` spelling, which matches the `List[str]` replacement and fails at import time on Python 3.8.

```diff
@@ -423,7 +466,7 @@ class FirecrawlApp:
         else:
             self._handle_error(response, 'map')
 
-    def batch_scrape_urls(self, urls: list[str],
+    def batch_scrape_urls(self, urls: List[str],
                           params: Optional[Dict[str, Any]] = None,
                           poll_interval: Optional[int] = 2,
                           idempotency_key: Optional[str] = None) -> Any:
```
```diff
@@ -431,7 +474,7 @@ class FirecrawlApp:
         Initiate a batch scrape job for the specified URLs using the Firecrawl API.
 
         Args:
-            urls (list[str]): The URLs to scrape.
+            urls (List[str]): The URLs to scrape.
             params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
             poll_interval (Optional[int]): Time in seconds between status checks when waiting for job completion. Defaults to 2 seconds.
             idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
```
```diff
@@ -466,12 +509,12 @@ class FirecrawlApp:
             self._handle_error(response, 'start batch scrape job')
 
 
-    def async_batch_scrape_urls(self, urls: list[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]:
+    def async_batch_scrape_urls(self, urls: List[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]:
         """
         Initiate a crawl job asynchronously.
 
         Args:
-            urls (list[str]): The URLs to scrape.
+            urls (List[str]): The URLs to scrape.
             params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
             idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
 
```
```diff
@@ -495,12 +538,12 @@ class FirecrawlApp:
         else:
             self._handle_error(response, 'start batch scrape job')
 
-    def batch_scrape_urls_and_watch(self, urls: list[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher':
+    def batch_scrape_urls_and_watch(self, urls: List[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher':
         """
         Initiate a batch scrape job and return a CrawlWatcher to monitor the job via WebSocket.
 
         Args:
-            urls (list[str]): The URLs to scrape.
+            urls (List[str]): The URLs to scrape.
             params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
             idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
 
```
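With the annotations repaired, all three batch-scrape entry points take urls: List[str] and work on Python versions without PEP 585 generics. A minimal usage sketch (the API key and scrape options are placeholders, not values from this diff):

```python
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key

# Blocking: submits the job, then polls every `poll_interval` seconds.
result = app.batch_scrape_urls(
    ["https://example.com", "https://example.org"],
    params={"formats": ["markdown"]},  # illustrative scrape options
)

# Non-blocking: returns the job descriptor immediately.
job = app.async_batch_scrape_urls(["https://example.com"])
print(job.get("id"))
```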
```diff
@@ -631,6 +674,7 @@ class FirecrawlApp:
             **jsonData,
             'allowExternalLinks': params.get('allow_external_links', params.get('allowExternalLinks', False)),
             'enableWebSearch': params.get('enable_web_search', params.get('enableWebSearch', False)),
+            'showSources': params.get('show_sources', params.get('showSources', False)),
             'schema': schema,
             'origin': 'api-sdk'
         }
```
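The extract payload now forwards a sources toggle, accepting either the snake_case or camelCase spelling from the caller, consistent with the two flags above it. A hedged usage sketch (the prompt and URL are illustrative, and the extract signature itself is outside this diff):

```python
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key

data = app.extract(
    ["https://example.com"],
    {
        "prompt": "Extract the page title and author.",
        "show_sources": True,  # also accepted as 'showSources'
    },
)
```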
```diff
@@ -753,6 +797,123 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e), 500)
 
+    def generate_llms_text(self, url: str, params: Optional[Union[Dict[str, Any], GenerateLLMsTextParams]] = None) -> Dict[str, Any]:
+        """
+        Generate LLMs.txt for a given URL and poll until completion.
+
+        Args:
+            url (str): The URL to generate LLMs.txt from.
+            params (Optional[Union[Dict[str, Any], GenerateLLMsTextParams]]): Parameters for the LLMs.txt generation.
+
+        Returns:
+            Dict[str, Any]: A dictionary containing the generation results. The structure includes:
+                - 'success' (bool): Indicates if the generation was successful.
+                - 'status' (str): The final status of the generation job.
+                - 'data' (Dict): The generated LLMs.txt data.
+                - 'error' (Optional[str]): Error message if the generation failed.
+                - 'expiresAt' (str): ISO 8601 formatted date-time string indicating when the data expires.
+
+        Raises:
+            Exception: If the generation job fails or an error occurs during status checks.
+        """
+        if params is None:
+            params = {}
+
+        if isinstance(params, dict):
+            generation_params = GenerateLLMsTextParams(**params)
+        else:
+            generation_params = params
+
+        response = self.async_generate_llms_text(url, generation_params)
+        if not response.get('success') or 'id' not in response:
+            return response
+
+        job_id = response['id']
+        while True:
+            status = self.check_generate_llms_text_status(job_id)
+
+            if status['status'] == 'completed':
+                return status
+            elif status['status'] == 'failed':
+                raise Exception(f'LLMs.txt generation failed. Error: {status.get("error")}')
+            elif status['status'] != 'processing':
+                break
+
+            time.sleep(2)  # Polling interval
+
+        return {'success': False, 'error': 'LLMs.txt generation job terminated unexpectedly'}
+
+    def async_generate_llms_text(self, url: str, params: Optional[Union[Dict[str, Any], GenerateLLMsTextParams]] = None) -> Dict[str, Any]:
+        """
+        Initiate an asynchronous LLMs.txt generation operation.
+
+        Args:
+            url (str): The URL to generate LLMs.txt from.
+            params (Optional[Union[Dict[str, Any], GenerateLLMsTextParams]]): Parameters for the LLMs.txt generation.
+
+        Returns:
+            Dict[str, Any]: A dictionary containing the generation initiation response. The structure includes:
+                - 'success' (bool): Indicates if the generation initiation was successful.
+                - 'id' (str): The unique identifier for the generation job.
+
+        Raises:
+            Exception: If the generation job initiation fails.
+        """
+        if params is None:
+            params = {}
+
+        if isinstance(params, dict):
+            generation_params = GenerateLLMsTextParams(**params)
+        else:
+            generation_params = params
+
+        headers = self._prepare_headers()
+        json_data = {'url': url, **generation_params.dict(exclude_none=True)}
+
+        try:
+            response = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
+            if response.status_code == 200:
+                try:
+                    return response.json()
+                except:
+                    raise Exception('Failed to parse Firecrawl response as JSON.')
+            else:
+                self._handle_error(response, 'start LLMs.txt generation')
+        except Exception as e:
+            raise ValueError(str(e))
+
+        return {'success': False, 'error': 'Internal server error'}
+
+    def check_generate_llms_text_status(self, id: str) -> Dict[str, Any]:
+        """
+        Check the status of a LLMs.txt generation operation.
+
+        Args:
+            id (str): The ID of the LLMs.txt generation operation.
+
+        Returns:
+            Dict[str, Any]: The current status and results of the generation operation.
+
+        Raises:
+            Exception: If the status check fails.
+        """
+        headers = self._prepare_headers()
+        try:
+            response = self._get_request(f'{self.api_url}/v1/llmstxt/{id}', headers)
+            if response.status_code == 200:
+                try:
+                    return response.json()
+                except:
+                    raise Exception('Failed to parse Firecrawl response as JSON.')
+            elif response.status_code == 404:
+                raise Exception('LLMs.txt generation job not found')
+            else:
+                self._handle_error(response, 'check LLMs.txt generation status')
+        except Exception as e:
+            raise ValueError(str(e))
+
+        return {'success': False, 'error': 'Internal server error'}
+
     def _prepare_headers(self, idempotency_key: Optional[str] = None) -> Dict[str, str]:
         """
         Prepare the headers for API requests.
```
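The three methods reuse the SDK's established async-job shape: async_generate_llms_text starts the job at /v1/llmstxt, check_generate_llms_text_status polls /v1/llmstxt/{id}, and generate_llms_text wraps both with a 2-second polling loop. A minimal usage sketch (API key and URL are placeholders):

```python
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key

# Blocking helper: returns once the job status leaves 'processing'.
result = app.generate_llms_text("https://example.com", {"maxUrls": 5})
if result.get("success"):
    print(result.get("data"))

# Or drive the job manually:
job = app.async_generate_llms_text("https://example.com")
status = app.check_generate_llms_text_status(job["id"])
print(status.get("status"))
```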
```diff
@@ -797,7 +958,7 @@ class FirecrawlApp:
             requests.RequestException: If the request fails after the specified retries.
         """
         for attempt in range(retries):
-            response = requests.post(url, headers=headers, json=data)
+            response = requests.post(url, headers=headers, json=data, timeout=((data["timeout"] + 5000) if "timeout" in data else None))
             if response.status_code == 502:
                 time.sleep(backoff_factor * (2 ** attempt))
             else:
```
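For context, the surrounding loop retries only on HTTP 502, sleeping backoff_factor * 2 ** attempt between tries. Assuming defaults of retries=3 and backoff_factor=0.5 from the method signature (not shown in this hunk, so treat them as illustrative), the waits work out as follows:

```python
def backoff_delays(retries: int = 3, backoff_factor: float = 0.5) -> list:
    # Mirrors the 502 branch above: exponential backoff per attempt.
    return [backoff_factor * (2 ** attempt) for attempt in range(retries)]

print(backoff_delays())  # [0.5, 1.0, 2.0]
```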
```diff
@@ -937,6 +1098,127 @@ class FirecrawlApp:
         # Raise an HTTPError with the custom message and attach the response
         raise requests.exceptions.HTTPError(message, response=response)
 
+    def deep_research(self, query: str, params: Optional[Union[Dict[str, Any], DeepResearchParams]] = None,
+                      on_activity: Optional[Callable[[Dict[str, Any]], None]] = None,
+                      on_source: Optional[Callable[[Dict[str, Any]], None]] = None) -> Dict[str, Any]:
+        """
+        Initiates a deep research operation on a given query and polls until completion.
+
+        Args:
+            query (str): The query to research.
+            params (Optional[Union[Dict[str, Any], DeepResearchParams]]): Parameters for the deep research operation.
+            on_activity (Optional[Callable[[Dict[str, Any]], None]]): Optional callback to receive activity updates in real-time.
+
+        Returns:
+            Dict[str, Any]: The final research results.
+
+        Raises:
+            Exception: If the research operation fails.
+        """
+        if params is None:
+            params = {}
+
+        if isinstance(params, dict):
+            research_params = DeepResearchParams(**params)
+        else:
+            research_params = params
+
+        response = self.async_deep_research(query, research_params)
+        if not response.get('success') or 'id' not in response:
+            return response
+
+        job_id = response['id']
+        while True:
+            status = self.check_deep_research_status(job_id)
+
+            if on_activity and 'activities' in status:
+                for activity in status['activities']:
+                    on_activity(activity)
+
+            if on_source and 'sources' in status:
+                for source in status['sources']:
+                    on_source(source)
+
+            if status['status'] == 'completed':
+                return status
+            elif status['status'] == 'failed':
+                raise Exception(f'Deep research failed. Error: {status.get("error")}')
+            elif status['status'] != 'processing':
+                break
+
+            time.sleep(2)  # Polling interval
+
+        return {'success': False, 'error': 'Deep research job terminated unexpectedly'}
+
+    def async_deep_research(self, query: str, params: Optional[Union[Dict[str, Any], DeepResearchParams]] = None) -> Dict[str, Any]:
+        """
+        Initiates an asynchronous deep research operation.
+
+        Args:
+            query (str): The query to research.
+            params (Optional[Union[Dict[str, Any], DeepResearchParams]]): Parameters for the deep research operation.
+
+        Returns:
+            Dict[str, Any]: The response from the deep research initiation.
+
+        Raises:
+            Exception: If the research initiation fails.
+        """
+        if params is None:
+            params = {}
+
+        if isinstance(params, dict):
+            research_params = DeepResearchParams(**params)
+        else:
+            research_params = params
+
+        headers = self._prepare_headers()
+        json_data = {'query': query, **research_params.dict(exclude_none=True)}
+
+        try:
+            response = self._post_request(f'{self.api_url}/v1/research', json_data, headers)
+            if response.status_code == 200:
+                try:
+                    return response.json()
+                except:
+                    raise Exception('Failed to parse Firecrawl response as JSON.')
+            else:
+                self._handle_error(response, 'start deep research')
+        except Exception as e:
+            raise ValueError(str(e))
+
+        return {'success': False, 'error': 'Internal server error'}
+
+    def check_deep_research_status(self, id: str) -> Dict[str, Any]:
+        """
+        Check the status of a deep research operation.
+
+        Args:
+            id (str): The ID of the deep research operation.
+
+        Returns:
+            Dict[str, Any]: The current status and results of the research operation.
+
+        Raises:
+            Exception: If the status check fails.
+        """
+        headers = self._prepare_headers()
+        try:
+            response = self._get_request(f'{self.api_url}/v1/research/{id}', headers)
+            if response.status_code == 200:
+                try:
+                    return response.json()
+                except:
+                    raise Exception('Failed to parse Firecrawl response as JSON.')
+            elif response.status_code == 404:
+                raise Exception('Deep research job not found')
+            else:
+                self._handle_error(response, 'check deep research status')
+        except Exception as e:
+            raise ValueError(str(e))
+
+        return {'success': False, 'error': 'Internal server error'}
+
 class CrawlWatcher:
     def __init__(self, id: str, app: FirecrawlApp):
         self.id = id
```
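deep_research follows the same start-then-poll pattern against /v1/research, adding optional callbacks that fire for the activities and sources arrays on each poll. A usage sketch; the keys read inside the callback ('type', 'message') are assumptions, since the diff only types activities as List[Dict[str, Any]]:

```python
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key

def on_activity(activity: dict) -> None:
    # 'type'/'message' are assumed keys; the diff does not pin the activity schema.
    print(f"[{activity.get('type')}] {activity.get('message')}")

results = app.deep_research(
    "What are the latest advances in solid-state batteries?",
    params={"maxDepth": 5, "timeLimit": 120},
    on_activity=on_activity,
)
print(results.get("data"))
```

Note that the polling loop replays the full activities list on every status check, so a callback may see the same activity more than once.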