firecrawl 1.12.0__py3-none-any.whl → 1.13.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of firecrawl might be problematic. Click here for more details.

firecrawl/__init__.py CHANGED
@@ -13,7 +13,7 @@ import os
13
13
 
14
14
  from .firecrawl import FirecrawlApp # noqa
15
15
 
16
- __version__ = "1.12.0"
16
+ __version__ = "1.13.1"
17
17
 
18
18
  # Define the logger for the Firecrawl project
19
19
  logger: logging.Logger = logging.getLogger("firecrawl")
firecrawl/firecrawl.py CHANGED
@@ -12,7 +12,7 @@ Classes:
12
12
  import logging
13
13
  import os
14
14
  import time
15
- from typing import Any, Dict, Optional, List, Union
15
+ from typing import Any, Dict, Optional, List, Union, Callable
16
16
  import json
17
17
 
18
18
  import requests
@@ -33,6 +33,46 @@ class SearchParams(pydantic.BaseModel):
33
33
  timeout: Optional[int] = 60000
34
34
  scrapeOptions: Optional[Dict[str, Any]] = None
35
35
 
36
+ class GenerateLLMsTextParams(pydantic.BaseModel):
37
+ """
38
+ Parameters for the LLMs.txt generation operation.
39
+ """
40
+ maxUrls: Optional[int] = 10
41
+ showFullText: Optional[bool] = False
42
+ __experimental_stream: Optional[bool] = None
43
+
44
+ class DeepResearchParams(pydantic.BaseModel):
45
+ """
46
+ Parameters for the deep research operation.
47
+ """
48
+ maxDepth: Optional[int] = 7
49
+ timeLimit: Optional[int] = 270
50
+ maxUrls: Optional[int] = 20
51
+ __experimental_streamSteps: Optional[bool] = None
52
+
53
+ class DeepResearchResponse(pydantic.BaseModel):
54
+ """
55
+ Response from the deep research operation.
56
+ """
57
+ success: bool
58
+ id: str
59
+ error: Optional[str] = None
60
+
61
+ class DeepResearchStatusResponse(pydantic.BaseModel):
62
+ """
63
+ Status response from the deep research operation.
64
+ """
65
+ success: bool
66
+ data: Optional[Dict[str, Any]] = None
67
+ status: str
68
+ error: Optional[str] = None
69
+ expiresAt: str
70
+ currentDepth: int
71
+ maxDepth: int
72
+ activities: List[Dict[str, Any]]
73
+ sources: List[Dict[str, Any]]
74
+ summaries: List[str]
75
+
36
76
  class FirecrawlApp:
37
77
  class SearchResponse(pydantic.BaseModel):
38
78
  """
@@ -137,6 +177,7 @@ class FirecrawlApp:
137
177
  f'{self.api_url}{endpoint}',
138
178
  headers=headers,
139
179
  json=scrape_params,
180
+ timeout=(scrape_params["timeout"] + 5000 if "timeout" in scrape_params else None),
140
181
  )
141
182
  if response.status_code == 200:
142
183
  try:
@@ -425,7 +466,7 @@ class FirecrawlApp:
425
466
  else:
426
467
  self._handle_error(response, 'map')
427
468
 
428
- def batch_scrape_urls(self, urls: list[str],
469
+ def batch_scrape_urls(self, urls: List[str],
429
470
  params: Optional[Dict[str, Any]] = None,
430
471
  poll_interval: Optional[int] = 2,
431
472
  idempotency_key: Optional[str] = None) -> Any:
@@ -433,7 +474,7 @@ class FirecrawlApp:
433
474
  Initiate a batch scrape job for the specified URLs using the Firecrawl API.
434
475
 
435
476
  Args:
436
- urls (list[str]): The URLs to scrape.
477
+ urls (List[str]): The URLs to scrape.
437
478
  params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
438
479
  poll_interval (Optional[int]): Time in seconds between status checks when waiting for job completion. Defaults to 2 seconds.
439
480
  idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
@@ -468,12 +509,12 @@ class FirecrawlApp:
468
509
  self._handle_error(response, 'start batch scrape job')
469
510
 
470
511
 
471
- def async_batch_scrape_urls(self, urls: list[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]:
512
+ def async_batch_scrape_urls(self, urls: List[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> Dict[str, Any]:
472
513
  """
473
514
  Initiate a batch scrape job asynchronously.
474
515
 
475
516
  Args:
476
- urls (list[str]): The URLs to scrape.
517
+ urls (List[str]): The URLs to scrape.
477
518
  params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
478
519
  idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
479
520
 
@@ -497,12 +538,12 @@ class FirecrawlApp:
497
538
  else:
498
539
  self._handle_error(response, 'start batch scrape job')
499
540
 
500
- def batch_scrape_urls_and_watch(self, urls: list[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher':
541
+ def batch_scrape_urls_and_watch(self, urls: List[str], params: Optional[Dict[str, Any]] = None, idempotency_key: Optional[str] = None) -> 'CrawlWatcher':
501
542
  """
502
543
  Initiate a batch scrape job and return a CrawlWatcher to monitor the job via WebSocket.
503
544
 
504
545
  Args:
505
- urls (list[str]): The URLs to scrape.
546
+ urls (List[str]): The URLs to scrape.
506
547
  params (Optional[Dict[str, Any]]): Additional parameters for the scraper.
507
548
  idempotency_key (Optional[str]): A unique uuid key to ensure idempotency of requests.
508
549
 
@@ -756,6 +797,123 @@ class FirecrawlApp:
756
797
  except Exception as e:
757
798
  raise ValueError(str(e), 500)
758
799
 
800
+ def generate_llms_text(self, url: str, params: Optional[Union[Dict[str, Any], GenerateLLMsTextParams]] = None) -> Dict[str, Any]:
801
+ """
802
+ Generate LLMs.txt for a given URL and poll until completion.
803
+
804
+ Args:
805
+ url (str): The URL to generate LLMs.txt from.
806
+ params (Optional[Union[Dict[str, Any], GenerateLLMsTextParams]]): Parameters for the LLMs.txt generation.
807
+
808
+ Returns:
809
+ Dict[str, Any]: A dictionary containing the generation results. The structure includes:
810
+ - 'success' (bool): Indicates if the generation was successful.
811
+ - 'status' (str): The final status of the generation job.
812
+ - 'data' (Dict): The generated LLMs.txt data.
813
+ - 'error' (Optional[str]): Error message if the generation failed.
814
+ - 'expiresAt' (str): ISO 8601 formatted date-time string indicating when the data expires.
815
+
816
+ Raises:
817
+ Exception: If the generation job fails or an error occurs during status checks.
818
+ """
819
+ if params is None:
820
+ params = {}
821
+
822
+ if isinstance(params, dict):
823
+ generation_params = GenerateLLMsTextParams(**params)
824
+ else:
825
+ generation_params = params
826
+
827
+ response = self.async_generate_llms_text(url, generation_params)
828
+ if not response.get('success') or 'id' not in response:
829
+ return response
830
+
831
+ job_id = response['id']
832
+ while True:
833
+ status = self.check_generate_llms_text_status(job_id)
834
+
835
+ if status['status'] == 'completed':
836
+ return status
837
+ elif status['status'] == 'failed':
838
+ raise Exception(f'LLMs.txt generation failed. Error: {status.get("error")}')
839
+ elif status['status'] != 'processing':
840
+ break
841
+
842
+ time.sleep(2) # Polling interval
843
+
844
+ return {'success': False, 'error': 'LLMs.txt generation job terminated unexpectedly'}
845
+
846
+ def async_generate_llms_text(self, url: str, params: Optional[Union[Dict[str, Any], GenerateLLMsTextParams]] = None) -> Dict[str, Any]:
847
+ """
848
+ Initiate an asynchronous LLMs.txt generation operation.
849
+
850
+ Args:
851
+ url (str): The URL to generate LLMs.txt from.
852
+ params (Optional[Union[Dict[str, Any], GenerateLLMsTextParams]]): Parameters for the LLMs.txt generation.
853
+
854
+ Returns:
855
+ Dict[str, Any]: A dictionary containing the generation initiation response. The structure includes:
856
+ - 'success' (bool): Indicates if the generation initiation was successful.
857
+ - 'id' (str): The unique identifier for the generation job.
858
+
859
+ Raises:
860
+ Exception: If the generation job initiation fails.
861
+ """
862
+ if params is None:
863
+ params = {}
864
+
865
+ if isinstance(params, dict):
866
+ generation_params = GenerateLLMsTextParams(**params)
867
+ else:
868
+ generation_params = params
869
+
870
+ headers = self._prepare_headers()
871
+ json_data = {'url': url, **generation_params.dict(exclude_none=True)}
872
+
873
+ try:
874
+ response = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
875
+ if response.status_code == 200:
876
+ try:
877
+ return response.json()
878
+ except:
879
+ raise Exception('Failed to parse Firecrawl response as JSON.')
880
+ else:
881
+ self._handle_error(response, 'start LLMs.txt generation')
882
+ except Exception as e:
883
+ raise ValueError(str(e))
884
+
885
+ return {'success': False, 'error': 'Internal server error'}
886
+
887
+ def check_generate_llms_text_status(self, id: str) -> Dict[str, Any]:
888
+ """
889
+ Check the status of a LLMs.txt generation operation.
890
+
891
+ Args:
892
+ id (str): The ID of the LLMs.txt generation operation.
893
+
894
+ Returns:
895
+ Dict[str, Any]: The current status and results of the generation operation.
896
+
897
+ Raises:
898
+ Exception: If the status check fails.
899
+ """
900
+ headers = self._prepare_headers()
901
+ try:
902
+ response = self._get_request(f'{self.api_url}/v1/llmstxt/{id}', headers)
903
+ if response.status_code == 200:
904
+ try:
905
+ return response.json()
906
+ except:
907
+ raise Exception('Failed to parse Firecrawl response as JSON.')
908
+ elif response.status_code == 404:
909
+ raise Exception('LLMs.txt generation job not found')
910
+ else:
911
+ self._handle_error(response, 'check LLMs.txt generation status')
912
+ except Exception as e:
913
+ raise ValueError(str(e))
914
+
915
+ return {'success': False, 'error': 'Internal server error'}
916
+
759
917
  def _prepare_headers(self, idempotency_key: Optional[str] = None) -> Dict[str, str]:
760
918
  """
761
919
  Prepare the headers for API requests.
@@ -800,7 +958,7 @@ class FirecrawlApp:
800
958
  requests.RequestException: If the request fails after the specified retries.
801
959
  """
802
960
  for attempt in range(retries):
803
- response = requests.post(url, headers=headers, json=data)
961
+ response = requests.post(url, headers=headers, json=data, timeout=((data["timeout"] + 5000) if "timeout" in data else None))
804
962
  if response.status_code == 502:
805
963
  time.sleep(backoff_factor * (2 ** attempt))
806
964
  else:
@@ -940,6 +1098,127 @@ class FirecrawlApp:
940
1098
  # Raise an HTTPError with the custom message and attach the response
941
1099
  raise requests.exceptions.HTTPError(message, response=response)
942
1100
 
1101
+ def deep_research(self, query: str, params: Optional[Union[Dict[str, Any], DeepResearchParams]] = None,
1102
+ on_activity: Optional[Callable[[Dict[str, Any]], None]] = None,
1103
+ on_source: Optional[Callable[[Dict[str, Any]], None]] = None) -> Dict[str, Any]:
1104
+ """
1105
+ Initiates a deep research operation on a given query and polls until completion.
1106
+
1107
+ Args:
1108
+ query (str): The query to research.
1109
+ params (Optional[Union[Dict[str, Any], DeepResearchParams]]): Parameters for the deep research operation.
1110
+ on_activity (Optional[Callable[[Dict[str, Any]], None]]): Optional callback to receive activity updates in real-time.
+ on_source (Optional[Callable[[Dict[str, Any]], None]]): Optional callback to receive discovered sources in real-time.
1111
+
1112
+ Returns:
1113
+ Dict[str, Any]: The final research results.
1114
+
1115
+ Raises:
1116
+ Exception: If the research operation fails.
1117
+ """
1118
+ if params is None:
1119
+ params = {}
1120
+
1121
+ if isinstance(params, dict):
1122
+ research_params = DeepResearchParams(**params)
1123
+ else:
1124
+ research_params = params
1125
+
1126
+ response = self.async_deep_research(query, research_params)
1127
+ if not response.get('success') or 'id' not in response:
1128
+ return response
1129
+
1130
+ job_id = response['id']
1131
+ while True:
1132
+ status = self.check_deep_research_status(job_id)
1133
+
1134
+ if on_activity and 'activities' in status:
1135
+ for activity in status['activities']:
1136
+ on_activity(activity)
1137
+
1138
+ if on_source and 'sources' in status:
1139
+ for source in status['sources']:
1140
+ on_source(source)
1141
+
1142
+ if status['status'] == 'completed':
1143
+ return status
1144
+ elif status['status'] == 'failed':
1145
+ raise Exception(f'Deep research failed. Error: {status.get("error")}')
1146
+ elif status['status'] != 'processing':
1147
+ break
1148
+
1149
+ time.sleep(2) # Polling interval
1150
+
1151
+ return {'success': False, 'error': 'Deep research job terminated unexpectedly'}
1152
+
1153
+ def async_deep_research(self, query: str, params: Optional[Union[Dict[str, Any], DeepResearchParams]] = None) -> Dict[str, Any]:
1154
+ """
1155
+ Initiates an asynchronous deep research operation.
1156
+
1157
+ Args:
1158
+ query (str): The query to research.
1159
+ params (Optional[Union[Dict[str, Any], DeepResearchParams]]): Parameters for the deep research operation.
1160
+
1161
+ Returns:
1162
+ Dict[str, Any]: The response from the deep research initiation.
1163
+
1164
+ Raises:
1165
+ Exception: If the research initiation fails.
1166
+ """
1167
+ if params is None:
1168
+ params = {}
1169
+
1170
+ if isinstance(params, dict):
1171
+ research_params = DeepResearchParams(**params)
1172
+ else:
1173
+ research_params = params
1174
+
1175
+ headers = self._prepare_headers()
1176
+ json_data = {'query': query, **research_params.dict(exclude_none=True)}
1177
+
1178
+ try:
1179
+ response = self._post_request(f'{self.api_url}/v1/deep-research', json_data, headers)
1180
+ if response.status_code == 200:
1181
+ try:
1182
+ return response.json()
1183
+ except:
1184
+ raise Exception('Failed to parse Firecrawl response as JSON.')
1185
+ else:
1186
+ self._handle_error(response, 'start deep research')
1187
+ except Exception as e:
1188
+ raise ValueError(str(e))
1189
+
1190
+ return {'success': False, 'error': 'Internal server error'}
1191
+
1192
+ def check_deep_research_status(self, id: str) -> Dict[str, Any]:
1193
+ """
1194
+ Check the status of a deep research operation.
1195
+
1196
+ Args:
1197
+ id (str): The ID of the deep research operation.
1198
+
1199
+ Returns:
1200
+ Dict[str, Any]: The current status and results of the research operation.
1201
+
1202
+ Raises:
1203
+ Exception: If the status check fails.
1204
+ """
1205
+ headers = self._prepare_headers()
1206
+ try:
1207
+ response = self._get_request(f'{self.api_url}/v1/deep-research/{id}', headers)
1208
+ if response.status_code == 200:
1209
+ try:
1210
+ return response.json()
1211
+ except:
1212
+ raise Exception('Failed to parse Firecrawl response as JSON.')
1213
+ elif response.status_code == 404:
1214
+ raise Exception('Deep research job not found')
1215
+ else:
1216
+ self._handle_error(response, 'check deep research status')
1217
+ except Exception as e:
1218
+ raise ValueError(str(e))
1219
+
1220
+ return {'success': False, 'error': 'Internal server error'}
1221
+
943
1222
  class CrawlWatcher:
944
1223
  def __init__(self, id: str, app: FirecrawlApp):
945
1224
  self.id = id
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: firecrawl
3
- Version: 1.12.0
3
+ Version: 1.13.1
4
4
  Summary: Python SDK for Firecrawl API
5
5
  Home-page: https://github.com/mendableai/firecrawl
6
6
  Author: Mendable.ai
@@ -0,0 +1,11 @@
1
+ firecrawl/__init__.py,sha256=5qKTtUIYizD2sOcptUkiB1vyxmA7I9I3kSYuJhzHzNo,2544
2
+ firecrawl/firecrawl.py,sha256=Ytc8l9xABF8l_mykEB8KS8C6szuC0Z6MU6YuhJgFJ_E,51591
3
+ firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ firecrawl/__tests__/e2e_withAuth/test.py,sha256=6OawnVF4IPeGyXg_Izi3t8U7MyT90roaJBJIG5UfllM,7935
5
+ firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=tL5kJJ4el37Wc-Z2TRSuSWwWG2M40h3VPxHYuWijD00,19888
7
+ firecrawl-1.13.1.dist-info/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
8
+ firecrawl-1.13.1.dist-info/METADATA,sha256=Tf8mmN_z0HbFo9DIhQtWPDkDyx9CC0OHm_3GlG0QohE,10572
9
+ firecrawl-1.13.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
10
+ firecrawl-1.13.1.dist-info/top_level.txt,sha256=jTvz79zWhiyAezfmmHe4FQ-hR60C59UU5FrjMjijLu8,10
11
+ firecrawl-1.13.1.dist-info/RECORD,,
@@ -1,11 +0,0 @@
1
- firecrawl/__init__.py,sha256=aBmlOwFbTn9Wj2nS_RE2WlsEwddPMSLMCuT44y4ox6Q,2544
2
- firecrawl/firecrawl.py,sha256=kBu9hbW_fBHPnYHw4PSDQP3MdxE6BVUhlCUkwD2VMI4,40940
3
- firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- firecrawl/__tests__/e2e_withAuth/test.py,sha256=6OawnVF4IPeGyXg_Izi3t8U7MyT90roaJBJIG5UfllM,7935
5
- firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
- firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=tL5kJJ4el37Wc-Z2TRSuSWwWG2M40h3VPxHYuWijD00,19888
7
- firecrawl-1.12.0.dist-info/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
8
- firecrawl-1.12.0.dist-info/METADATA,sha256=pzMzEpSjoIt4IEyZ9_8Cp6xTng8d3xShHBKwDrd6ULI,10572
9
- firecrawl-1.12.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
10
- firecrawl-1.12.0.dist-info/top_level.txt,sha256=jTvz79zWhiyAezfmmHe4FQ-hR60C59UU5FrjMjijLu8,10
11
- firecrawl-1.12.0.dist-info/RECORD,,