firecrawl 4.3.0__py3-none-any.whl → 4.3.2__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.

Potentially problematic release: this version of firecrawl might be problematic.

firecrawl/__init__.py CHANGED
@@ -17,7 +17,7 @@ from .v1 import (
     V1ChangeTrackingOptions,
 )
 
-__version__ = "4.3.0"
+__version__ = "4.3.2"
 
 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")
firecrawl/__tests__/e2e/v2/aio/test_aio_usage.py CHANGED
@@ -33,3 +33,10 @@ async def test_async_get_token_usage():
     tokens = await client.get_token_usage()
     assert hasattr(tokens, "remaining_tokens")
 
+
+@pytest.mark.asyncio
+async def test_async_get_queue_status():
+    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
+    status = await client.get_queue_status()
+    assert hasattr(status, "jobs_in_queue")
+
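
The new end-to-end test exercises the queue-status endpoint through the async client. A minimal standalone sketch of the same call, reusing the API_KEY / API_URL environment variables the test suite relies on:

```python
import asyncio
import os

from firecrawl import AsyncFirecrawl

async def main():
    # Same construction as the test above; API_KEY / API_URL are assumed to be set.
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    status = await client.get_queue_status()
    print(status.jobs_in_queue, status.max_concurrency)

asyncio.run(main())
```
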
firecrawl/client.py CHANGED
@@ -56,23 +56,34 @@ class V2Proxy:
         self._client = client_instance
 
         if client_instance:
+            self.scrape = client_instance.scrape
             self.search = client_instance.search
             self.crawl = client_instance.crawl
+            self.start_crawl = client_instance.start_crawl
             self.get_crawl_status = client_instance.get_crawl_status
             self.cancel_crawl = client_instance.cancel_crawl
-            self.start_crawl = client_instance.start_crawl
+            self.get_crawl_errors = client_instance.get_crawl_errors
+            self.get_active_crawls = client_instance.get_active_crawls
+            self.active_crawls = client_instance.active_crawls
             self.crawl_params_preview = client_instance.crawl_params_preview
+
             self.extract = client_instance.extract
+            self.start_extract = client_instance.start_extract
+            self.get_extract_status = client_instance.get_extract_status
+
             self.start_batch_scrape = client_instance.start_batch_scrape
             self.get_batch_scrape_status = client_instance.get_batch_scrape_status
             self.cancel_batch_scrape = client_instance.cancel_batch_scrape
             self.batch_scrape = client_instance.batch_scrape
             self.get_batch_scrape_errors = client_instance.get_batch_scrape_errors
-            self.get_extract_status = client_instance.get_extract_status
+
             self.map = client_instance.map
             self.get_concurrency = client_instance.get_concurrency
             self.get_credit_usage = client_instance.get_credit_usage
             self.get_token_usage = client_instance.get_token_usage
+            self.get_queue_status = client_instance.get_queue_status
+
+            self.watcher = client_instance.watcher
 
     def __getattr__(self, name):
         """Forward attribute access to the underlying client."""
@@ -99,9 +110,9 @@ class AsyncV1Proxy:
 
 class AsyncV2Proxy:
     """Proxy class that forwards method calls to the appropriate version client."""
-    _client: Optional[Any] = None
+    _client: Optional[AsyncFirecrawlClient] = None
 
-    def __init__(self, client_instance: Optional[Any] = None):
+    def __init__(self, client_instance: Optional[AsyncFirecrawlClient] = None):
         self._client = client_instance
 
         if client_instance:
@@ -132,6 +143,8 @@ class AsyncV2Proxy:
             self.get_concurrency = client_instance.get_concurrency
             self.get_credit_usage = client_instance.get_credit_usage
             self.get_token_usage = client_instance.get_token_usage
+            self.get_queue_status = client_instance.get_queue_status
+
             self.watcher = client_instance.watcher
 
     def __getattr__(self, name):
@@ -193,7 +206,8 @@ class Firecrawl:
         self.get_concurrency = self._v2_client.get_concurrency
         self.get_credit_usage = self._v2_client.get_credit_usage
         self.get_token_usage = self._v2_client.get_token_usage
-
+        self.get_queue_status = self._v2_client.get_queue_status
+
         self.watcher = self._v2_client.watcher
 
 class AsyncFirecrawl:
  class AsyncFirecrawl:
@@ -238,6 +252,7 @@ class AsyncFirecrawl:
238
252
  self.get_concurrency = self._v2_client.get_concurrency
239
253
  self.get_credit_usage = self._v2_client.get_credit_usage
240
254
  self.get_token_usage = self._v2_client.get_token_usage
255
+ self.get_queue_status = self._v2_client.get_queue_status
241
256
 
242
257
  self.watcher = self._v2_client.watcher
243
258
 
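With the proxy and top-level wiring above, the queue-status call is also reachable from the synchronous client. A hedged sketch, assuming FIRECRAWL_API_KEY is the environment variable holding the key:

```python
import os

from firecrawl import Firecrawl

# Assumes FIRECRAWL_API_KEY holds a valid key; attribute names follow the diff above.
app = Firecrawl(api_key=os.getenv("FIRECRAWL_API_KEY"))
status = app.get_queue_status()  # forwarded to the v2 client by the wiring shown above
print(status.jobs_in_queue, status.waiting_jobs_in_queue)
```
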
firecrawl/types.py CHANGED
@@ -65,6 +65,9 @@ from .v2.types import (
     ExecuteJavascriptAction,
     PDFAction,
 
+    # Usage types
+    QueueStatusResponse,
+
     # Location and format types
     Location,
 
@@ -142,6 +145,9 @@ __all__ = [
     'ScrapeAction',
     'ExecuteJavascriptAction',
     'PDFAction',
+
+    # Usage types
+    'QueueStatusResponse',
 
     # Location and format types
     'Location',
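Since QueueStatusResponse is now re-exported from firecrawl.types, a plain import is enough to annotate code against it:

```python
# Import path taken from the re-export added above.
from firecrawl.types import QueueStatusResponse

def report(status: QueueStatusResponse) -> str:
    return f"{status.jobs_in_queue} queued / {status.max_concurrency} max concurrency"
```
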
firecrawl/v1/client.py CHANGED
@@ -309,6 +309,7 @@ class V1MapParams(pydantic.BaseModel):
     limit: Optional[int] = None
     timeout: Optional[int] = 30000
     useIndex: Optional[bool] = None
+    location: Optional[V1LocationConfig] = None
 
 class V1MapResponse(pydantic.BaseModel):
     """Response from mapping operations."""
@@ -1333,6 +1334,7 @@ class V1FirecrawlApp:
                 limit: Optional[int] = None,
                 timeout: Optional[int] = 30000,
                 use_index: Optional[bool] = None,
+                location: Optional[V1LocationConfig] = None,
                 **kwargs) -> V1MapResponse:
         """
         Map and discover links from a URL.
@@ -1377,6 +1379,8 @@ class V1FirecrawlApp:
             map_params['timeout'] = timeout
         if use_index is not None:
             map_params['useIndex'] = use_index
+        if location is not None:
+            map_params['location'] = location.dict(by_alias=True, exclude_none=True)
 
         # Add any additional kwargs
         map_params.update(kwargs)
@@ -3910,6 +3914,7 @@ class AsyncV1FirecrawlApp(V1FirecrawlApp):
                  sitemap_only: Optional[bool] = None,
                  limit: Optional[int] = None,
                  timeout: Optional[int] = 30000,
+                 location: Optional[V1LocationConfig] = None,
                  params: Optional[V1MapParams] = None) -> V1MapResponse:
         """
         Asynchronously map and discover links from a URL.
@@ -3952,6 +3957,8 @@ class AsyncV1FirecrawlApp(V1FirecrawlApp):
             map_params['limit'] = limit
         if timeout is not None:
             map_params['timeout'] = timeout
+        if location is not None:
+            map_params['location'] = location.dict(by_alias=True, exclude_none=True)
 
         # Create final params object
         final_params = V1MapParams(**map_params)
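The v1 compatibility client now accepts a location on its map call and serializes it with .dict(by_alias=True, exclude_none=True), as shown above. A hypothetical usage sketch; the method name map_url and the V1LocationConfig fields (country, languages) are assumptions, not taken from this diff:

```python
from firecrawl.v1.client import V1FirecrawlApp, V1LocationConfig

app = V1FirecrawlApp(api_key="fc-...")
# location is serialized into map_params["location"] exactly as in the diff above.
result = app.map_url(
    "https://example.com",
    location=V1LocationConfig(country="DE", languages=["de"]),
)
```
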
firecrawl/v2/client.py CHANGED
@@ -421,6 +421,7 @@ class FirecrawlClient:
         limit: Optional[int] = None,
         sitemap: Optional[Literal["only", "include", "skip"]] = None,
         timeout: Optional[int] = None,
+        location: Optional[Location] = None,
     ) -> MapData:
         """Map a URL and return discovered links.
 
@@ -441,7 +442,8 @@ class FirecrawlClient:
             limit=limit,
             sitemap=sitemap if sitemap is not None else "include",
             timeout=timeout,
-        ) if any(v is not None for v in [search, include_subdomains, limit, sitemap, timeout]) else None
+            location=location
+        ) if any(v is not None for v in [search, include_subdomains, limit, sitemap, timeout, location]) else None
 
         return map_module.map(self.http_client, url, options)
 
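On the v2 side the same option flows through the public map() signature. A hedged sketch, assuming Location exposes a country field and FIRECRAWL_API_KEY is set:

```python
import os

from firecrawl import Firecrawl
from firecrawl.v2.types import Location

app = Firecrawl(api_key=os.getenv("FIRECRAWL_API_KEY"))
# location now participates in the "any(v is not None ...)" check above, so passing
# it alone is enough to build the options object for the request.
links = app.map("https://example.com", limit=50, location=Location(country="DE"))
print(links)
```
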
firecrawl/v2/client_async.py CHANGED
@@ -253,6 +253,18 @@ class AsyncFirecrawlClient:
     async def get_token_usage(self):
         from .methods.aio import usage as async_usage  # type: ignore[attr-defined]
         return await async_usage.get_token_usage(self.async_http_client)
+
+    async def get_credit_usage_historical(self, by_api_key: bool = False):
+        from .methods.aio import usage as async_usage  # type: ignore[attr-defined]
+        return await async_usage.get_credit_usage_historical(self.async_http_client, by_api_key)
+
+    async def get_token_usage_historical(self, by_api_key: bool = False):
+        from .methods.aio import usage as async_usage  # type: ignore[attr-defined]
+        return await async_usage.get_token_usage_historical(self.async_http_client, by_api_key)
+
+    async def get_queue_status(self):
+        from .methods.aio import usage as async_usage  # type: ignore[attr-defined]
+        return await async_usage.get_queue_status(self.async_http_client)
 
     # Watcher (sync object usable from async contexts)
     def watcher(
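A hedged sketch of the new async accessors against the v2 async client itself; the constructor argument and import path are assumptions based on the RECORD entry for firecrawl/v2/client_async.py:

```python
import asyncio
import os

from firecrawl.v2.client_async import AsyncFirecrawlClient

async def main():
    client = AsyncFirecrawlClient(api_key=os.getenv("FIRECRAWL_API_KEY"))  # assumed constructor
    history = await client.get_credit_usage_historical(by_api_key=True)
    queue = await client.get_queue_status()
    print(history, queue.active_jobs_in_queue)

asyncio.run(main())
```
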
firecrawl/v2/methods/aio/map.py CHANGED
@@ -20,6 +20,8 @@ def _prepare_map_request(url: str, options: Optional[MapOptions] = None) -> Dict
         data["limit"] = options.limit
     if options.timeout is not None:
         data["timeout"] = options.timeout
+    if options.location is not None:
+        data["location"] = options.location.model_dump(exclude_none=True)
     payload.update(data)
     return payload
 
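The location option is serialized with model_dump(exclude_none=True) before being merged into the map payload. An offline illustration of the resulting shape, with the Location fields assumed:

```python
from firecrawl.v2.types import Location, MapOptions

options = MapOptions(limit=10, location=Location(country="DE"))
payload = {"url": "https://example.com", "limit": options.limit,
           "location": options.location.model_dump(exclude_none=True)}
print(payload)  # e.g. {'url': ..., 'limit': 10, 'location': {'country': 'DE'}}
```
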
firecrawl/v2/methods/aio/usage.py CHANGED
@@ -1,6 +1,6 @@
 from ...utils.http_client_async import AsyncHttpClient
 from ...utils.error_handler import handle_response_error
-from ...types import ConcurrencyCheck, CreditUsage, TokenUsage, CreditUsageHistoricalResponse, TokenUsageHistoricalResponse
+from ...types import ConcurrencyCheck, CreditUsage, TokenUsage, CreditUsageHistoricalResponse, TokenUsageHistoricalResponse, QueueStatusResponse
 
 
 async def get_concurrency(client: AsyncHttpClient) -> ConcurrencyCheck:
@@ -49,6 +49,23 @@ async def get_token_usage(client: AsyncHttpClient) -> TokenUsage:
     )
 
 
+async def get_queue_status(client: AsyncHttpClient) -> QueueStatusResponse:
+    resp = await client.get("/v2/team/queue-status")
+    if resp.status_code >= 400:
+        handle_response_error(resp, "get queue status")
+    body = resp.json()
+    if not body.get("success"):
+        raise Exception(body.get("error", "Unknown error"))
+    data = body.get("data", body)
+    return QueueStatusResponse(
+        jobs_in_queue=data.get("jobsInQueue", 0),
+        active_jobs_in_queue=data.get("activeJobsInQueue", 0),
+        waiting_jobs_in_queue=data.get("waitingJobsInQueue", 0),
+        max_concurrency=data.get("maxConcurrency", 0),
+        most_recent_success=data.get("mostRecentSuccess", None),
+    )
+
+
 async def get_credit_usage_historical(client: AsyncHttpClient, by_api_key: bool = False) -> CreditUsageHistoricalResponse:
     query = "?byApiKey=true" if by_api_key else ""
     resp = await client.get(f"/v2/team/credit-usage/historical{query}")
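get_queue_status maps the endpoint's camelCase fields onto the snake_case QueueStatusResponse model. An offline illustration of that mapping, using made-up sample values:

```python
from firecrawl.v2.types import QueueStatusResponse

sample = {  # made-up payload shaped like the fields read above
    "jobsInQueue": 12,
    "activeJobsInQueue": 4,
    "waitingJobsInQueue": 8,
    "maxConcurrency": 10,
    "mostRecentSuccess": None,
}
status = QueueStatusResponse(
    jobs_in_queue=sample.get("jobsInQueue", 0),
    active_jobs_in_queue=sample.get("activeJobsInQueue", 0),
    waiting_jobs_in_queue=sample.get("waitingJobsInQueue", 0),
    max_concurrency=sample.get("maxConcurrency", 0),
    most_recent_success=sample.get("mostRecentSuccess", None),
)
print(status.jobs_in_queue, status.max_concurrency)
```
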
firecrawl/v2/methods/map.py CHANGED
@@ -27,6 +27,8 @@ def _prepare_map_request(url: str, options: Optional[MapOptions] = None) -> Dict
         data["limit"] = options.limit
     if options.timeout is not None:
         data["timeout"] = options.timeout
+    if options.location is not None:
+        data["location"] = options.location.model_dump(exclude_none=True)
     payload.update(data)
 
     return payload
firecrawl/v2/types.py CHANGED
@@ -445,6 +445,7 @@ class MapOptions(BaseModel):
     include_subdomains: Optional[bool] = None
     limit: Optional[int] = None
     timeout: Optional[int] = None
+    location: Optional['Location'] = None
 
 class MapRequest(BaseModel):
     """Request for mapping a website."""
firecrawl-4.3.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: firecrawl
-Version: 4.3.0
+Version: 4.3.2
 Summary: Python SDK for Firecrawl API
 Home-page: https://github.com/firecrawl/firecrawl
 Author: Mendable.ai
firecrawl-4.3.2.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
-firecrawl/__init__.py,sha256=Lrk_X9oaNHYEYdzuPTdwX7kjhfoZbVCsyEd4wd9Qi5Q,2192
-firecrawl/client.py,sha256=tp3mUo_3aGPuZ53kpU4bhM-5EtwD_IUWrJ7wm0GMuCc,11159
+firecrawl/__init__.py,sha256=EELRjS0VrQqhi-iR1LaRhECU1yXWbjtofVsp5NaxlTc,2192
+firecrawl/client.py,sha256=Lmrg2jniCETU6_xVMn_fgLrgDXiBixK9hSkkdsCGiog,11840
 firecrawl/firecrawl.backup.py,sha256=v1FEN3jR4g5Aupg4xp6SLkuFvYMQuUKND2YELbYjE6c,200430
-firecrawl/types.py,sha256=W9N2pqQuevEIIjYHN9rbDf31E-nwdCECqIn11Foz2T8,2836
+firecrawl/types.py,sha256=T1g8r12xIWxJSoSNrL58SSgc1F8SykrwVx92BhTvZuc,2926
 firecrawl/__tests__/e2e/v2/conftest.py,sha256=I28TUpN5j0-9gM79NlbrDS8Jlsheao657od2f-2xK0Y,2587
 firecrawl/__tests__/e2e/v2/test_async.py,sha256=ZXpf1FVOJgNclITglrxIyFwP4cOiqzWLicGaxIm70BQ,2526
 firecrawl/__tests__/e2e/v2/test_batch_scrape.py,sha256=H9GtuwHIFdOQ958SOVThi_kvDDxcXAK_ECRh95ogonQ,3265
@@ -18,7 +18,7 @@ firecrawl/__tests__/e2e/v2/aio/test_aio_extract.py,sha256=3CNRIFzgBMcOYOLhnKcK1k
 firecrawl/__tests__/e2e/v2/aio/test_aio_map.py,sha256=nckl1kbiEaaTdu5lm__tOoTDG-txTYwwSH3KZEvyKzc,1199
 firecrawl/__tests__/e2e/v2/aio/test_aio_scrape.py,sha256=b17A7advBEjxrjdait2w8GHztZeKy_P3zZ3ixm5H7xw,4453
 firecrawl/__tests__/e2e/v2/aio/test_aio_search.py,sha256=ehV0Ai_hknAkaoE551j2lbktV4bi_J0h3FKzC7G15Iw,8246
-firecrawl/__tests__/e2e/v2/aio/test_aio_usage.py,sha256=Dh9BVo48NKSZOKgLbO7n8fpMjvYmeMXDFzbIhnCTMhE,1014
+firecrawl/__tests__/e2e/v2/aio/test_aio_usage.py,sha256=lVGfwR79eaZamUZXgKStUJcpclCnnlpwHGo2pMOUhCY,1255
 firecrawl/__tests__/e2e/v2/aio/test_aio_watcher.py,sha256=hwES4Nu5c0hniZ9heIPDfvh_2JmJ2wPoX9ULTZ0Asjs,1471
 firecrawl/__tests__/unit/v2/methods/test_batch_request_preparation.py,sha256=HeOxN-sPYSssytcIRAEicJSZsFt_Oa5qGXAtdumR54c,4040
 firecrawl/__tests__/unit/v2/methods/test_crawl_params.py,sha256=p9hzg14uAs1iHKXPDSXhGU6hEzPBF_Ae34RAf5XYa10,2387
@@ -42,17 +42,17 @@ firecrawl/__tests__/unit/v2/methods/aio/test_ensure_async.py,sha256=pUwuWhRbVUTb
 firecrawl/__tests__/unit/v2/utils/test_validation.py,sha256=E4n4jpBhH_W7E0ikI5r8KMAKiOhbfGD3i_B8-dv3PlI,10803
 firecrawl/__tests__/unit/v2/watcher/test_ws_watcher.py,sha256=87w47n0iOihtu4jTR4-4rw1-xVKWmLg2BOBGxjQPnUk,9517
 firecrawl/v1/__init__.py,sha256=aP1oisPeZVGGZynvENc07JySMOZfv_4zAlxQ0ecMJXA,481
-firecrawl/v1/client.py,sha256=33o_sPOyPsRfM1j2PUiKTkvbnPCkmL7-Ou54D1sx-rE,207710
+firecrawl/v1/client.py,sha256=2Rq38RxGnuf2dMCmr4cc3f-ythavcBkUyJmRrwLmMHg,208104
 firecrawl/v2/__init__.py,sha256=Jc6a8tBjYG5OPkjDM5pl-notyys-7DEj7PLEfepv3fc,137
-firecrawl/v2/client.py,sha256=aEISMnyKKzh5jcrLdpkC3WCZ7cMWj5vo1Gk7ljc9gwk,31821
-firecrawl/v2/client_async.py,sha256=XyzojIJlWatBGlAMish22H-XHkkH9zHsD6MGtAdtFg8,10487
-firecrawl/v2/types.py,sha256=itKhycxDQ9a3wltK28qFLxLVVvsvb5APoS1kOiiS8ac,24341
+firecrawl/v2/client.py,sha256=1PznRrX1kl-Wenv2Ilm5oVoU3V9HDtkMXuTsOFr8Ow8,31906
+firecrawl/v2/client_async.py,sha256=HAewaYnHrvQQgkSQfwwNeWvAuj3JZqceQk6T10RKxeg,11204
+firecrawl/v2/types.py,sha256=fJ6FySoZ30-4HAdU3XLPeWplQmB9b5p_ohWMh8Dg9_Y,24383
 firecrawl/v2/watcher.py,sha256=FOU71tqSKxgeuGycu4ye0SLc2dw7clIcoQjPsi-4Csc,14229
 firecrawl/v2/watcher_async.py,sha256=AVjW2mgABniolSsauK4u0FW8ya6WzRUdyEg2R-8vGCw,10278
 firecrawl/v2/methods/batch.py,sha256=jFSIPtvulUrPz3Y3zT1gDNwYEf8Botpfh4GOeYsVYRI,14852
 firecrawl/v2/methods/crawl.py,sha256=DWH1wUUDpE0zPSRALkQj_vF-PdsT0A1NyAGtnfcDaR8,18634
 firecrawl/v2/methods/extract.py,sha256=-Jr4BtraU3b7hd3JIY73V-S69rUclxyXyUpoQb6DCQk,4274
-firecrawl/v2/methods/map.py,sha256=4SADb0-lkbdOWDmO6k8_TzK0yRti5xsN40N45nUl9uA,2592
+firecrawl/v2/methods/map.py,sha256=C7ltFFwhC6XVZKAVVnPsapmBi6Lp_2Mm9r7TtB0cZ0I,2711
 firecrawl/v2/methods/scrape.py,sha256=CSHBwC-P91UfrW3zHirjNAs2h899FKcWvd1DY_4fJdo,1921
 firecrawl/v2/methods/search.py,sha256=6BKiQ1aKJjWBKm9BBtKxFKGD74kCKBeMIp_OgjcDFAw,7673
 firecrawl/v2/methods/usage.py,sha256=NqkmFd-ziw8ijbZxwaxjxZHl85u0LTe_TYqr_NGWFwE,3693
@@ -60,10 +60,10 @@ firecrawl/v2/methods/aio/__init__.py,sha256=RocMJnGwnLIvGu3G8ZvY8INkipC7WHZiu2bE
 firecrawl/v2/methods/aio/batch.py,sha256=4Uj05ffpMhQA2J_mkvHYYogdXb0IgbKGbomO43b4m94,6741
 firecrawl/v2/methods/aio/crawl.py,sha256=j2Tb2AcGsT6bCiUbB2yjrfvGZqkinUt0tU-SzWmB7Jw,11551
 firecrawl/v2/methods/aio/extract.py,sha256=IfNr2ETqt4dR73JFzrEYI4kk5vpKnJOG0BmPEjGEoO4,4217
-firecrawl/v2/methods/aio/map.py,sha256=EuT-5A0cQr_e5SBfEZ6pnl8u0JUwEEvSwhyT2N-QoKU,2326
+firecrawl/v2/methods/aio/map.py,sha256=WQTl2zSr_9KhrQMuvnRCCymiYjEailp8cAcMssG6Xx4,2445
 firecrawl/v2/methods/aio/scrape.py,sha256=ilA9qco8YGwCFpE0PN1XBQUyuHPQwH2QioZ-xsfxhgU,1386
 firecrawl/v2/methods/aio/search.py,sha256=_TqTFGQLlOCCLNdWcOvakTqPGD2r9AOlBg8RasOgmvw,6177
-firecrawl/v2/methods/aio/usage.py,sha256=qM5PsPuA_N1pUTnZF4Raq4VkBf_XCMSQdhOJBFFs17k,3239
+firecrawl/v2/methods/aio/usage.py,sha256=iUzTkdAWRheq-V5rRXcW0bc3MSODaVS1AqroRF0fO9M,3964
 firecrawl/v2/utils/__init__.py,sha256=i1GgxySmqEXpWSBQCu3iZBPIJG7fXj0QXCDWGwerWNs,338
 firecrawl/v2/utils/error_handler.py,sha256=Iuf916dHphDY8ObNNlWy75628DFeJ0Rv8ljRp4LttLE,4199
 firecrawl/v2/utils/get_version.py,sha256=0CxW_41q2hlzIxEWOivUCaYw3GFiSIH32RPUMcIgwAY,492
@@ -71,10 +71,10 @@ firecrawl/v2/utils/http_client.py,sha256=gUrC1CvU5sj03w27Lbq-3-yH38Yi_OXiI01-piw
 firecrawl/v2/utils/http_client_async.py,sha256=iy89_bk2HS3afSRHZ8016eMCa9Fk-5MFTntcOHfbPgE,1936
 firecrawl/v2/utils/normalize.py,sha256=nlTU6QRghT1YKZzNZlIQj4STSRuSUGrS9cCErZIcY5w,3636
 firecrawl/v2/utils/validation.py,sha256=qWWiWaVcvODmVxf9rxIVy1j_dyuJCvdMMUoYhvWUEIU,15269
-firecrawl-4.3.0.dist-info/licenses/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
+firecrawl-4.3.2.dist-info/licenses/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
 tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
 tests/test_timeout_conversion.py,sha256=PWlIEMASQNhu4cp1OW_ebklnE9NCiigPnEFCtI5N3w0,3996
-firecrawl-4.3.0.dist-info/METADATA,sha256=xWtTodBOtFYrrFuKAOC7i657fmT45ZtpRXyNWd7i7mU,7392
-firecrawl-4.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-firecrawl-4.3.0.dist-info/top_level.txt,sha256=8T3jOaSN5mtLghO-R3MQ8KO290gIX8hmfxQmglBPdLE,16
-firecrawl-4.3.0.dist-info/RECORD,,
+firecrawl-4.3.2.dist-info/METADATA,sha256=tlvxxrTf5VI0Vz-zBYhEO75830MJZ1qV5wjRtqrptw4,7392
+firecrawl-4.3.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+firecrawl-4.3.2.dist-info/top_level.txt,sha256=8T3jOaSN5mtLghO-R3MQ8KO290gIX8hmfxQmglBPdLE,16
+firecrawl-4.3.2.dist-info/RECORD,,