firecrawl 2.1.1-py3-none-any.whl → 2.2.0-py3-none-any.whl
This diff shows the content of publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of firecrawl has been flagged as possibly problematic.
- firecrawl/__init__.py +1 -1
- firecrawl/firecrawl.py +139 -38
- {firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/METADATA +1 -1
- {firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/RECORD +7 -7
- {firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/LICENSE +0 -0
- {firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/WHEEL +0 -0
- {firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/top_level.txt +0 -0
firecrawl/__init__.py
CHANGED
firecrawl/firecrawl.py
CHANGED
@@ -570,7 +570,6 @@ class FirecrawlApp:
         location: Optional[str] = None,
         timeout: Optional[int] = None,
         scrape_options: Optional[ScrapeOptions] = None,
-        params: Optional[Union[Dict[str, Any], SearchParams]] = None,
         **kwargs) -> SearchResponse:
         """
         Search for content using Firecrawl.
@@ -585,7 +584,6 @@ class FirecrawlApp:
             location (Optional[str]): Geo-targeting
             timeout (Optional[int]): Request timeout in milliseconds
             scrape_options (Optional[ScrapeOptions]): Result scraping configuration
-            params (Optional[Union[Dict[str, Any], SearchParams]]): Additional search parameters
             **kwargs: Additional keyword arguments for future compatibility
 
         Returns:
@@ -598,13 +596,11 @@ class FirecrawlApp:
         Raises:
             Exception: If search fails or response cannot be parsed
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "search")
+
         # Build search parameters
         search_params = {}
-        if params:
-            if isinstance(params, dict):
-                search_params.update(params)
-            else:
-                search_params.update(params.dict(exclude_none=True))
 
         # Add individual parameters
         if limit is not None:
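With `params` gone, search options that previously rode along in a dict or `SearchParams` object are passed directly as keyword arguments. A minimal sketch of the new calling convention (the API key is a placeholder, not a real credential):

from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key

# 2.1.1 style, removed in 2.2.0:
#   app.search("web scraping", params={"limit": 5})
# 2.2.0 style: options are plain keyword arguments
results = app.search("web scraping", limit=5, timeout=30000)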
@@ -705,6 +701,9 @@ class FirecrawlApp:
         Raises:
             Exception: If crawl fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "crawl_url")
+
         crawl_params = {}
 
         # Add individual parameters
@@ -808,6 +807,9 @@ class FirecrawlApp:
         Raises:
             Exception: If crawl initiation fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "async_crawl_url")
+
         crawl_params = {}
 
         # Add individual parameters
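The practical effect of `_validate_kwargs` (added near the end of this diff) is that a mistyped option now fails fast in the client with a `ValueError` instead of travelling to the API. A sketch, assuming `app` is a `FirecrawlApp` as in the example above:

try:
    app.crawl_url("https://example.com", max_depht=2)  # typo for 'max_depth'
except ValueError as e:
    # "Unsupported parameter(s) for crawl_url: max_depht. Please refer to
    #  the API documentation for the correct parameters."
    print(e)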
@@ -1076,7 +1078,7 @@ class FirecrawlApp:
         sitemap_only: Optional[bool] = None,
         limit: Optional[int] = None,
         timeout: Optional[int] = None,
-        params: Optional[MapParams] = None) -> MapResponse:
+        **kwargs) -> MapResponse:
         """
         Map and discover links from a URL.
 
@@ -1088,7 +1090,7 @@ class FirecrawlApp:
             sitemap_only (Optional[bool]): Only use sitemap.xml
             limit (Optional[int]): Maximum URLs to return
             timeout (Optional[int]): Request timeout in milliseconds
-            params (Optional[MapParams]): Additional mapping parameters
+            **kwargs: Additional parameters to pass to the API
 
         Returns:
             MapResponse: Response containing:
@@ -1099,10 +1101,11 @@ class FirecrawlApp:
         Raises:
             Exception: If mapping fails or response cannot be parsed
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "map_url")
+
         # Build map parameters
         map_params = {}
-        if params:
-            map_params.update(params.dict(exclude_none=True))
 
         # Add individual parameters
         if search is not None:
@@ -1118,6 +1121,9 @@ class FirecrawlApp:
         if timeout is not None:
             map_params['timeout'] = timeout
 
+        # Add any additional kwargs
+        map_params.update(kwargs)
+
         # Create final params object
         final_params = MapParams(**map_params)
         params_dict = final_params.dict(exclude_none=True)
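Keywords therefore pass through two gates on `map_url`: `_validate_kwargs` screens the raw names, then `MapParams(**map_params)` applies Pydantic validation to the merged dict. Typical usage after this change (sketch):

from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key
links = app.map_url("https://example.com", sitemap_only=True, limit=100)
# A near-miss such as subdomains=True (instead of include_subdomains)
# now raises ValueError before any request is made.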
@@ -1205,6 +1211,9 @@ class FirecrawlApp:
         Raises:
             Exception: If batch scrape fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "batch_scrape_urls")
+
         scrape_params = {}
 
         # Add individual parameters
@@ -1328,6 +1337,9 @@ class FirecrawlApp:
         Raises:
             Exception: If job initiation fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "async_batch_scrape_urls")
+
         scrape_params = {}
 
         # Add individual parameters
@@ -1446,6 +1458,9 @@ class FirecrawlApp:
         Raises:
             Exception: If batch scrape job fails to start
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "batch_scrape_urls_and_watch")
+
         scrape_params = {}
 
         # Add individual parameters
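All three batch entry points validate against the same parameter list, so the calling convention is uniform across them. A sketch (URLs and formats are illustrative):

from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key
results = app.batch_scrape_urls(
    ["https://example.com", "https://firecrawl.dev"],
    formats=["markdown"],
)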
@@ -1849,24 +1864,33 @@ class FirecrawlApp:
             show_full_text=show_full_text,
             experimental_stream=experimental_stream
         )
-
-
+
+        if not response.success or not response.id:
+            return GenerateLLMsTextStatusResponse(
+                success=False,
+                error='Failed to start LLMs.txt generation',
+                status='failed',
+                expiresAt=''
+            )
 
-        job_id = response
+        job_id = response.id
         while True:
             status = self.check_generate_llms_text_status(job_id)
 
-            if status
+            if status.status == 'completed':
                 return status
-            elif status
-
-            elif status
-
+            elif status.status == 'failed':
+                return status
+            elif status.status != 'processing':
+                return GenerateLLMsTextStatusResponse(
+                    success=False,
+                    error='LLMs.txt generation job terminated unexpectedly',
+                    status='failed',
+                    expiresAt=''
+                )
 
             time.sleep(2)  # Polling interval
 
-        return {'success': False, 'error': 'LLMs.txt generation job terminated unexpectedly'}
-
     def async_generate_llms_text(
         self,
         url: str,
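After this rewrite, `generate_llms_text` always hands back a `GenerateLLMsTextStatusResponse`, never a bare dict, and polls every two seconds until the job leaves the 'processing' state. Handling the typed result might look like this (sketch; `app` as in the earlier examples):

status = app.generate_llms_text("https://example.com")
if status.success and status.status == 'completed':
    ...  # consume the generated llms.txt content
else:
    print(status.error)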
@@ -1903,10 +1927,13 @@ class FirecrawlApp:
         json_data['origin'] = f"python-sdk@{version}"
 
         try:
-
-
+            req = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
+            response = req.json()
+            print("json_data", json_data)
+            print("response", response)
+            if response.get('success'):
                 try:
-                    return response
+                    return GenerateLLMsTextResponse(**response)
                 except:
                     raise Exception('Failed to parse Firecrawl response as JSON.')
             else:
@@ -1914,7 +1941,10 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e))
 
-        return
+        return GenerateLLMsTextResponse(
+            success=False,
+            error='Internal server error'
+        )
 
     def check_generate_llms_text_status(self, id: str) -> GenerateLLMsTextStatusResponse:
         """
@@ -1941,9 +1971,10 @@ class FirecrawlApp:
             response = self._get_request(f'{self.api_url}/v1/llmstxt/{id}', headers)
             if response.status_code == 200:
                 try:
-
-
-
+                    json_data = response.json()
+                    return GenerateLLMsTextStatusResponse(**json_data)
+                except Exception as e:
+                    raise Exception(f'Failed to parse Firecrawl response as GenerateLLMsTextStatusResponse: {str(e)}')
             elif response.status_code == 404:
                 raise Exception('LLMs.txt generation job not found')
             else:
@@ -1951,7 +1982,7 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e))
 
-        return
+        return GenerateLLMsTextStatusResponse(success=False, error='Internal server error', status='failed', expiresAt='')
 
     def _prepare_headers(
         self,
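The non-blocking path gets the same typing: `async_generate_llms_text` now returns a `GenerateLLMsTextResponse` whose `id` feeds `check_generate_llms_text_status`. A sketch, with `app` as before:

job = app.async_generate_llms_text("https://example.com")
if job.success:
    status = app.check_generate_llms_text_status(job.id)
    print(status.status)  # 'processing', 'completed', or 'failed'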
@@ -2378,6 +2409,56 @@ class FirecrawlApp:
 
         return {'success': False, 'error': 'Internal server error'}
 
+    def _validate_kwargs(self, kwargs: Dict[str, Any], method_name: str) -> None:
+        """
+        Validate additional keyword arguments before they are passed to the API.
+        This provides early validation before the Pydantic model validation.
+
+        Args:
+            kwargs (Dict[str, Any]): Additional keyword arguments to validate
+            method_name (str): Name of the method these kwargs are for
+
+        Raises:
+            ValueError: If kwargs contain invalid or unsupported parameters
+        """
+        if not kwargs:
+            return
+
+        # Known parameter mappings for each method
+        method_params = {
+            "scrape_url": {"formats", "include_tags", "exclude_tags", "only_main_content", "wait_for",
+                           "timeout", "location", "mobile", "skip_tls_verification", "remove_base64_images",
+                           "block_ads", "proxy", "extract", "json_options", "actions"},
+            "search": {"limit", "tbs", "filter", "lang", "country", "location", "timeout", "scrape_options"},
+            "crawl_url": {"include_paths", "exclude_paths", "max_depth", "max_discovery_depth", "limit",
+                          "allow_backward_links", "allow_external_links", "ignore_sitemap", "scrape_options",
+                          "webhook", "deduplicate_similar_urls", "ignore_query_parameters", "regex_on_full_url"},
+            "map_url": {"search", "ignore_sitemap", "include_subdomains", "sitemap_only", "limit", "timeout"},
+            "batch_scrape_urls": {"formats", "headers", "include_tags", "exclude_tags", "only_main_content",
+                                  "wait_for", "timeout", "location", "mobile", "skip_tls_verification",
+                                  "remove_base64_images", "block_ads", "proxy", "extract", "json_options",
+                                  "actions", "agent"},
+            "async_batch_scrape_urls": {"formats", "headers", "include_tags", "exclude_tags", "only_main_content",
+                                        "wait_for", "timeout", "location", "mobile", "skip_tls_verification",
+                                        "remove_base64_images", "block_ads", "proxy", "extract", "json_options",
+                                        "actions", "agent"},
+            "batch_scrape_urls_and_watch": {"formats", "headers", "include_tags", "exclude_tags", "only_main_content",
+                                            "wait_for", "timeout", "location", "mobile", "skip_tls_verification",
+                                            "remove_base64_images", "block_ads", "proxy", "extract", "json_options",
+                                            "actions", "agent"}
+        }
+
+        # Get allowed parameters for this method
+        allowed_params = method_params.get(method_name, set())
+
+        # Check for unknown parameters
+        unknown_params = set(kwargs.keys()) - allowed_params
+        if unknown_params:
+            raise ValueError(f"Unsupported parameter(s) for {method_name}: {', '.join(unknown_params)}. Please refer to the API documentation for the correct parameters.")
+
+        # Additional type validation can be added here if needed
+        # For now, we rely on Pydantic models for detailed type validation
+
 class CrawlWatcher:
     """
     A class to watch and handle crawl job events via WebSocket connection.
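At its core the helper is a plain set difference against a per-method allow-list; a method missing from the table gets an empty set, so every extra keyword is rejected. A simplified sketch of the check it performs:

# map_url's allow-list, as defined in method_params above
allowed = {"search", "ignore_sitemap", "include_subdomains", "sitemap_only", "limit", "timeout"}
supplied = {"limit": 10, "sitemaponly": True}
unknown = set(supplied) - allowed
# unknown == {'sitemaponly'} -> the real helper raises
# ValueError("Unsupported parameter(s) for map_url: sitemaponly. ...")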
@@ -2694,7 +2775,8 @@ class AsyncFirecrawlApp(FirecrawlApp):
     async def scrape_url(
         self,
         url: str,
-
+        *,
+        formats: Optional[List[Literal["markdown", "html", "rawHtml", "content", "links", "screenshot", "screenshot@fullPage", "extract", "json", "changeTracking"]]] = None,
         include_tags: Optional[List[str]] = None,
         exclude_tags: Optional[List[str]] = None,
         only_main_content: Optional[bool] = None,
@@ -2708,9 +2790,10 @@ class AsyncFirecrawlApp(FirecrawlApp):
         proxy: Optional[Literal["basic", "stealth"]] = None,
         extract: Optional[JsonConfig] = None,
         json_options: Optional[JsonConfig] = None,
-        actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None
+        actions: Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]] = None,
+        **kwargs) -> ScrapeResponse[Any]:
         """
-        Scrape
+        Scrape a single URL asynchronously.
 
         Args:
             url (str): Target URL to scrape
@@ -2729,17 +2812,26 @@ class AsyncFirecrawlApp(FirecrawlApp):
             extract (Optional[JsonConfig]): Content extraction settings
             json_options (Optional[JsonConfig]): JSON extraction settings
             actions (Optional[List[Union[WaitAction, ScreenshotAction, ClickAction, WriteAction, PressAction, ScrollAction, ScrapeAction, ExecuteJavascriptAction]]]): Actions to perform
+            **kwargs: Additional parameters to pass to the API
 
         Returns:
-
-
-
-
-
+            ScrapeResponse with:
+            * success - Whether scrape was successful
+            * markdown - Markdown content if requested
+            * html - HTML content if requested
+            * rawHtml - Raw HTML content if requested
+            * links - Extracted links if requested
+            * screenshot - Screenshot if requested
+            * extract - Extracted data if requested
+            * json - JSON data if requested
+            * error - Error message if scrape failed
 
         Raises:
-
+            Exception: If scraping fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "scrape_url")
+
         headers = self._prepare_headers()
 
         # Build scrape parameters
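With the bare `*` in place, every option on the async `scrape_url` is keyword-only, and unknown keywords are validated before the request is built. A sketch (placeholder key again):

import asyncio

from firecrawl import AsyncFirecrawlApp

async def main():
    app = AsyncFirecrawlApp(api_key="fc-YOUR-API-KEY")  # placeholder key
    page = await app.scrape_url("https://example.com", formats=["markdown", "links"])
    print(page.markdown)

asyncio.run(main())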
@@ -2863,6 +2955,9 @@ class AsyncFirecrawlApp(FirecrawlApp):
         Raises:
             Exception: If batch scrape fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "batch_scrape_urls")
+
         scrape_params = {}
 
         # Add individual parameters
@@ -2991,6 +3086,9 @@ class AsyncFirecrawlApp(FirecrawlApp):
         Raises:
             Exception: If job initiation fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "async_batch_scrape_urls")
+
         scrape_params = {}
 
         # Add individual parameters
@@ -3110,6 +3208,9 @@ class AsyncFirecrawlApp(FirecrawlApp):
         Raises:
             Exception: If crawl fails
         """
+        # Validate any additional kwargs
+        self._validate_kwargs(kwargs, "crawl_url")
+
         crawl_params = {}
 
         # Add individual parameters
{firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/RECORD
CHANGED
@@ -1,12 +1,12 @@
-firecrawl/__init__.py,sha256=
-firecrawl/firecrawl.py,sha256=
+firecrawl/__init__.py,sha256=FlLNQdG6xpLs8ppLhPSF-bLx9L3o_A8gmU2UzAACSv8,2570
+firecrawl/firecrawl.py,sha256=wyxYLkEKiW9GO4PKNElewsOJOJzBq_hHIDTl3nd5j94,182693
 firecrawl/__tests__/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 firecrawl/__tests__/e2e_withAuth/test.py,sha256=-Fq2vPcMo0iQi4dwsUkkCd931ybDaTxMBnZbRfGdDcA,7931
 firecrawl/__tests__/v1/e2e_withAuth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 firecrawl/__tests__/v1/e2e_withAuth/test.py,sha256=DcCw-cohtnL-t9XPekUtRoQrgg3UCWu8Ikqudf9ory8,19880
 tests/test_change_tracking.py,sha256=_IJ5ShLcoj2fHDBaw-nE4I4lHdmDB617ocK_XMHhXps,4177
-firecrawl-2.
-firecrawl-2.
-firecrawl-2.
-firecrawl-2.
-firecrawl-2.
+firecrawl-2.2.0.dist-info/LICENSE,sha256=nPCunEDwjRGHlmjvsiDUyIWbkqqyj3Ej84ntnh0g0zA,1084
+firecrawl-2.2.0.dist-info/METADATA,sha256=b4f32c9eyvFDwj11eOUf0KKy1fCsZTKfJFN-eRH_ohA,10583
+firecrawl-2.2.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+firecrawl-2.2.0.dist-info/top_level.txt,sha256=8T3jOaSN5mtLghO-R3MQ8KO290gIX8hmfxQmglBPdLE,16
+firecrawl-2.2.0.dist-info/RECORD,,
{firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/LICENSE
File without changes
{firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/WHEEL
File without changes
{firecrawl-2.1.1.dist-info → firecrawl-2.2.0.dist-info}/top_level.txt
File without changes