firecrawl 4.12.0__py3-none-any.whl
This diff represents the content of a publicly available package version released to a supported registry. It is provided for informational purposes only and reflects the package as it appears in the public registry; every file listed below is an addition (+N -0) in 4.12.0.
- firecrawl/__init__.py +87 -0
- firecrawl/__tests__/e2e/v2/aio/conftest.py +62 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_batch_scrape.py +69 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_crawl.py +189 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_extract.py +39 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_map.py +41 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_scrape.py +138 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_search.py +249 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_usage.py +42 -0
- firecrawl/__tests__/e2e/v2/aio/test_aio_watcher.py +43 -0
- firecrawl/__tests__/e2e/v2/conftest.py +73 -0
- firecrawl/__tests__/e2e/v2/test_async.py +73 -0
- firecrawl/__tests__/e2e/v2/test_batch_scrape.py +106 -0
- firecrawl/__tests__/e2e/v2/test_crawl.py +278 -0
- firecrawl/__tests__/e2e/v2/test_extract.py +55 -0
- firecrawl/__tests__/e2e/v2/test_map.py +61 -0
- firecrawl/__tests__/e2e/v2/test_scrape.py +191 -0
- firecrawl/__tests__/e2e/v2/test_search.py +270 -0
- firecrawl/__tests__/e2e/v2/test_usage.py +26 -0
- firecrawl/__tests__/e2e/v2/test_watcher.py +65 -0
- firecrawl/__tests__/unit/test_recursive_schema_v1.py +1209 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_aio_crawl_params.py +12 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_aio_crawl_request_preparation.py +79 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_aio_crawl_validation.py +12 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_aio_map_request_preparation.py +20 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_aio_scrape_request_preparation.py +50 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_aio_search_request_preparation.py +64 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_batch_request_preparation_async.py +28 -0
- firecrawl/__tests__/unit/v2/methods/aio/test_ensure_async.py +117 -0
- firecrawl/__tests__/unit/v2/methods/test_agent.py +367 -0
- firecrawl/__tests__/unit/v2/methods/test_agent_request_preparation.py +226 -0
- firecrawl/__tests__/unit/v2/methods/test_batch_request_preparation.py +90 -0
- firecrawl/__tests__/unit/v2/methods/test_branding.py +214 -0
- firecrawl/__tests__/unit/v2/methods/test_crawl_params.py +70 -0
- firecrawl/__tests__/unit/v2/methods/test_crawl_request_preparation.py +240 -0
- firecrawl/__tests__/unit/v2/methods/test_crawl_validation.py +107 -0
- firecrawl/__tests__/unit/v2/methods/test_map_request_preparation.py +54 -0
- firecrawl/__tests__/unit/v2/methods/test_pagination.py +671 -0
- firecrawl/__tests__/unit/v2/methods/test_scrape_request_preparation.py +109 -0
- firecrawl/__tests__/unit/v2/methods/test_search_request_preparation.py +169 -0
- firecrawl/__tests__/unit/v2/methods/test_search_validation.py +236 -0
- firecrawl/__tests__/unit/v2/methods/test_usage_types.py +18 -0
- firecrawl/__tests__/unit/v2/methods/test_webhook.py +123 -0
- firecrawl/__tests__/unit/v2/utils/test_metadata_extras.py +94 -0
- firecrawl/__tests__/unit/v2/utils/test_metadata_extras_multivalue.py +22 -0
- firecrawl/__tests__/unit/v2/utils/test_recursive_schema.py +1133 -0
- firecrawl/__tests__/unit/v2/utils/test_validation.py +311 -0
- firecrawl/__tests__/unit/v2/watcher/test_ws_watcher.py +332 -0
- firecrawl/client.py +281 -0
- firecrawl/firecrawl.backup.py +4635 -0
- firecrawl/types.py +167 -0
- firecrawl/v1/__init__.py +14 -0
- firecrawl/v1/client.py +5164 -0
- firecrawl/v2/__init__.py +4 -0
- firecrawl/v2/client.py +967 -0
- firecrawl/v2/client_async.py +408 -0
- firecrawl/v2/methods/agent.py +144 -0
- firecrawl/v2/methods/aio/__init__.py +1 -0
- firecrawl/v2/methods/aio/agent.py +137 -0
- firecrawl/v2/methods/aio/batch.py +188 -0
- firecrawl/v2/methods/aio/crawl.py +351 -0
- firecrawl/v2/methods/aio/extract.py +133 -0
- firecrawl/v2/methods/aio/map.py +65 -0
- firecrawl/v2/methods/aio/scrape.py +33 -0
- firecrawl/v2/methods/aio/search.py +176 -0
- firecrawl/v2/methods/aio/usage.py +89 -0
- firecrawl/v2/methods/batch.py +499 -0
- firecrawl/v2/methods/crawl.py +592 -0
- firecrawl/v2/methods/extract.py +161 -0
- firecrawl/v2/methods/map.py +83 -0
- firecrawl/v2/methods/scrape.py +64 -0
- firecrawl/v2/methods/search.py +215 -0
- firecrawl/v2/methods/usage.py +84 -0
- firecrawl/v2/types.py +1143 -0
- firecrawl/v2/utils/__init__.py +9 -0
- firecrawl/v2/utils/error_handler.py +107 -0
- firecrawl/v2/utils/get_version.py +15 -0
- firecrawl/v2/utils/http_client.py +178 -0
- firecrawl/v2/utils/http_client_async.py +69 -0
- firecrawl/v2/utils/normalize.py +125 -0
- firecrawl/v2/utils/validation.py +692 -0
- firecrawl/v2/watcher.py +301 -0
- firecrawl/v2/watcher_async.py +243 -0
- firecrawl-4.12.0.dist-info/METADATA +234 -0
- firecrawl-4.12.0.dist-info/RECORD +92 -0
- firecrawl-4.12.0.dist-info/WHEEL +5 -0
- firecrawl-4.12.0.dist-info/licenses/LICENSE +21 -0
- firecrawl-4.12.0.dist-info/top_level.txt +2 -0
- tests/test_agent_integration.py +277 -0
- tests/test_api_key_handling.py +44 -0
- tests/test_change_tracking.py +98 -0
- tests/test_timeout_conversion.py +117 -0
--- /dev/null
+++ firecrawl/__tests__/e2e/v2/aio/test_aio_search.py
@@ -0,0 +1,249 @@
import os
import pytest
from dotenv import load_dotenv
from firecrawl import AsyncFirecrawl
from firecrawl.types import (
    SearchData,
    Document,
    ScrapeOptions,
    ScrapeFormats,
    SearchResultWeb,
    SearchResultNews,
    SearchResultImages,
)

load_dotenv()

firecrawl = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))

def _collect_texts(entries):
    texts = []
    for r in entries or []:
        title = getattr(r, 'title', None) if hasattr(r, 'title') else None
        desc = getattr(r, 'description', None) if hasattr(r, 'description') else None
        if title:
            texts.append(str(title).lower())
        if desc:
            texts.append(str(desc).lower())
    return texts

def _is_document(entry) -> bool:
    try:
        from firecrawl.v2.types import Document
        return isinstance(entry, Document) or \
            hasattr(entry, 'markdown') or \
            hasattr(entry, 'html') or \
            hasattr(entry, 'raw_html') or \
            hasattr(entry, 'json') or \
            hasattr(entry, 'screenshot') or \
            hasattr(entry, 'change_tracking') or \
            hasattr(entry, 'summary')
    except Exception:
        return hasattr(entry, 'markdown') or \
            hasattr(entry, 'html') or \
            hasattr(entry, 'raw_html') or \
            hasattr(entry, 'json') or \
            hasattr(entry, 'screenshot') or \
            hasattr(entry, 'change_tracking') or \
            hasattr(entry, 'summary')

@pytest.mark.asyncio
async def test_async_search_minimal_request():
    results = await firecrawl.search(
        query="What is the capital of France?"
    )
    assert isinstance(results, SearchData)
    assert hasattr(results, 'web')
    assert results.web is not None
    assert len(results.web) > 0
    assert hasattr(results, 'news')
    assert results.news is None
    assert hasattr(results, 'images')
    assert results.images is None

    for result in results.web:
        assert isinstance(result, SearchResultWeb)
        assert hasattr(result, 'url')
        assert hasattr(result, 'title')
        assert hasattr(result, 'description')
        assert result.url.startswith('http')
        assert result.title is not None
        assert result.description is not None

    all_text = ' '.join(_collect_texts(results.web))
    assert 'paris' in all_text

    assert results.news is None
    assert results.images is None

@pytest.mark.asyncio
async def test_async_search_with_sources():
    results = await firecrawl.search(
        query="firecrawl",
        sources=["web", "news", "images"],
        limit=3
    )
    assert isinstance(results, SearchData)
    assert results.web is not None
    assert len(results.web) <= 3
    assert isinstance(results.web[0], SearchResultWeb)

    if results.news is not None:
        assert len(results.news) <= 3
        assert isinstance(results.news[0], SearchResultNews)

    if results.images is not None:
        assert len(results.images) <= 3
        assert isinstance(results.images[0], SearchResultImages)

    web_titles = [result.title.lower() for result in results.web]
    web_descriptions = [result.description.lower() for result in results.web]
    all_web_text = ' '.join(web_titles + web_descriptions)
    assert 'firecrawl' in all_web_text

@pytest.mark.asyncio
async def test_async_search_result_structure():
    results = await firecrawl.search(
        query="test query",
        limit=1
    )
    if results.web and len(results.web) > 0:
        result = results.web[0]
        assert hasattr(result, 'url')
        assert hasattr(result, 'title')
        assert hasattr(result, 'description')
        assert isinstance(result.url, str)
        assert isinstance(result.title, str) or result.title is None
        assert isinstance(result.description, str) or result.description is None
        assert result.url.startswith('http')

@pytest.mark.asyncio
async def test_async_search_all_parameters():
    from firecrawl.types import ScrapeOptions, Location, WaitAction
    schema = {
        "type": "object",
        "properties": {
            "title": {"type": "string"},
            "description": {"type": "string"},
            "url": {"type": "string"}
        },
        "required": ["title", "description"]
    }
    results = await firecrawl.search(
        query="artificial intelligence",
        sources=[
            {"type": "web"},
            {"type": "news"}
        ],
        limit=3,
        tbs="qdr:m",
        location="US",
        ignore_invalid_urls=True,
        timeout=60000,
        integration="_e2e-test",
        scrape_options=ScrapeOptions(
            formats=[
                "markdown",
                "html",
                {
                    "type": "json",
                    "prompt": "Extract the title and description from the page",
                    "schema": schema
                },
                {"type": "summary"}
            ],
            headers={"User-Agent": "Firecrawl-Test/1.0"},
            include_tags=["h1", "h2", "p"],
            exclude_tags=["nav", "footer"],
            only_main_content=True,
            wait_for=2000,
            mobile=False,
            skip_tls_verification=False,
            remove_base64_images=True,
            block_ads=True,
            proxy="basic",
            max_age=3600000,
            store_in_cache=True,
            location=Location(
                country="US",
                languages=["en"]
            ),
            actions=[
                WaitAction(milliseconds=1000)
            ]
        )
    )
    assert isinstance(results, SearchData)
    assert hasattr(results, 'web')
    assert hasattr(results, 'news')
    assert hasattr(results, 'images')
    assert results.web is not None
    assert len(results.web) <= 3

    non_doc_entries = [r for r in (results.web or []) if not _is_document(r)]
    if non_doc_entries:
        all_web_text = ' '.join(_collect_texts(non_doc_entries))
        ai_terms = ['artificial', 'intelligence', 'ai', 'machine', 'learning']
        assert any(term in all_web_text for term in ai_terms)

    for result in results.web:
        assert isinstance(result, (SearchResultWeb, Document))
        if isinstance(result, Document):
            assert (result.markdown is not None) or (result.html is not None)
        else:
            assert hasattr(result, 'url')
            assert isinstance(result.url, str) and result.url.startswith('http')

    if results.news is not None:
        assert len(results.news) <= 3
        for result in results.news:
            assert isinstance(result, (SearchResultNews, Document))
            if isinstance(result, Document):
                assert (result.markdown is not None) or (result.html is not None)
            else:
                assert hasattr(result, 'url')
                assert isinstance(result.url, str) and result.url.startswith('http')

    assert results.images is None

@pytest.mark.asyncio
async def test_async_search_formats_flexibility():
    # Test with list format
    results1 = await firecrawl.search(
        query="python programming",
        limit=1,
        scrape_options=ScrapeOptions(
            formats=["markdown"]
        )
    )
    # Test with ScrapeFormats object
    results2 = await firecrawl.search(
        query="python programming",
        limit=1,
        scrape_options=ScrapeOptions(
            formats=ScrapeFormats(markdown=True)
        )
    )
    assert isinstance(results1, SearchData)
    assert isinstance(results2, SearchData)
    assert results1.web is not None
    assert results2.web is not None

@pytest.mark.asyncio
async def test_async_search_with_json_format_object():
    json_schema = {
        "type": "object",
        "properties": {
            "title": {"type": "string"}
        },
        "required": ["title"],
    }
    results = await firecrawl.search(
        query="site:docs.firecrawl.dev",
        limit=1,
        scrape_options=ScrapeOptions(
            formats=[{"type": "json", "prompt": "Extract page title", "schema": json_schema}]
        ),
    )
    assert isinstance(results, SearchData)
    assert results.web is not None and len(results.web) >= 0
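
Aside (not part of the package diff): the file above doubles as documentation for the async search surface. A minimal standalone sketch, assuming API_KEY and API_URL are set in the environment, using only the calls these tests exercise:

# Illustrative sketch, not shipped in the wheel; same calls as the tests above.
import asyncio
import os

from firecrawl import AsyncFirecrawl

async def main():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    # sources and limit behave as in test_async_search_with_sources
    results = await client.search(query="firecrawl", sources=["web", "news"], limit=3)
    for hit in results.web or []:
        print(hit.url, "-", hit.title)

asyncio.run(main())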
--- /dev/null
+++ firecrawl/__tests__/e2e/v2/aio/test_aio_usage.py
@@ -0,0 +1,42 @@
import os
import pytest
from dotenv import load_dotenv
from firecrawl import AsyncFirecrawl


load_dotenv()

if not os.getenv("API_KEY"):
    raise ValueError("API_KEY is not set")

if not os.getenv("API_URL"):
    raise ValueError("API_URL is not set")


@pytest.mark.asyncio
async def test_async_get_concurrency():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    conc = await client.get_concurrency()
    assert hasattr(conc, "concurrency") and hasattr(conc, "max_concurrency")


@pytest.mark.asyncio
async def test_async_get_credit_usage():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    credits = await client.get_credit_usage()
    assert hasattr(credits, "remaining_credits")


@pytest.mark.asyncio
async def test_async_get_token_usage():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    tokens = await client.get_token_usage()
    assert hasattr(tokens, "remaining_tokens")


@pytest.mark.asyncio
async def test_async_get_queue_status():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    status = await client.get_queue_status()
    assert hasattr(status, "jobs_in_queue")
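
Aside (not part of the package diff): outside pytest, the same four usage endpoints can be queried in one pass; the attribute names below are the ones the tests assert on:

# Illustrative sketch, not shipped in the wheel.
import asyncio
import os

from firecrawl import AsyncFirecrawl

async def main():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    conc = await client.get_concurrency()
    credits = await client.get_credit_usage()
    tokens = await client.get_token_usage()
    queue = await client.get_queue_status()
    print(f"concurrency: {conc.concurrency}/{conc.max_concurrency}")
    print(f"remaining credits: {credits.remaining_credits}")
    print(f"remaining tokens: {tokens.remaining_tokens}")
    print(f"jobs in queue: {queue.jobs_in_queue}")

asyncio.run(main())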
--- /dev/null
+++ firecrawl/__tests__/e2e/v2/aio/test_aio_watcher.py
@@ -0,0 +1,43 @@
import os
import asyncio
import pytest
from dotenv import load_dotenv
from firecrawl import AsyncFirecrawl
from firecrawl.v2.watcher_async import AsyncWatcher


load_dotenv()

if not os.getenv("API_KEY"):
    raise ValueError("API_KEY is not set")

if not os.getenv("API_URL"):
    raise ValueError("API_URL is not set")


@pytest.mark.asyncio
async def test_async_watcher_crawl_progresses():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    start = await client.start_crawl("https://docs.firecrawl.dev", limit=2)
    statuses = []
    async for snapshot in AsyncWatcher(client, start.id, kind="crawl", timeout=180):
        statuses.append(snapshot.status)
        if snapshot.status in ("completed", "failed"):
            break
    assert statuses and statuses[-1] in ("completed", "failed")


@pytest.mark.asyncio
async def test_async_watcher_batch_progresses():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    start = await client.start_batch_scrape([
        "https://docs.firecrawl.dev",
        "https://firecrawl.dev",
    ], formats=["markdown"], max_concurrency=1)
    statuses = []
    async for snapshot in AsyncWatcher(client, start.id, kind="batch", timeout=240):
        statuses.append(snapshot.status)
        if snapshot.status in ("completed", "failed", "cancelled"):
            break
    assert statuses and statuses[-1] in ("completed", "failed", "cancelled")
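
Aside (not part of the package diff): the async-iteration pattern these tests rely on, lifted out of pytest into a plain script:

# Illustrative sketch, not shipped in the wheel; mirrors test_async_watcher_crawl_progresses.
import asyncio
import os

from firecrawl import AsyncFirecrawl
from firecrawl.v2.watcher_async import AsyncWatcher

async def main():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    start = await client.start_crawl("https://docs.firecrawl.dev", limit=2)
    # AsyncWatcher yields status snapshots until the job reaches a terminal state
    async for snapshot in AsyncWatcher(client, start.id, kind="crawl", timeout=180):
        print(snapshot.status)
        if snapshot.status in ("completed", "failed"):
            break

asyncio.run(main())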
--- /dev/null
+++ firecrawl/__tests__/e2e/v2/conftest.py
@@ -0,0 +1,73 @@
import os
import json
import pytest
import requests
from dotenv import load_dotenv

load_dotenv()

def _idmux(identity_request: dict) -> dict:
    idmux_url = os.getenv("IDMUX_URL")
    if not idmux_url:
        raise EnvironmentError("IDMUX_URL is not set. E2E tests must use idmux for credentials.")
    run_number = int(os.getenv("GITHUB_RUN_NUMBER") or 0)
    payload = {
        "refName": os.getenv("GITHUB_REF_NAME") or "local",
        "runNumber": run_number,
        "concurrency": identity_request.get("concurrency", 100),
        **identity_request,
    }
    resp = requests.post(idmux_url + "/", json=payload)
    resp.raise_for_status()
    return resp.json()

@pytest.fixture(scope="session")
def api_url():
    # Prefer TEST_URL, then FIRECRAWL_API_URL (for parity with JS), then legacy API_URL
    return (
        os.getenv("TEST_URL")
        or os.getenv("FIRECRAWL_API_URL")
        or os.getenv("API_URL")
        or "https://api.firecrawl.dev"
    )

# Resolve identity and export environment at import time so tests that read env at module import succeed
_IDENTITY = None
_API_URL = (
    os.getenv("TEST_URL")
    or os.getenv("FIRECRAWL_API_URL")
    or os.getenv("API_URL")
    or "https://api.firecrawl.dev"
)

_IDMUX_URL = os.getenv("IDMUX_URL")
if _IDMUX_URL:
    run_name = os.getenv("PYTEST_RUN_NAME") or "py-e2e"
    # If IDMUX_URL is set, idmux MUST succeed; do not silently fall back
    _IDENTITY = _idmux({"name": run_name})
    os.environ["API_KEY"] = _IDENTITY.get("apiKey", "")
    os.environ["API_URL"] = _API_URL

@pytest.fixture(scope="session")
def api_identity():
    return _IDENTITY or {"apiKey": os.getenv("API_KEY") or "", "teamId": os.getenv("TEST_TEAM_ID") or os.getenv("TEAM_ID") or ""}

@pytest.fixture(autouse=True)
def _inject_client(request, api_identity, api_url):
    # For class-based tests that rely on self.client, inject a client if missing
    inst = getattr(request, "instance", None)
    if inst is not None and not hasattr(inst, "client"):
        try:
            from firecrawl import Firecrawl
            inst.client = Firecrawl(api_key=api_identity.get("apiKey", ""), api_url=api_url)
        except Exception:
            pass
    # For function-based modules that expect a module-level `firecrawl` symbol
    mod = getattr(request, "module", None)
    if mod is not None and not hasattr(mod, "firecrawl"):
        try:
            from firecrawl import Firecrawl
            setattr(mod, "firecrawl", Firecrawl(api_key=api_identity.get("apiKey", ""), api_url=api_url))
        except Exception:
            pass
--- /dev/null
+++ firecrawl/__tests__/e2e/v2/test_async.py
@@ -0,0 +1,73 @@
import os
import asyncio
import pytest
from dotenv import load_dotenv

from firecrawl import AsyncFirecrawl
from firecrawl.v2.types import Document


load_dotenv()

if not os.getenv("API_KEY"):
    raise ValueError("API_KEY is not set")

if not os.getenv("API_URL"):
    raise ValueError("API_URL is not set")


@pytest.mark.asyncio
async def test_async_scrape_minimal():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    doc = await client.scrape("https://docs.firecrawl.dev")
    assert isinstance(doc, Document)
    # Accept any primary content or alternate outputs
    assert doc.markdown is not None and doc.markdown and len(doc.markdown) > 0


@pytest.mark.asyncio
async def test_async_crawl_start_and_status():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    start = await client.start_crawl("https://docs.firecrawl.dev", limit=2)
    job_id = start.id

    # Poll status until terminal or timeout
    deadline = asyncio.get_event_loop().time() + 180
    status = await client.get_crawl_status(job_id)
    while status.status not in ("completed", "failed") and asyncio.get_event_loop().time() < deadline:
        await asyncio.sleep(2)
        status = await client.get_crawl_status(job_id)

    assert status.status in ("completed", "failed")


@pytest.mark.asyncio
async def test_async_batch_start_and_status():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    start = await client.start_batch_scrape([
        "https://docs.firecrawl.dev",
        "https://firecrawl.dev",
    ], formats=["markdown"], max_concurrency=1)
    job_id = start.id

    deadline = asyncio.get_event_loop().time() + 240
    status = await client.get_batch_scrape_status(job_id)
    while status.status not in ("completed", "failed", "cancelled") and asyncio.get_event_loop().time() < deadline:
        await asyncio.sleep(2)
        status = await client.get_batch_scrape_status(job_id)

    assert status.status in ("completed", "failed", "cancelled")


@pytest.mark.asyncio
async def test_async_usage_minimal():
    client = AsyncFirecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
    conc = await client.get_concurrency()
    assert hasattr(conc, "concurrency") and hasattr(conc, "max_concurrency")

    credits = await client.get_credit_usage()
    assert hasattr(credits, "remaining_credits")

    tokens = await client.get_token_usage()
    assert hasattr(tokens, "remaining_tokens")
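
Aside (not part of the package diff): the two status tests above repeat the same poll-until-terminal loop. It factors into a small helper; here get_status is any zero-argument callable returning the status coroutine, an assumption of this sketch rather than a helper shipped in the package:

# Illustrative sketch, not shipped in the wheel; generalizes the loops above.
import asyncio

async def wait_for_terminal(get_status, terminal=("completed", "failed"), timeout=180, interval=2):
    deadline = asyncio.get_event_loop().time() + timeout
    status = await get_status()
    while status.status not in terminal and asyncio.get_event_loop().time() < deadline:
        await asyncio.sleep(interval)
        status = await get_status()
    return status

# e.g. status = await wait_for_terminal(lambda: client.get_crawl_status(job_id))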
--- /dev/null
+++ firecrawl/__tests__/e2e/v2/test_batch_scrape.py
@@ -0,0 +1,106 @@
import os
import pytest
from dotenv import load_dotenv
from firecrawl import Firecrawl
from firecrawl.v2.types import ScrapeOptions

load_dotenv()

if not os.getenv("API_KEY"):
    raise ValueError("API_KEY is not set")

if not os.getenv("API_URL"):
    raise ValueError("API_URL is not set")


class TestBatchScrapeE2E:
    """End-to-end tests for batch scrape (v2)."""

    def setup_method(self):
        self.client = Firecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))

    def test_batch_scrape_minimal(self):
        """Start a small batch and wait for completion."""
        urls = [
            "https://docs.firecrawl.dev",
            "https://firecrawl.dev",
        ]

        job = self.client.batch_scrape(urls, formats=["markdown"], poll_interval=1, wait_timeout=120)

        assert job.status in ["completed", "failed"]
        assert job.completed >= 0
        assert job.total >= 0
        assert isinstance(job.data, list)

    def test_start_batch_minimal_and_status(self):
        """Start via start_batch_scrape (minimal), then fetch status once."""
        urls = [
            "https://docs.firecrawl.dev",
            "https://firecrawl.dev",
        ]

        start_resp = self.client.start_batch_scrape(urls, formats=["markdown"], ignore_invalid_urls=True)
        assert start_resp.id is not None
        assert start_resp.url is not None

        job = self.client.get_batch_scrape_status(start_resp.id)
        assert job.status in ["scraping", "completed", "failed"]
        assert job.total >= 0

    def test_wait_batch_with_all_params(self):
        """Blocking waiter with JSON and changeTracking formats plus many options."""
        urls = [
            "https://docs.firecrawl.dev",
            "https://firecrawl.dev",
        ]

        json_schema = {
            "type": "object",
            "properties": {
                "title": {"type": "string"}
            },
            "required": ["title"],
        }

        opts = ScrapeOptions(
            formats=[
                "markdown",
                {"type": "json", "prompt": "Extract page title", "schema": json_schema},
                {"type": "changeTracking", "prompt": "Track changes", "modes": ["json"]},
            ],
            only_main_content=True,
            mobile=False,
        )

        job = self.client.batch_scrape(
            urls,
            formats=opts.formats,
            only_main_content=opts.only_main_content,
            mobile=opts.mobile,
            ignore_invalid_urls=True,
            max_concurrency=2,
            zero_data_retention=False,
            poll_interval=1,
            wait_timeout=180,
            integration="_e2e-test",
        )

        assert job.status in ["completed", "failed"]
        assert job.completed >= 0
        assert job.total >= 0
        assert isinstance(job.data, list)

    def test_cancel_batch(self):
        """Start a batch and cancel it."""
        urls = [
            "https://docs.firecrawl.dev",
            "https://firecrawl.dev",
        ]

        start_resp = self.client.start_batch_scrape(urls, formats=["markdown"], max_concurrency=1)
        assert start_resp.id is not None

        cancelled = self.client.cancel_batch_scrape(start_resp.id)
        assert cancelled is True
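
Aside (not part of the package diff): the lifecycle the class above covers, start, then status, then cancel, condensed into a synchronous sketch using only methods the tests call:

# Illustrative sketch, not shipped in the wheel.
import os

from firecrawl import Firecrawl

client = Firecrawl(api_key=os.getenv("API_KEY"), api_url=os.getenv("API_URL"))
start = client.start_batch_scrape(
    ["https://docs.firecrawl.dev", "https://firecrawl.dev"],
    formats=["markdown"],
    max_concurrency=1,
)
job = client.get_batch_scrape_status(start.id)
print(job.status, f"{job.completed}/{job.total}")
if job.status == "scraping":
    # cancel_batch_scrape returns True on success, per test_cancel_batch
    client.cancel_batch_scrape(start.id)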