firecrawl-py 3.3.1__py3-none-any.whl → 3.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of firecrawl-py might be problematic; see the registry's advisory page for more details.

Files changed (81):
  1. firecrawl/__init__.py +1 -1
  2. {firecrawl_py-3.3.1.dist-info → firecrawl_py-3.3.2.dist-info}/METADATA +1 -1
  3. firecrawl_py-3.3.2.dist-info/RECORD +79 -0
  4. {firecrawl_py-3.3.1.dist-info → firecrawl_py-3.3.2.dist-info}/top_level.txt +0 -2
  5. build/lib/firecrawl/__init__.py +0 -87
  6. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_batch_scrape.py +0 -79
  7. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_crawl.py +0 -188
  8. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_extract.py +0 -38
  9. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_map.py +0 -40
  10. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_scrape.py +0 -137
  11. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_search.py +0 -248
  12. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_usage.py +0 -35
  13. build/lib/firecrawl/__tests__/e2e/v2/aio/test_aio_watcher.py +0 -43
  14. build/lib/firecrawl/__tests__/e2e/v2/conftest.py +0 -73
  15. build/lib/firecrawl/__tests__/e2e/v2/test_async.py +0 -73
  16. build/lib/firecrawl/__tests__/e2e/v2/test_batch_scrape.py +0 -105
  17. build/lib/firecrawl/__tests__/e2e/v2/test_crawl.py +0 -276
  18. build/lib/firecrawl/__tests__/e2e/v2/test_extract.py +0 -54
  19. build/lib/firecrawl/__tests__/e2e/v2/test_map.py +0 -60
  20. build/lib/firecrawl/__tests__/e2e/v2/test_scrape.py +0 -154
  21. build/lib/firecrawl/__tests__/e2e/v2/test_search.py +0 -269
  22. build/lib/firecrawl/__tests__/e2e/v2/test_usage.py +0 -26
  23. build/lib/firecrawl/__tests__/e2e/v2/test_watcher.py +0 -65
  24. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_aio_crawl_params.py +0 -12
  25. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_aio_crawl_request_preparation.py +0 -61
  26. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_aio_crawl_validation.py +0 -12
  27. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_aio_map_request_preparation.py +0 -19
  28. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_aio_scrape_request_preparation.py +0 -50
  29. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_aio_search_request_preparation.py +0 -63
  30. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_batch_request_preparation_async.py +0 -28
  31. build/lib/firecrawl/__tests__/unit/v2/methods/aio/test_ensure_async.py +0 -117
  32. build/lib/firecrawl/__tests__/unit/v2/methods/test_batch_request_preparation.py +0 -90
  33. build/lib/firecrawl/__tests__/unit/v2/methods/test_crawl_params.py +0 -70
  34. build/lib/firecrawl/__tests__/unit/v2/methods/test_crawl_request_preparation.py +0 -240
  35. build/lib/firecrawl/__tests__/unit/v2/methods/test_crawl_validation.py +0 -107
  36. build/lib/firecrawl/__tests__/unit/v2/methods/test_map_request_preparation.py +0 -53
  37. build/lib/firecrawl/__tests__/unit/v2/methods/test_scrape_request_preparation.py +0 -92
  38. build/lib/firecrawl/__tests__/unit/v2/methods/test_search_request_preparation.py +0 -167
  39. build/lib/firecrawl/__tests__/unit/v2/methods/test_search_validation.py +0 -236
  40. build/lib/firecrawl/__tests__/unit/v2/methods/test_usage_types.py +0 -18
  41. build/lib/firecrawl/__tests__/unit/v2/methods/test_webhook.py +0 -123
  42. build/lib/firecrawl/__tests__/unit/v2/utils/test_validation.py +0 -290
  43. build/lib/firecrawl/__tests__/unit/v2/watcher/test_ws_watcher.py +0 -332
  44. build/lib/firecrawl/client.py +0 -242
  45. build/lib/firecrawl/firecrawl.backup.py +0 -4635
  46. build/lib/firecrawl/types.py +0 -161
  47. build/lib/firecrawl/v1/__init__.py +0 -14
  48. build/lib/firecrawl/v1/client.py +0 -4653
  49. build/lib/firecrawl/v2/__init__.py +0 -4
  50. build/lib/firecrawl/v2/client.py +0 -805
  51. build/lib/firecrawl/v2/client_async.py +0 -250
  52. build/lib/firecrawl/v2/methods/aio/__init__.py +0 -1
  53. build/lib/firecrawl/v2/methods/aio/batch.py +0 -85
  54. build/lib/firecrawl/v2/methods/aio/crawl.py +0 -171
  55. build/lib/firecrawl/v2/methods/aio/extract.py +0 -126
  56. build/lib/firecrawl/v2/methods/aio/map.py +0 -59
  57. build/lib/firecrawl/v2/methods/aio/scrape.py +0 -33
  58. build/lib/firecrawl/v2/methods/aio/search.py +0 -172
  59. build/lib/firecrawl/v2/methods/aio/usage.py +0 -42
  60. build/lib/firecrawl/v2/methods/batch.py +0 -417
  61. build/lib/firecrawl/v2/methods/crawl.py +0 -469
  62. build/lib/firecrawl/v2/methods/extract.py +0 -131
  63. build/lib/firecrawl/v2/methods/map.py +0 -77
  64. build/lib/firecrawl/v2/methods/scrape.py +0 -64
  65. build/lib/firecrawl/v2/methods/search.py +0 -197
  66. build/lib/firecrawl/v2/methods/usage.py +0 -41
  67. build/lib/firecrawl/v2/types.py +0 -665
  68. build/lib/firecrawl/v2/utils/__init__.py +0 -9
  69. build/lib/firecrawl/v2/utils/error_handler.py +0 -107
  70. build/lib/firecrawl/v2/utils/get_version.py +0 -15
  71. build/lib/firecrawl/v2/utils/http_client.py +0 -153
  72. build/lib/firecrawl/v2/utils/http_client_async.py +0 -65
  73. build/lib/firecrawl/v2/utils/normalize.py +0 -107
  74. build/lib/firecrawl/v2/utils/validation.py +0 -324
  75. build/lib/firecrawl/v2/watcher.py +0 -301
  76. build/lib/firecrawl/v2/watcher_async.py +0 -242
  77. build/lib/tests/test_change_tracking.py +0 -98
  78. build/lib/tests/test_timeout_conversion.py +0 -117
  79. firecrawl_py-3.3.1.dist-info/RECORD +0 -153
  80. {firecrawl_py-3.3.1.dist-info → firecrawl_py-3.3.2.dist-info}/LICENSE +0 -0
  81. {firecrawl_py-3.3.1.dist-info → firecrawl_py-3.3.2.dist-info}/WHEEL +0 -0
@@ -1,242 +0,0 @@
1
- """
2
- Firecrawl Client
3
-
4
- A Firecrawl client that enables you to scrape content from websites, crawl entire sites, search the web, and extract structured data using AI.
5
-
6
- The client supports both v1 and v2 API versions, providing access to features like:
7
- - Web scraping with advanced options (screenshots, markdown conversion, etc.)
8
- - Site crawling with configurable depth and limits
9
- - Web search with content extraction
10
- - Structured data extraction using AI models
11
- - Deep research capabilities
12
-
13
- Usage:
14
- from firecrawl import Firecrawl
15
- firecrawl = Firecrawl(api_key="your-api-key")
16
- result = firecrawl.scrape("https://example.com")
17
-
18
- Check example.py for other usage examples.
19
- """
20
-
21
- from typing import Any, Dict, Optional, List, Union
22
- import logging
23
-
24
-
25
- from .v1 import V1FirecrawlApp, AsyncV1FirecrawlApp
26
- from .v2 import FirecrawlClient as V2FirecrawlClient
27
- from .v2.client_async import AsyncFirecrawlClient
28
- from .v2.types import Document
29
-
30
- logger = logging.getLogger("firecrawl")
31
-
32
class V1Proxy:
    """Thin facade over the v1 client's public surface.

    When a v1 client is supplied, each forwarded method is bound directly
    onto the proxy instance so call sites keep the client's own signatures
    and docstrings.
    """

    _client: Optional[V1FirecrawlApp]

    # v1 methods surfaced directly on the proxy.
    _FORWARDED_METHODS = (
        "scrape_url",
        "crawl_url",
        "batch_scrape_urls",
        "async_batch_scrape_urls",
        "async_crawl_url",
        "check_crawl_status",
        "map_url",
        "extract",
        "deep_research",
        "generate_llms_text",
    )

    def __init__(self, client_instance: Optional[V1FirecrawlApp]):
        self._client = client_instance

        if client_instance:
            for method_name in self._FORWARDED_METHODS:
                setattr(self, method_name, getattr(client_instance, method_name))
50
-
51
class V2Proxy:
    """Proxy that forwards method calls to the underlying v2 client.

    Commonly used v2 methods are bound eagerly in ``__init__``; anything
    else is resolved lazily through ``__getattr__``.
    """

    _client: Optional[V2FirecrawlClient]

    def __init__(self, client_instance: Optional[V2FirecrawlClient]):
        """Bind the v2 client and eagerly expose its common methods.

        Args:
            client_instance: The v2 client to forward to, or ``None``.
        """
        self._client = client_instance

        if client_instance:
            self.search = client_instance.search
            self.crawl = client_instance.crawl
            self.get_crawl_status = client_instance.get_crawl_status
            self.cancel_crawl = client_instance.cancel_crawl
            self.start_crawl = client_instance.start_crawl
            self.crawl_params_preview = client_instance.crawl_params_preview
            self.extract = client_instance.extract
            self.start_batch_scrape = client_instance.start_batch_scrape
            self.get_batch_scrape_status = client_instance.get_batch_scrape_status
            self.cancel_batch_scrape = client_instance.cancel_batch_scrape
            self.batch_scrape = client_instance.batch_scrape
            self.get_batch_scrape_errors = client_instance.get_batch_scrape_errors
            self.get_extract_status = client_instance.get_extract_status
            self.map = client_instance.map
            self.get_concurrency = client_instance.get_concurrency
            self.get_credit_usage = client_instance.get_credit_usage
            self.get_token_usage = client_instance.get_token_usage

    def __getattr__(self, name):
        """Forward attribute access to the underlying client.

        Raises:
            AttributeError: if no v2 client was provided, or the client
                itself lacks the requested attribute.
        """
        # Guard against getattr(None, ...), which would raise a confusing
        # "'NoneType' object has no attribute ..." error; this mirrors the
        # explicit check in AsyncV2Proxy.__getattr__.
        if self._client is None:
            raise AttributeError(f"V2 client is not initialized: {name}")
        return getattr(self._client, name)
81
-
82
class AsyncV1Proxy:
    """Thin facade over the async v1 client's public surface.

    When an async v1 client is supplied, each forwarded coroutine method is
    bound directly onto the proxy instance.
    """

    _client: Optional[AsyncV1FirecrawlApp]

    # Async v1 methods surfaced directly on the proxy.
    _FORWARDED_METHODS = (
        "scrape_url",
        "crawl_url",
        "batch_scrape_urls",
        "async_batch_scrape_urls",
        "async_crawl_url",
        "check_crawl_status",
        "map_url",
        "extract",
        "deep_research",
        "generate_llms_text",
    )

    def __init__(self, client_instance: Optional[AsyncV1FirecrawlApp]):
        self._client = client_instance

        if client_instance:
            for method_name in self._FORWARDED_METHODS:
                setattr(self, method_name, getattr(client_instance, method_name))
100
-
101
class AsyncV2Proxy:
    """Proxy that forwards method calls to the async v2 client.

    Known methods are bound eagerly when a client is supplied; anything else
    resolves lazily via ``__getattr__``.
    """

    # NOTE(review): typed Optional[Any] rather than the async client class —
    # presumably to avoid an import cycle; confirm before tightening.
    _client: Optional[Any] = None

    # Async v2 methods bound eagerly when a client is supplied.
    _FORWARDED_METHODS = (
        "scrape",
        "search",
        "crawl",
        "start_crawl",
        "wait_crawl",
        "get_crawl_status",
        "cancel_crawl",
        "get_crawl_errors",
        "get_active_crawls",
        "active_crawls",
        "crawl_params_preview",
        "extract",
        "start_extract",
        "get_extract_status",
        "start_batch_scrape",
        "get_batch_scrape_status",
        "cancel_batch_scrape",
        "wait_batch_scrape",
        "batch_scrape",
        "get_batch_scrape_errors",
        "map",
        "get_concurrency",
        "get_credit_usage",
        "get_token_usage",
        "watcher",
    )

    def __init__(self, client_instance: Optional[Any] = None):
        self._client = client_instance

        if client_instance:
            for method_name in self._FORWARDED_METHODS:
                setattr(self, method_name, getattr(client_instance, method_name))

    def __getattr__(self, name):
        """Forward attribute access to the underlying client."""
        if self._client:
            return getattr(self._client, name)
        raise AttributeError(f"Async v2 client not implemented yet: {name}")
143
-
144
-
145
class Firecrawl:
    """
    Unified Firecrawl client (v2 by default, v1 under ``.v1``).

    Provides a single entrypoint that exposes the latest API directly while
    keeping a feature-frozen v1 available for incremental migration.
    """

    def __init__(self, api_key: Optional[str] = None, api_url: str = "https://api.firecrawl.dev"):
        """Initialize the unified client.

        Args:
            api_key: Firecrawl API key (or set ``FIRECRAWL_API_KEY``)
            api_url: Base API URL (defaults to production)
        """
        self.api_key = api_key
        self.api_url = api_url

        # Initialize version-specific clients. The truthiness guards keep
        # construction safe if either client class is unavailable.
        self._v1_client = V1FirecrawlApp(api_key=api_key, api_url=api_url) if V1FirecrawlApp else None
        self._v2_client = V2FirecrawlClient(api_key=api_key, api_url=api_url) if V2FirecrawlClient else None

        # Create version-specific proxies
        self.v1 = V1Proxy(self._v1_client) if self._v1_client else None
        self.v2 = V2Proxy(self._v2_client)

        # Expose the v2 surface directly on the top-level client for
        # ergonomic access.
        # NOTE(review): these bindings assume _v2_client is never None even
        # though the construction above guards for it — confirm intent.
        self.scrape = self._v2_client.scrape
        self.crawl = self._v2_client.crawl
        self.start_crawl = self._v2_client.start_crawl
        self.crawl_params_preview = self._v2_client.crawl_params_preview
        self.get_crawl_status = self._v2_client.get_crawl_status
        self.cancel_crawl = self._v2_client.cancel_crawl
        self.get_crawl_errors = self._v2_client.get_crawl_errors
        self.active_crawls = self._v2_client.active_crawls

        self.start_batch_scrape = self._v2_client.start_batch_scrape
        self.get_batch_scrape_status = self._v2_client.get_batch_scrape_status
        self.cancel_batch_scrape = self._v2_client.cancel_batch_scrape
        self.batch_scrape = self._v2_client.batch_scrape
        self.get_batch_scrape_errors = self._v2_client.get_batch_scrape_errors
        self.get_extract_status = self._v2_client.get_extract_status
        self.map = self._v2_client.map
        self.search = self._v2_client.search
        self.extract = self._v2_client.extract
        self.get_concurrency = self._v2_client.get_concurrency
        self.get_credit_usage = self._v2_client.get_credit_usage
        self.get_token_usage = self._v2_client.get_token_usage
        self.watcher = self._v2_client.watcher
194
-
195
class AsyncFirecrawl:
    """Async unified Firecrawl client (v2 by default, v1 under ``.v1``)."""

    def __init__(self, api_key: Optional[str] = None, api_url: str = "https://api.firecrawl.dev"):
        """Initialize the async unified client.

        Args:
            api_key: Firecrawl API key (or set ``FIRECRAWL_API_KEY``)
            api_url: Base API URL (defaults to production)
        """
        self.api_key = api_key
        self.api_url = api_url

        # Initialize version-specific clients. The truthiness guards keep
        # construction safe if either client class is unavailable.
        self._v1_client = AsyncV1FirecrawlApp(api_key=api_key, api_url=api_url) if AsyncV1FirecrawlApp else None
        self._v2_client = AsyncFirecrawlClient(api_key=api_key, api_url=api_url) if AsyncFirecrawlClient else None

        # Create version-specific proxies
        self.v1 = AsyncV1Proxy(self._v1_client) if self._v1_client else None
        self.v2 = AsyncV2Proxy(self._v2_client)

        # Expose v2 async surface directly on the top-level client for ergonomic access
        # Keep method names aligned with the sync client
        self.scrape = self._v2_client.scrape
        self.search = self._v2_client.search
        self.map = self._v2_client.map

        self.start_crawl = self._v2_client.start_crawl
        self.get_crawl_status = self._v2_client.get_crawl_status
        self.cancel_crawl = self._v2_client.cancel_crawl
        self.crawl = self._v2_client.crawl
        self.get_crawl_errors = self._v2_client.get_crawl_errors
        self.active_crawls = self._v2_client.active_crawls
        self.crawl_params_preview = self._v2_client.crawl_params_preview

        self.start_batch_scrape = self._v2_client.start_batch_scrape
        self.get_batch_scrape_status = self._v2_client.get_batch_scrape_status
        self.cancel_batch_scrape = self._v2_client.cancel_batch_scrape
        self.batch_scrape = self._v2_client.batch_scrape
        self.get_batch_scrape_errors = self._v2_client.get_batch_scrape_errors

        self.start_extract = self._v2_client.start_extract
        self.get_extract_status = self._v2_client.get_extract_status
        self.extract = self._v2_client.extract

        self.get_concurrency = self._v2_client.get_concurrency
        self.get_credit_usage = self._v2_client.get_credit_usage
        self.get_token_usage = self._v2_client.get_token_usage

        self.watcher = self._v2_client.watcher
239
-
240
- # Export Firecrawl as an alias for FirecrawlApp
241
- FirecrawlApp = Firecrawl
242
- AsyncFirecrawlApp = AsyncFirecrawl