firecrawl 2.1.0__tar.gz → 2.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: firecrawl
-Version: 2.1.0
+Version: 2.1.2
 Summary: Python SDK for Firecrawl API
 Home-page: https://github.com/mendableai/firecrawl
 Author: Mendable.ai
@@ -13,7 +13,7 @@ import os
 
 from .firecrawl import FirecrawlApp, JsonConfig, ScrapeOptions # noqa
 
-__version__ = "2.1.0"
+__version__ = "2.1.2"
 
 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")
@@ -1849,24 +1849,33 @@ class FirecrawlApp:
             show_full_text=show_full_text,
             experimental_stream=experimental_stream
         )
-        if not response.get('success') or 'id' not in response:
-            return response
+
+        if not response.success or not response.id:
+            return GenerateLLMsTextStatusResponse(
+                success=False,
+                error='Failed to start LLMs.txt generation',
+                status='failed',
+                expiresAt=''
+            )
 
-        job_id = response['id']
+        job_id = response.id
         while True:
             status = self.check_generate_llms_text_status(job_id)
 
-            if status['status'] == 'completed':
+            if status.status == 'completed':
                 return status
-            elif status['status'] == 'failed':
-                raise Exception(f'LLMs.txt generation failed. Error: {status.get("error")}')
-            elif status['status'] != 'processing':
-                break
+            elif status.status == 'failed':
+                return status
+            elif status.status != 'processing':
+                return GenerateLLMsTextStatusResponse(
+                    success=False,
+                    error='LLMs.txt generation job terminated unexpectedly',
+                    status='failed',
+                    expiresAt=''
+                )
 
             time.sleep(2) # Polling interval
 
-        return {'success': False, 'error': 'LLMs.txt generation job terminated unexpectedly'}
-
     def async_generate_llms_text(
         self,
         url: str,
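
A minimal caller-side sketch of the new polling behavior, not part of the package itself. It assumes the hunk above belongs to the synchronous generate_llms_text wrapper (the method name is not visible in this hunk) and that the app is constructed with an API key as usual; failures now come back as GenerateLLMsTextStatusResponse objects with attribute access instead of plain dicts or raised exceptions.

# Usage sketch (assumptions: method name generate_llms_text, placeholder api_key)
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-...")
result = app.generate_llms_text("https://example.com")

if result.success:
    print(result.status)   # 'completed' when the job finished
else:
    print(result.error)    # e.g. 'Failed to start LLMs.txt generation'
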
@@ -1903,10 +1912,13 @@ class FirecrawlApp:
         json_data['origin'] = f"python-sdk@{version}"
 
         try:
-            response = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
-            if response.status_code == 200:
+            req = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
+            response = req.json()
+            print("json_data", json_data)
+            print("response", response)
+            if response.get('success'):
                 try:
-                    return response.json()
+                    return GenerateLLMsTextResponse(**response)
                 except:
                     raise Exception('Failed to parse Firecrawl response as JSON.')
             else:
@@ -1914,7 +1926,10 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e))
 
-        return {'success': False, 'error': 'Internal server error'}
+        return GenerateLLMsTextResponse(
+            success=False,
+            error='Internal server error'
+        )
 
     def check_generate_llms_text_status(self, id: str) -> GenerateLLMsTextStatusResponse:
         """
@@ -1941,9 +1956,10 @@ class FirecrawlApp:
             response = self._get_request(f'{self.api_url}/v1/llmstxt/{id}', headers)
             if response.status_code == 200:
                 try:
-                    return response.json()
-                except:
-                    raise Exception('Failed to parse Firecrawl response as JSON.')
+                    json_data = response.json()
+                    return GenerateLLMsTextStatusResponse(**json_data)
+                except Exception as e:
+                    raise Exception(f'Failed to parse Firecrawl response as GenerateLLMsTextStatusResponse: {str(e)}')
             elif response.status_code == 404:
                 raise Exception('LLMs.txt generation job not found')
             else:
@@ -1951,7 +1967,7 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e))
 
-        return {'success': False, 'error': 'Internal server error'}
+        return GenerateLLMsTextStatusResponse(success=False, error='Internal server error', status='failed', expiresAt='')
 
     def _prepare_headers(
         self,
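
For completeness, a hedged sketch of driving the job manually via async_generate_llms_text plus check_generate_llms_text_status, using only fields visible in this diff (success and id on the start response; status, error, expiresAt on the status response). The 2-second interval mirrors the SDK's own polling and is otherwise an arbitrary choice.

# Usage sketch (assumptions: positional url argument, placeholder api_key)
import time
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-...")
started = app.async_generate_llms_text("https://example.com")  # GenerateLLMsTextResponse

if started.success:
    while True:
        status = app.check_generate_llms_text_status(started.id)
        if status.status != 'processing':
            break
        time.sleep(2)  # same polling interval the SDK uses internally
    print(status.status, status.error)
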
@@ -3153,7 +3169,6 @@ class AsyncFirecrawlApp(FirecrawlApp):
         response = await self._async_post_request(
             f'{self.api_url}/v1/crawl', params_dict, headers)
 
-        print(response)
         if response.get('success'):
             try:
                 id = response.get('id')
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: firecrawl
-Version: 2.1.0
+Version: 2.1.2
 Summary: Python SDK for Firecrawl API
 Home-page: https://github.com/mendableai/firecrawl
 Author: Mendable.ai
@@ -1,2 +1,4 @@
+build
+dist
 firecrawl
 tests
5 files without changes