uk_bin_collection 0.151.0__py3-none-any.whl → 0.152.1__py3-none-any.whl

This diff shows the content of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (44)
  1. uk_bin_collection/tests/input.json +137 -66
  2. uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py +69 -24
  3. uk_bin_collection/uk_bin_collection/councils/AngusCouncil.py +149 -0
  4. uk_bin_collection/uk_bin_collection/councils/BarkingDagenham.py +11 -2
  5. uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py +24 -47
  6. uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py +11 -2
  7. uk_bin_collection/uk_bin_collection/councils/BroadlandDistrictCouncil.py +21 -6
  8. uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py +14 -3
  9. uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +12 -12
  10. uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py +24 -2
  11. uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py +105 -53
  12. uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py +4 -0
  13. uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py +114 -261
  14. uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py +13 -0
  15. uk_bin_collection/uk_bin_collection/councils/DoverDistrictCouncil.py +17 -2
  16. uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py +14 -1
  17. uk_bin_collection/uk_bin_collection/councils/EastbourneBoroughCouncil.py +76 -0
  18. uk_bin_collection/uk_bin_collection/councils/EastleighBoroughCouncil.py +59 -45
  19. uk_bin_collection/uk_bin_collection/councils/EnvironmentFirst.py +2 -0
  20. uk_bin_collection/uk_bin_collection/councils/EppingForestDistrictCouncil.py +47 -15
  21. uk_bin_collection/uk_bin_collection/councils/FermanaghOmaghDistrictCouncil.py +102 -0
  22. uk_bin_collection/uk_bin_collection/councils/GlasgowCityCouncil.py +13 -1
  23. uk_bin_collection/uk_bin_collection/councils/GuildfordCouncil.py +2 -3
  24. uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py +13 -2
  25. uk_bin_collection/uk_bin_collection/councils/HuntingdonDistrictCouncil.py +18 -4
  26. uk_bin_collection/uk_bin_collection/councils/LewesDistrictCouncil.py +76 -0
  27. uk_bin_collection/uk_bin_collection/councils/LiverpoolCityCouncil.py +16 -4
  28. uk_bin_collection/uk_bin_collection/councils/MaidstoneBoroughCouncil.py +42 -47
  29. uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py +13 -6
  30. uk_bin_collection/uk_bin_collection/councils/NorthEastDerbyshireDistrictCouncil.py +61 -39
  31. uk_bin_collection/uk_bin_collection/councils/NorthSomersetCouncil.py +14 -9
  32. uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py +2 -2
  33. uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py +50 -14
  34. uk_bin_collection/uk_bin_collection/councils/SloughBoroughCouncil.py +140 -0
  35. uk_bin_collection/uk_bin_collection/councils/SouthRibbleCouncil.py +115 -65
  36. uk_bin_collection/uk_bin_collection/councils/StokeOnTrentCityCouncil.py +10 -5
  37. uk_bin_collection/uk_bin_collection/councils/TewkesburyBoroughCouncil.py +40 -0
  38. uk_bin_collection/uk_bin_collection/councils/TorbayCouncil.py +1 -3
  39. uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py +3 -0
  40. {uk_bin_collection-0.151.0.dist-info → uk_bin_collection-0.152.1.dist-info}/METADATA +1 -1
  41. {uk_bin_collection-0.151.0.dist-info → uk_bin_collection-0.152.1.dist-info}/RECORD +44 -38
  42. {uk_bin_collection-0.151.0.dist-info → uk_bin_collection-0.152.1.dist-info}/LICENSE +0 -0
  43. {uk_bin_collection-0.151.0.dist-info → uk_bin_collection-0.152.1.dist-info}/WHEEL +0 -0
  44. {uk_bin_collection-0.151.0.dist-info → uk_bin_collection-0.152.1.dist-info}/entry_points.txt +0 -0
uk_bin_collection/uk_bin_collection/councils/AngusCouncil.py (new file)
@@ -0,0 +1,149 @@
+import time
+import re
+from datetime import datetime
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.common.keys import Keys
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            user_postcode = kwargs.get("postcode")
+            if not user_postcode:
+                raise ValueError("No postcode provided.")
+            check_postcode(user_postcode)
+
+            user_uprn = kwargs.get("uprn")
+            check_uprn(user_uprn)
+
+            headless = kwargs.get("headless")
+            web_driver = kwargs.get("web_driver")
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            page = "https://www.angus.gov.uk/bins_litter_and_recycling/bin_collection_days"
+
+            driver.get(page)
+
+            wait = WebDriverWait(driver, 10)
+            accept_cookies_button = wait.until(
+                EC.element_to_be_clickable((By.ID, "ccc-recommended-settings"))
+            )
+            accept_cookies_button.click()
+
+            find_your_collection_button = wait.until(
+                EC.element_to_be_clickable(
+                    (By.XPATH, "/html/body/div[2]/div[2]/div/div/section/div[2]/div/article/div/div/p[2]/a")
+                )
+            )
+            find_your_collection_button.click()
+
+            iframe = wait.until(EC.presence_of_element_located((By.ID, "fillform-frame-1")))
+            driver.switch_to.frame(iframe)
+
+            postcode_input = wait.until(EC.presence_of_element_located((By.ID, "searchString")))
+            postcode_input.send_keys(user_postcode + Keys.TAB + Keys.ENTER)
+
+            time.sleep(15)
+
+            select_elem = wait.until(EC.presence_of_element_located((By.ID, "customerAddress")))
+            WebDriverWait(driver, 10).until(
+                lambda d: len(select_elem.find_elements(By.TAG_NAME, "option")) > 1
+            )
+            dropdown = Select(select_elem)
+            dropdown.select_by_value(user_uprn)
+
+            time.sleep(10)
+
+            wait.until(
+                EC.presence_of_element_located(
+                    (By.CSS_SELECTOR, "span.fieldInput.content.html.non-input"))
+            )
+
+            soup = BeautifulSoup(driver.page_source, "html.parser")
+            bin_data = {"bins": []}
+            current_date = datetime.now()
+            current_formatted_date = None
+
+            spans = soup.select("span.fieldInput.content.html.non-input")
+            print(f"Found {len(spans)} bin info spans.")
+
+            for i, span in enumerate(spans):
+                try:
+                    # Look for any non-empty <u> tag recursively
+                    date_tag = next(
+                        (u for u in span.find_all("u") if u and u.text.strip()),
+                        None
+                    )
+                    bin_type_tag = span.find("b")
+
+                    if date_tag:
+                        raw_date = date_tag.text.strip().replace(",", "")
+                        full_date_str = f"{raw_date} {current_date.year}"
+                        full_date_str = re.sub(r"\s+", " ", full_date_str)
+
+                        try:
+                            parsed_date = datetime.strptime(full_date_str, "%A %d %B %Y")
+                            if parsed_date.date() < current_date.date():
+                                parsed_date = parsed_date.replace(year=current_date.year + 1)
+                            current_formatted_date = parsed_date.strftime("%d/%m/%Y")
+                            print(f"[{i}] Parsed date: {current_formatted_date}")
+                        except ValueError as ve:
+                            print(f"[{i}] Could not parse date: '{full_date_str}' - {ve}")
+                            continue
+                    else:
+                        print(f"[{i}] No date tag found, using last valid date: {current_formatted_date}")
+
+                    if not current_formatted_date:
+                        print(f"[{i}] No current date to associate bin type with — skipping.")
+                        continue
+
+                    if not bin_type_tag or not bin_type_tag.text.strip():
+                        print(f"[{i}] No bin type found — skipping.")
+                        continue
+
+                    bin_type = bin_type_tag.text.strip()
+
+                    # Optional seasonal override
+                    try:
+                        overrides_dict = get_seasonal_overrides()
+                        if current_formatted_date in overrides_dict:
+                            current_formatted_date = overrides_dict[current_formatted_date]
+                    except Exception:
+                        pass
+
+                    print(f"[{i}] Found bin: {bin_type} on {current_formatted_date}")
+
+                    bin_data["bins"].append({
+                        "type": bin_type,
+                        "collectionDate": current_formatted_date
+                    })
+
+                except Exception as inner_e:
+                    print(f"[{i}] Skipping span due to error: {inner_e}")
+                    continue
+
+                except Exception as inner_e:
+                    print(f"Skipping span due to error: {inner_e}")
+                    continue
+
+            if not bin_data["bins"]:
+                raise ValueError("No bin data found.")
+
+            print(bin_data)
+
+            return bin_data
+
+        except Exception as e:
+            print(f"An error occurred: {e}")
+            raise
+
+        finally:
+            if driver:
+                driver.quit()
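Below is a minimal sketch of driving the new Angus scraper directly, assuming the kwargs conventions used by the other council modules in this diff; the postcode and UPRN are made-up placeholders, not a real address.

    from uk_bin_collection.uk_bin_collection.councils.AngusCouncil import CouncilClass

    council = CouncilClass()
    result = council.parse_data(
        "",                   # ignored: parse_data sets its own URL internally
        postcode="DD8 1AA",   # placeholder postcode
        uprn="123456789012",  # placeholder UPRN
        web_driver=None,      # or the URL of a remote Selenium instance
        headless=True,
    )
    print(result["bins"])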
uk_bin_collection/uk_bin_collection/councils/BarkingDagenham.py
@@ -84,10 +84,19 @@ class CouncilClass(AbstractGetBinDataClass):
             EC.element_to_be_clickable((By.ID, "address")),
             message="Address dropdown not found",
         )
+
         dropdown = Select(address_select)
 
-        dropdown.select_by_visible_text(user_paon)
-        print("Address selected successfully")
+        found = False
+        for option in dropdown.options:
+            if user_paon in option.text:
+                option.click()
+                found = True
+                print("Address selected successfully")
+                break
+
+        if not found:
+            raise Exception(f"No matching address containing '{user_paon}' found.")
 
         driver.switch_to.active_element.send_keys(Keys.TAB + Keys.ENTER)
         print("Pressed ENTER on Next button")
uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py
@@ -24,74 +24,51 @@ class CouncilClass(AbstractGetBinDataClass):
     def parse_data(self, page: str, **kwargs) -> dict:
         driver = None
         try:
-            page = "https://waste.bexley.gov.uk/waste"
-
-            data = {"bins": []}
-
             user_uprn = kwargs.get("uprn")
-            user_paon = kwargs.get("paon")
-            user_postcode = kwargs.get("postcode")
             web_driver = kwargs.get("web_driver")
             headless = kwargs.get("headless")
 
+            page = f"https://waste.bexley.gov.uk/waste/{user_uprn}"
+
+            print(f"Trying URL: {page}")  # Debug
+
             # Create Selenium webdriver
             driver = create_webdriver(web_driver, headless, None, __name__)
             driver.get(page)
 
-            wait = WebDriverWait(driver, 10)
-
-            inputElement_postcodesearch = wait.until(
-                EC.element_to_be_clickable((By.ID, "pc"))
-            )
-            inputElement_postcodesearch.send_keys(user_postcode)
-
-            find_address_btn = wait.until(
-                EC.element_to_be_clickable((By.XPATH, '//*[@id="sub"]'))
-            )
-            find_address_btn.click()
+            # Wait for the main content container to be present
+            wait = WebDriverWait(driver, 30)  # Increased timeout to 30 seconds
 
-            dropdown_options = wait.until(
-                EC.presence_of_element_located((By.XPATH, '//*[@id="address"]'))
-            )
-            time.sleep(2)
-            dropdown_options.click()
-            time.sleep(1)
-
-            # Wait for the element to be clickable
-            address = WebDriverWait(driver, 10).until(
-                EC.element_to_be_clickable(
-                    (By.XPATH, f'//li[contains(text(), "{user_paon}")]')
+            # First wait for container
+            main_content = wait.until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "/html/body/div[1]/div/div[2]/div")
                 )
             )
 
-            # Click the element
-            address.click()
-
-            submit_address = wait.until(
-                EC.presence_of_element_located((By.XPATH, '//*[@id="go"]'))
-            )
-            time.sleep(2)
-            submit_address.click()
+            # Then wait for loading indicator to disappear
+            wait.until(EC.invisibility_of_element_located((By.ID, "loading-indicator")))
 
-            results_found = wait.until(
-                EC.element_to_be_clickable(
-                    (By.XPATH, '//h1[contains(text(), "Your bin days")]')
-                )
-            )
+            # Add after the loading indicator wait
+            time.sleep(3)  # Give extra time for JavaScript to populate the data
 
-            final_page = wait.until(
-                EC.presence_of_element_located((By.CLASS_NAME, "waste__collections"))
+            # Then wait for at least one bin section to appear
+            wait.until(
+                EC.presence_of_element_located((By.CLASS_NAME, "waste-service-name"))
             )
 
+            # Now parse the page content
             soup = BeautifulSoup(driver.page_source, features="html.parser")
 
-            # Find all waste services
-
-            # Initialize the data dictionary
             data = {"bins": []}
             bin_sections = soup.find_all("h3", class_="waste-service-name")
 
-            # Loop through each bin field
+            if not bin_sections:
+                print("No bin sections found after waiting for content")
+                print(f"Page source: {driver.page_source}")
+                return data
+
+            # Rest of your existing bin processing code
            for bin_section in bin_sections:
                 # Extract the bin type (e.g., "Brown Caddy", "Green Wheelie Bin", etc.)
                 bin_type = bin_section.get_text(strip=True).split("\n")[
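The rewritten flow skips the postcode form entirely: it loads /waste/{uprn} and then waits out the client-side rendering. Condensed into one helper, the wait sequence looks roughly like this (selectors copied from the hunk, timeout values illustrative):

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support import expected_conditions as EC
    from selenium.webdriver.support.ui import WebDriverWait

    def wait_for_bin_days(driver, uprn: str, timeout: int = 30) -> str:
        driver.get(f"https://waste.bexley.gov.uk/waste/{uprn}")
        wait = WebDriverWait(driver, timeout)
        # Spinner gone first, then at least one JavaScript-rendered bin section.
        wait.until(EC.invisibility_of_element_located((By.ID, "loading-indicator")))
        wait.until(EC.presence_of_element_located((By.CLASS_NAME, "waste-service-name")))
        return driver.page_source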
uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py
@@ -63,8 +63,16 @@ class CouncilClass(AbstractGetBinDataClass):
 
         # Create a 'Select' for it, then select the first address in the list
         # (Index 0 is "Make a selection from the list")
-        dropdownSelect = Select(parent_element)
-        dropdownSelect.select_by_visible_text(str(user_paon))
+        options = parent_element.find_elements(By.TAG_NAME, "option")
+        found = False
+        for option in options:
+            if user_paon in option.text:
+                option.click()
+                found = True
+                break
+
+        if not found:
+            raise Exception(f"Address containing '{user_paon}' not found in dropdown options")
 
         submit_btn = wait.until(
             EC.presence_of_element_located(
@@ -125,6 +133,7 @@
                     break
             dict_data = {"type": bin_type, "collectionDate": bin_date}
             data["bins"].append(dict_data)
+            print(data)
         except Exception as e:
             # Here you can log the exception if needed
             print(f"An error occurred: {e}")
uk_bin_collection/uk_bin_collection/councils/BroadlandDistrictCouncil.py
@@ -83,15 +83,30 @@ class CouncilClass(AbstractGetBinDataClass):
         )
         print("Found address dropdown")
 
-        # Create a Select object for the dropdown
         dropdown_select = Select(address_dropdown)
 
-        # Search for the exact address
-        print(f"Looking for address: {user_paon}")
+        print(f"Looking for address containing: {user_paon}")
 
-        # Select the address by visible text
-        dropdown_select.select_by_visible_text(user_paon)
-        print(f"Selected address: {user_paon}")
+        found = False
+        user_paon_clean = user_paon.lower().strip()
+
+        for option in dropdown_select.options:
+            option_text_clean = option.text.lower().strip()
+
+            if (
+                option_text_clean == user_paon_clean  # Exact match if full address given
+                or option_text_clean.startswith(f"{user_paon_clean} ")  # Startswith match if just a number
+            ):
+                option.click()
+                found = True
+                print(f"Selected address: {option.text.strip()}")
+                break
+
+        if not found:
+            all_options = [opt.text for opt in dropdown_select.options]
+            raise Exception(
+                f"Could not find a matching address for '{user_paon}'. Available options: {all_options}"
+            )
 
         print("Looking for submit button after address selection...")
         submit_btn = wait.until(
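The matching rule here is two-tier: an exact case-insensitive match when a full address is supplied, or a prefix-plus-space match when only a house number was stored. Isolated as a pure function for clarity:

    def address_matches(option_text: str, user_paon: str) -> bool:
        option_clean = option_text.lower().strip()
        paon_clean = user_paon.lower().strip()
        # Exact match for a full address, or "12 ..." when only a number is given.
        return option_clean == paon_clean or option_clean.startswith(f"{paon_clean} ")

    assert address_matches("12 High Street", "12")              # prefix match
    assert address_matches("12 High Street", "12 HIGH STREET")  # exact, case-folded
    assert not address_matches("121 High Street", "12")         # no false prefix hit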
uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py
@@ -1,10 +1,11 @@
+from datetime import timedelta
+
 from bs4 import BeautifulSoup
+from dateutil.relativedelta import relativedelta
+
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
-from datetime import timedelta
-from dateutil.relativedelta import relativedelta
-
 
 # import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
@@ -15,7 +16,17 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
     def parse_data(self, page: str, **kwargs) -> dict:
+        try:
+            user_uprn = kwargs.get("uprn")
+            check_uprn(user_uprn)
+            url = f"https://my.charnwood.gov.uk/location?put=cbc{user_uprn}&rememberme=0&redirect=%2F"
+            if not user_uprn:
+                url = kwargs.get("url")
+        except Exception as e:
+            raise ValueError(f"Error getting identifier: {str(e)}")
+
         # Make a BS4 object
+        page = requests.get(url)
         soup = BeautifulSoup(page.text, features="html.parser")
         soup.prettify()
 
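This change and the Cheshire East one below share the same shape: build the request URL from a UPRN, keeping a stored URL as the fallback for configurations that predate UPRN support. A sketch of that shape with a hypothetical helper (build_url is illustrative, not part of the module):

    from typing import Optional

    def build_url(uprn: Optional[str], stored_url: Optional[str]) -> str:
        # Hypothetical helper mirroring the pattern above.
        if uprn:
            return f"https://my.charnwood.gov.uk/location?put=cbc{uprn}&rememberme=0&redirect=%2F"
        if stored_url:
            return stored_url  # backwards-compatible with URL-based configs
        raise ValueError("Neither a UPRN nor a stored URL was provided.")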
uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py
@@ -266,9 +266,9 @@ class CouncilClass(AbstractGetBinDataClass):
         ).date()
 
         for refuse_date in refuse_dates:
-            collection_date = datetime.strptime(refuse_date, "%d/%m/%Y") + timedelta(
+            collection_date = (datetime.strptime(refuse_date, "%d/%m/%Y") + timedelta(
                 days=refuse_day_offset
-            )
+            )).date()
             if collection_date in bh_dict:
                 collection_date = bh_dict[collection_date]
             collection_date = collection_date.strftime("%d/%m/%Y")
@@ -281,9 +281,9 @@
 
         for recycling_date in recycling_dates:
 
-            collection_date = datetime.strptime(recycling_date, "%d/%m/%Y") + timedelta(
+            collection_date = (datetime.strptime(recycling_date, "%d/%m/%Y") + timedelta(
                 days=recycling_day_offset
-            )
+            )).date()
             if collection_date in bh_dict:
                 collection_date = bh_dict[collection_date]
             collection_date = collection_date.strftime("%d/%m/%Y")
@@ -296,9 +296,9 @@
 
         for garden_date in garden_dates:
 
-            collection_date = datetime.strptime(garden_date, "%d/%m/%Y") + timedelta(
+            collection_date = (datetime.strptime(garden_date, "%d/%m/%Y") + timedelta(
                 days=garden_day_offset
-            )
+            )).date()
             if collection_date in bh_dict:
                 collection_date = bh_dict[collection_date]
             collection_date = collection_date.strftime("%d/%m/%Y")
@@ -318,9 +318,9 @@
 
         for food_date in food_dates:
 
-            collection_date = datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
+            collection_date = (datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
                 days=food_day_offset
-            )
+            )).date()
             if collection_date in bh_dict:
                 collection_date = bh_dict[collection_date]
             collection_date = collection_date.strftime("%d/%m/%Y")
@@ -354,9 +354,9 @@
 
         for food_date in food_dates_first:
 
-            collection_date = datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
+            collection_date = (datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
                 days=food_day_offset
-            )
+            )).date()
             if collection_date in bh_dict:
                 collection_date = bh_dict[collection_date]
             collection_date = collection_date.strftime("%d/%m/%Y")
@@ -368,9 +368,9 @@
             bindata["bins"].append(dict_data)
         for food_date in food_dates_second:
 
-            collection_date = datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
+            collection_date = (datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
                 days=second_week_offset
-            )
+            )).date()
             if collection_date in bh_dict:
                 collection_date = bh_dict[collection_date]
             collection_date = collection_date.strftime("%d/%m/%Y")
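All six hunks apply one fix: the sum datetime.strptime(...) + timedelta(...) is a datetime, while bh_dict is keyed by date (note the .date() in the surrounding context), and in Python a datetime never compares equal to a plain date, so the bank-holiday lookup could never hit. A small demonstration:

    from datetime import date, datetime, timedelta

    # Illustrative bank-holiday remapping, keyed by date objects.
    bh_dict = {date(2025, 12, 25): date(2025, 12, 27)}

    as_datetime = datetime.strptime("25/12/2025", "%d/%m/%Y") + timedelta(days=0)
    as_date = (datetime.strptime("25/12/2025", "%d/%m/%Y") + timedelta(days=0)).date()

    print(as_datetime in bh_dict)  # False: a datetime key never equals a date key
    print(as_date in bh_dict)      # True: matches the corrected code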
uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py
@@ -1,5 +1,8 @@
-from typing import Dict, Any, Optional
-from bs4 import BeautifulSoup, Tag, NavigableString
+from typing import Any, Dict, Optional
+
+from bs4 import BeautifulSoup, NavigableString, Tag
+
+from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
 """
@@ -13,6 +16,25 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
     def parse_data(self, page: Any, **kwargs: Any) -> Dict[str, Any]:
+
+        try:
+            user_uprn = kwargs.get("uprn")
+            check_uprn(user_uprn)
+            url = f"https://online.cheshireeast.gov.uk/MyCollectionDay/SearchByAjax/GetBartecJobList?uprn={user_uprn}"
+            if not user_uprn:
+                # This is a fallback for if the user stored a URL in old system. Ensures backwards compatibility.
+                url = kwargs.get("url")
+        except Exception as e:
+            raise ValueError(f"Error getting identifier: {str(e)}")
+
+        # Add warning suppression for the insecure request
+        import urllib3
+
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+        # Make request with SSL verification disabled
+        page = requests.get(url, verify=False)
+
         soup = BeautifulSoup(page.text, features="html.parser")
 
         bin_data_dict: Dict[str, Any] = {"bins": []}
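As a side note, urllib3.disable_warnings mutes InsecureRequestWarning process-wide. If the suppression were meant to stay local to the single unverified request, a scoped variant is possible; this is a variation on the hunk above, not what the module does:

    import warnings

    import requests
    import urllib3

    def get_unverified(url: str) -> requests.Response:
        # Suppress InsecureRequestWarning only for the duration of this call.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", urllib3.exceptions.InsecureRequestWarning)
            return requests.get(url, verify=False)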