uk_bin_collection-0.74.0-py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- uk_bin_collection/README.rst +0 -0
- uk_bin_collection/tests/council_feature_input_parity.py +79 -0
- uk_bin_collection/tests/features/environment.py +7 -0
- uk_bin_collection/tests/features/validate_council_outputs.feature +767 -0
- uk_bin_collection/tests/input.json +1077 -0
- uk_bin_collection/tests/output.schema +41 -0
- uk_bin_collection/tests/step_defs/step_helpers/file_handler.py +46 -0
- uk_bin_collection/tests/step_defs/test_validate_council.py +87 -0
- uk_bin_collection/tests/test_collect_data.py +104 -0
- uk_bin_collection/tests/test_common_functions.py +342 -0
- uk_bin_collection/uk_bin_collection/collect_data.py +133 -0
- uk_bin_collection/uk_bin_collection/common.py +292 -0
- uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py +43 -0
- uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py +97 -0
- uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py +69 -0
- uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py +51 -0
- uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py +180 -0
- uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py +109 -0
- uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py +72 -0
- uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py +100 -0
- uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py +49 -0
- uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py +70 -0
- uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py +147 -0
- uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py +119 -0
- uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py +105 -0
- uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py +104 -0
- uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py +103 -0
- uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py +137 -0
- uk_bin_collection/uk_bin_collection/councils/BristolCityCouncil.py +141 -0
- uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py +115 -0
- uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py +107 -0
- uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py +95 -0
- uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py +65 -0
- uk_bin_collection/uk_bin_collection/councils/CalderdaleCouncil.py +123 -0
- uk_bin_collection/uk_bin_collection/councils/CannockChaseDistrictCouncil.py +65 -0
- uk_bin_collection/uk_bin_collection/councils/CardiffCouncil.py +172 -0
- uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py +96 -0
- uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py +54 -0
- uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py +127 -0
- uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py +32 -0
- uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py +125 -0
- uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py +134 -0
- uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py +27 -0
- uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py +61 -0
- uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py +291 -0
- uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py +100 -0
- uk_bin_collection/uk_bin_collection/councils/DoncasterCouncil.py +77 -0
- uk_bin_collection/uk_bin_collection/councils/DorsetCouncil.py +58 -0
- uk_bin_collection/uk_bin_collection/councils/DoverDistrictCouncil.py +41 -0
- uk_bin_collection/uk_bin_collection/councils/DurhamCouncil.py +49 -0
- uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py +44 -0
- uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py +74 -0
- uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py +108 -0
- uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py +142 -0
- uk_bin_collection/uk_bin_collection/councils/EastSuffolkCouncil.py +112 -0
- uk_bin_collection/uk_bin_collection/councils/EastleighBoroughCouncil.py +70 -0
- uk_bin_collection/uk_bin_collection/councils/EnvironmentFirst.py +48 -0
- uk_bin_collection/uk_bin_collection/councils/ErewashBoroughCouncil.py +61 -0
- uk_bin_collection/uk_bin_collection/councils/FenlandDistrictCouncil.py +65 -0
- uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py +113 -0
- uk_bin_collection/uk_bin_collection/councils/GatesheadCouncil.py +118 -0
- uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py +1580 -0
- uk_bin_collection/uk_bin_collection/councils/GlasgowCityCouncil.py +55 -0
- uk_bin_collection/uk_bin_collection/councils/GuildfordCouncil.py +150 -0
- uk_bin_collection/uk_bin_collection/councils/HaltonBoroughCouncil.py +142 -0
- uk_bin_collection/uk_bin_collection/councils/HaringeyCouncil.py +59 -0
- uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py +63 -0
- uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py +134 -0
- uk_bin_collection/uk_bin_collection/councils/HullCityCouncil.py +48 -0
- uk_bin_collection/uk_bin_collection/councils/HuntingdonDistrictCouncil.py +44 -0
- uk_bin_collection/uk_bin_collection/councils/KingstonUponThamesCouncil.py +84 -0
- uk_bin_collection/uk_bin_collection/councils/KirkleesCouncil.py +130 -0
- uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py +139 -0
- uk_bin_collection/uk_bin_collection/councils/LancasterCityCouncil.py +71 -0
- uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py +137 -0
- uk_bin_collection/uk_bin_collection/councils/LisburnCastlereaghCityCouncil.py +101 -0
- uk_bin_collection/uk_bin_collection/councils/LiverpoolCityCouncil.py +65 -0
- uk_bin_collection/uk_bin_collection/councils/LondonBoroughHounslow.py +82 -0
- uk_bin_collection/uk_bin_collection/councils/LondonBoroughRedbridge.py +161 -0
- uk_bin_collection/uk_bin_collection/councils/MaldonDistrictCouncil.py +52 -0
- uk_bin_collection/uk_bin_collection/councils/MalvernHillsDC.py +57 -0
- uk_bin_collection/uk_bin_collection/councils/ManchesterCityCouncil.py +106 -0
- uk_bin_collection/uk_bin_collection/councils/MansfieldDistrictCouncil.py +38 -0
- uk_bin_collection/uk_bin_collection/councils/MertonCouncil.py +58 -0
- uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py +128 -0
- uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py +80 -0
- uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py +54 -0
- uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py +98 -0
- uk_bin_collection/uk_bin_collection/councils/NeathPortTalbotCouncil.py +139 -0
- uk_bin_collection/uk_bin_collection/councils/NewarkAndSherwoodDC.py +52 -0
- uk_bin_collection/uk_bin_collection/councils/NewcastleCityCouncil.py +57 -0
- uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py +58 -0
- uk_bin_collection/uk_bin_collection/councils/NewportCityCouncil.py +203 -0
- uk_bin_collection/uk_bin_collection/councils/NorthEastDerbyshireDistrictCouncil.py +115 -0
- uk_bin_collection/uk_bin_collection/councils/NorthEastLincs.py +53 -0
- uk_bin_collection/uk_bin_collection/councils/NorthKestevenDistrictCouncil.py +45 -0
- uk_bin_collection/uk_bin_collection/councils/NorthLanarkshireCouncil.py +46 -0
- uk_bin_collection/uk_bin_collection/councils/NorthLincolnshireCouncil.py +58 -0
- uk_bin_collection/uk_bin_collection/councils/NorthNorfolkDistrictCouncil.py +108 -0
- uk_bin_collection/uk_bin_collection/councils/NorthNorthamptonshireCouncil.py +72 -0
- uk_bin_collection/uk_bin_collection/councils/NorthSomersetCouncil.py +76 -0
- uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py +220 -0
- uk_bin_collection/uk_bin_collection/councils/NorthWestLeicestershire.py +114 -0
- uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py +58 -0
- uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py +123 -0
- uk_bin_collection/uk_bin_collection/councils/NottinghamCityCouncil.py +36 -0
- uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py +51 -0
- uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py +131 -0
- uk_bin_collection/uk_bin_collection/councils/PrestonCityCouncil.py +97 -0
- uk_bin_collection/uk_bin_collection/councils/ReadingBoroughCouncil.py +30 -0
- uk_bin_collection/uk_bin_collection/councils/ReigateAndBansteadBoroughCouncil.py +81 -0
- uk_bin_collection/uk_bin_collection/councils/RenfrewshireCouncil.py +135 -0
- uk_bin_collection/uk_bin_collection/councils/RhonddaCynonTaffCouncil.py +80 -0
- uk_bin_collection/uk_bin_collection/councils/RochdaleCouncil.py +69 -0
- uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py +60 -0
- uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py +93 -0
- uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py +100 -0
- uk_bin_collection/uk_bin_collection/councils/RushmoorCouncil.py +81 -0
- uk_bin_collection/uk_bin_collection/councils/SalfordCityCouncil.py +70 -0
- uk_bin_collection/uk_bin_collection/councils/SevenoaksDistrictCouncil.py +106 -0
- uk_bin_collection/uk_bin_collection/councils/SheffieldCityCouncil.py +54 -0
- uk_bin_collection/uk_bin_collection/councils/ShropshireCouncil.py +45 -0
- uk_bin_collection/uk_bin_collection/councils/SolihullCouncil.py +48 -0
- uk_bin_collection/uk_bin_collection/councils/SomersetCouncil.py +203 -0
- uk_bin_collection/uk_bin_collection/councils/SouthAyrshireCouncil.py +73 -0
- uk_bin_collection/uk_bin_collection/councils/SouthCambridgeshireCouncil.py +65 -0
- uk_bin_collection/uk_bin_collection/councils/SouthGloucestershireCouncil.py +74 -0
- uk_bin_collection/uk_bin_collection/councils/SouthLanarkshireCouncil.py +78 -0
- uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py +91 -0
- uk_bin_collection/uk_bin_collection/councils/SouthOxfordshireCouncil.py +93 -0
- uk_bin_collection/uk_bin_collection/councils/SouthTynesideCouncil.py +98 -0
- uk_bin_collection/uk_bin_collection/councils/StAlbansCityAndDistrictCouncil.py +43 -0
- uk_bin_collection/uk_bin_collection/councils/StHelensBC.py +56 -0
- uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py +112 -0
- uk_bin_collection/uk_bin_collection/councils/StockportBoroughCouncil.py +39 -0
- uk_bin_collection/uk_bin_collection/councils/StokeOnTrentCityCouncil.py +79 -0
- uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py +94 -0
- uk_bin_collection/uk_bin_collection/councils/SunderlandCityCouncil.py +100 -0
- uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py +52 -0
- uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py +62 -0
- uk_bin_collection/uk_bin_collection/councils/TandridgeDistrictCouncil.py +60 -0
- uk_bin_collection/uk_bin_collection/councils/TelfordAndWrekinCouncil.py +50 -0
- uk_bin_collection/uk_bin_collection/councils/TestValleyBoroughCouncil.py +203 -0
- uk_bin_collection/uk_bin_collection/councils/TonbridgeAndMallingBC.py +101 -0
- uk_bin_collection/uk_bin_collection/councils/TorbayCouncil.py +51 -0
- uk_bin_collection/uk_bin_collection/councils/TorridgeDistrictCouncil.py +154 -0
- uk_bin_collection/uk_bin_collection/councils/ValeofGlamorganCouncil.py +119 -0
- uk_bin_collection/uk_bin_collection/councils/ValeofWhiteHorseCouncil.py +103 -0
- uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py +89 -0
- uk_bin_collection/uk_bin_collection/councils/WarwickDistrictCouncil.py +34 -0
- uk_bin_collection/uk_bin_collection/councils/WaverleyBoroughCouncil.py +119 -0
- uk_bin_collection/uk_bin_collection/councils/WealdenDistrictCouncil.py +86 -0
- uk_bin_collection/uk_bin_collection/councils/WelhatCouncil.py +73 -0
- uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py +134 -0
- uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py +118 -0
- uk_bin_collection/uk_bin_collection/councils/WestLothianCouncil.py +103 -0
- uk_bin_collection/uk_bin_collection/councils/WestNorthamptonshireCouncil.py +34 -0
- uk_bin_collection/uk_bin_collection/councils/WestSuffolkCouncil.py +64 -0
- uk_bin_collection/uk_bin_collection/councils/WiganBoroughCouncil.py +97 -0
- uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py +135 -0
- uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py +134 -0
- uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py +114 -0
- uk_bin_collection/uk_bin_collection/councils/WyreCouncil.py +89 -0
- uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py +45 -0
- uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py +33 -0
- uk_bin_collection/uk_bin_collection/get_bin_data.py +165 -0
- uk_bin_collection-0.74.0.dist-info/LICENSE +21 -0
- uk_bin_collection-0.74.0.dist-info/METADATA +247 -0
- uk_bin_collection-0.74.0.dist-info/RECORD +171 -0
- uk_bin_collection-0.74.0.dist-info/WHEEL +4 -0
- uk_bin_collection-0.74.0.dist-info/entry_points.txt +3 -0
uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py
@@ -0,0 +1,125 @@
+import time
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            data = {"bins": []}
+            collections = []
+
+            user_paon = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless)
+            driver.get(
+                "https://www.cheshirewestandchester.gov.uk/residents/waste-and-recycling/your-bin-collection/collection-day"
+            )
+
+            time.sleep(5)
+
+            cookie_close_button = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located((By.ID, "ccc-close"))
+            )
+            cookie_close_button.click()
+
+            find_collection_button = WebDriverWait(driver, timeout=10).until(
+                EC.presence_of_element_located(
+                    (By.LINK_TEXT, "Find your collection day")
+                )
+            )
+            find_collection_button.click()
+
+            banner_close_button = WebDriverWait(driver, timeout=30).until(
+                EC.presence_of_element_located((By.ID, "close-cookie-message"))
+            )
+            banner_close_button.click()
+
+            time.sleep(5)
+
+            frame = driver.find_element(
+                By.XPATH, "/html/body/div[4]/section/div/div[2]/div[2]/div/iframe"
+            )
+            driver.switch_to.frame(frame)
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located((By.NAME, "postcode_search"))
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            address_box_text = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located((By.ID, "label_Choose_Address"))
+            )
+            address_box_text.click()
+            time.sleep(2)
+
+            address_selection_menu = Select(
+                driver.find_element(By.ID, "Choose_Address")
+            )
+            for idx, addr_option in enumerate(address_selection_menu.options):
+                option_name = addr_option.text[0 : len(user_paon)]
+                if option_name == user_paon:
+                    selected_address = addr_option
+                    break
+            address_selection_menu.select_by_visible_text(selected_address.text)
+
+            WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, '//*[@id="bin-schedule-content"]/div/h3')
+                )
+            )
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            # Get collections
+            bin_cards = soup.find_all("div", {"class": "bin-schedule-content-info"})
+            for card in bin_cards:
+                bin_name = card.contents[0].text.strip() + " bin"
+                bin_date = datetime.strptime(
+                    card.contents[1].text.split(":")[1].strip(), "%A, %d %B %Y"
+                )
+                collections.append((bin_name, bin_date))
+
+            ordered_data = sorted(collections, key=lambda x: x[1])
+            for item in ordered_data:
+                dict_data = {
+                    "type": item[0].capitalize(),
+                    "collectionDate": item[1].strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+
+        return data
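One thing worth noting in the hunk above: the address-matching loop only assigns selected_address when an option's leading characters equal user_paon, so an unmatched house number surfaces as a NameError rather than a clear message. A minimal, standalone sketch of a more defensive variant (the helper name is illustrative, not part of the package):

from selenium.webdriver.support.ui import Select


def select_matching_address(menu: Select, user_paon: str) -> None:
    # Pick the first option whose text starts with the supplied house number/name,
    # failing loudly instead of hitting an unbound-variable NameError later on.
    for option in menu.options:
        if option.text[: len(user_paon)] == user_paon:
            menu.select_by_visible_text(option.text)
            return
    raise ValueError(f"No address option starting with {user_paon!r} was found")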
uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py
@@ -0,0 +1,134 @@
+import time
+import urllib.parse
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+def format_bin_type(bin_colour: str):
+    bin_types = {
+        "grey": "Garden waste (Grey Bin)",
+        "brown": "Paper and card (Brown Bin)",
+        "blue": "Bottles and cans (Blue Bin)",
+        "green": "General waste (Green Bin)",
+    }
+    bin_colour = urllib.parse.unquote(bin_colour).split(" ")[0].lower()
+    return bin_types[bin_colour]
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            data = {"bins": []}
+            user_uprn = kwargs.get("uprn")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            check_postcode(user_postcode)
+
+            # Create Selenium webdriver
+            user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+            driver = create_webdriver(web_driver, headless, user_agent)
+            driver.get("https://myaccount.chorley.gov.uk/wastecollections.aspx")
+
+            # Accept cookies banner
+            cookieBanner = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "PrivacyPolicyNotification"))
+            )
+            cookieClose = cookieBanner.find_element(
+                By.CSS_SELECTOR, "span.ui-icon-circle-close"
+            )
+            cookieClose.click()
+
+            # Populate postcode field
+            inputElement_postcode = driver.find_element(
+                By.ID,
+                "MainContent_addressSearch_txtPostCodeLookup",
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "MainContent_addressSearch_btnFindAddress",
+                    )
+                )
+            )
+            findAddress.click()
+
+            time.sleep(1)
+
+            # Wait for the 'Select address' dropdown to appear and select option matching UPRN
+            dropdown = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "MainContent_addressSearch_ddlAddress",
+                    )
+                )
+            )
+            # Create a 'Select' for it, then select the matching URPN option
+            dropdownSelect = Select(dropdown)
+            dropdownSelect.select_by_value(user_uprn)
+
+            # Wait for the submit button to appear, then click it to get the collection dates
+            submit = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "MainContent_btnSearch"))
+            )
+            submit.click()
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            # Get the property details
+            property_details = soup.find(
+                "table",
+                {"class": "WasteCollection"},
+            )
+
+            # Get the dates
+            for row in property_details.tbody.find_all("tr", recursive=False):
+                month_col = row.td
+                month = month_col.get_text(strip=True)
+
+                for date_col in month_col.find_next_siblings("td"):
+                    day = date_col.p.contents[0].strip()
+
+                    if day == "":
+                        continue
+
+                    for bin_type in date_col.find_all("img"):
+                        bin_colour = bin_type.get("src").split("/")[-1].split(".")[0]
+                        date_object = datetime.strptime(f"{day} {month}", "%d %B %Y")
+                        date_formatted = date_object.strftime("%d/%m/%Y")
+
+                        dict_data = {
+                            "type": format_bin_type(bin_colour),
+                            "collectionDate": date_formatted,
+                        }
+                        data["bins"].append(dict_data)
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return data
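For reference, format_bin_type above keys off the first word of the URL-decoded image filename, so a src ending in something like blue%20bin.png resolves to the blue-bin label. A quick illustrative check (the argument values are hypothetical examples, not taken from the council site):

from uk_bin_collection.uk_bin_collection.councils.ChorleyCouncil import format_bin_type

print(format_bin_type("blue%20bin"))  # -> "Bottles and cans (Blue Bin)"
print(format_bin_type("grey"))        # -> "Garden waste (Grey Bin)"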
uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py
@@ -0,0 +1,27 @@
+from bs4 import BeautifulSoup
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+from uk_bin_collection.uk_bin_collection.common import *
+from datetime import datetime
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    def parse_data(self, page: str, **kwargs) -> dict:
+        soup = BeautifulSoup(page.text, features="html.parser")
+        data = {"bins": []}
+
+        for bin_section in soup.select('div[class*="containererf"]'):
+            date_text = bin_section.find(id="content").text.strip()
+            collection_date = datetime.strptime(date_text, "%A, %d/%m/%Y")
+
+            bin_types = bin_section.find(id="main1").findAll("li")
+            for bin_type in bin_types:
+                bin_type_name = bin_type.text.split("(")[0].strip()
+
+                data["bins"].append(
+                    {
+                        "type": bin_type_name,
+                        "collectionDate": collection_date.strftime(date_format),
+                    }
+                )
+
+        return data
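Every CouncilClass.parse_data() in this package returns the same structure, which tests/output.schema in the listing above validates: a "bins" list of type/date pairs, with dates rendered through the shared date_format from common.py (the Chorley module above hard-codes %d/%m/%Y, which appears to be the same layout). A hypothetical result, for illustration only:

# Illustrative only: the shape every parser in councils/ returns.
example_output = {
    "bins": [
        {"type": "General waste (Green Bin)", "collectionDate": "06/10/2023"},
        {"type": "Paper and card (Brown Bin)", "collectionDate": "13/10/2023"},
    ]
}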
uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py
@@ -0,0 +1,61 @@
+from bs4 import BeautifulSoup
+from dateutil.relativedelta import relativedelta
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Make a BS4 object
+        uprn = kwargs.get("uprn")
+        usrn = kwargs.get("paon")
+        check_uprn(uprn)
+        check_usrn(usrn)
+
+        day = datetime.now().date().strftime("%d")
+        month = datetime.now().date().strftime("%m")
+        year = datetime.now().date().strftime("%Y")
+
+        api_url = (
+            f"https://my.crawley.gov.uk/appshost/firmstep/self/apps/custompage/waste?language=en&uprn={uprn}"
+            f"&usrn={usrn}&day={day}&month={month}&year={year}"
+        )
+        response = requests.get(api_url)
+
+        soup = BeautifulSoup(response.text, features="html.parser")
+        soup.prettify()
+
+        data = {"bins": []}
+
+        titles = [title.text for title in soup.select(".title")]
+        collection_tag = soup.body.find_all(
+            "div", {"class": "col-md-6 col-sm-6 col-xs-6"}, string="Next collection"
+        )
+        bin_index = 0
+        for tag in collection_tag:
+            for item in tag.next_elements:
+                if (
+                    str(item).startswith('<div class="date text-right text-grey">')
+                    and str(item) != ""
+                ):
+                    collection_date = datetime.strptime(item.text, "%A %d %B")
+                    next_collection = collection_date.replace(year=datetime.now().year)
+                    if datetime.now().month == 12 and next_collection.month == 1:
+                        next_collection = next_collection + relativedelta(years=1)
+
+                    dict_data = {
+                        "type": titles[bin_index].strip(),
+                        "collectionDate": next_collection.strftime(date_format),
+                    }
+                    data["bins"].append(dict_data)
+                    bin_index += 1
+                    break
+        return data
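The Crawley page lists collection dates without a year (%A %d %B), so the loop above borrows the current year and pushes a January date seen during December into the following year. The same step, pulled out as a standalone sketch (the helper name is illustrative, not part of the package):

from datetime import datetime

from dateutil.relativedelta import relativedelta


def infer_collection_date(date_text: str, today: datetime) -> datetime:
    # "Friday 05 January" carries no year, so borrow it from today's date...
    parsed = datetime.strptime(date_text, "%A %d %B").replace(year=today.year)
    # ...then roll over to next year when a January collection shows up in December.
    if today.month == 12 and parsed.month == 1:
        parsed += relativedelta(years=1)
    return parsed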
uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py
@@ -0,0 +1,291 @@
+import time
+
+from bs4 import BeautifulSoup
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+def get_headers(base_url: str, method: str) -> dict[str, str]:
+    """
+    Gets request headers
+    :rtype: dict[str, str]
+    :param base_url: Base URL to use
+    :param method: Method to use
+    :return: Request headers
+    """
+    headers = {
+        "Accept-Encoding": "gzip, deflate, br",
+        "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
+        "Cache-Control": "max-age=0",
+        "Connection": "keep-alive",
+        "Host": "service.croydon.gov.uk",
+        "Origin": base_url,
+        "sec-ch-ua": '"Not_A Brand";v="99", "Google Chrome";v="109", "Chromium";v="109"',
+        "sec-ch-ua-mobile": "?0",
+        "sec-ch-ua-platform": "Windows",
+        "Sec-Fetch-Dest": "document",
+        "Sec-Fetch-User": "?1",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
+        " Chrome/109.0.0.0 Safari/537.36",
+    }
+    if method.lower() == "post":
+        headers["Accept"] = "application/json, text/javascript, */*; q=0.01"
+        headers["Content-Type"] = "application/x-www-form-urlencoded; charset=UTF-8"
+        headers["Sec-Fetch-Mode"] = "cors"
+        headers["Sec-Fetch-Mode"] = "same-origin"
+        headers["X-Requested-With"] = "XMLHttpRequest"
+    else:
+        headers["Accept"] = (
+            "text/html,application/xhtml+xml,application/xml;"
+            "q=0.9,image/avif,image/webp,image/apng,*/*;"
+            "q=0.8,application/signed-exchange;v=b3;q=0.9"
+        )
+        headers["Sec-Fetch-Mode"] = "navigate"
+        headers["Sec-Fetch-Mode"] = "none"
+    return headers
+
+
+def get_session_storage_global() -> object:
+    """
+    Gets session storage global object
+    :rtype: object
+    :return: Session storage global object
+    """
+    return {
+        "destination_stack": [
+            "w/webpage/bin-day-enter-address",
+            "w/webpage/your-bin-collection-details?context_record_id=86086077"
+            "&webpage_token=5c047b2c10b4aad66bef2054aac6bea52ad7a5e185ffdf7090b01f8ddc96728f",
+            "w/webpage/bin-day-enter-address",
+            "w/webpage/your-bin-collection-details?context_record_id=86085229"
+            "&webpage_token=cf1b8fd6213f4823277d98c1dd8a992e6ebef1fabc7d892714e5d9dade448c37",
+            "w/webpage/bin-day-enter-address",
+            "w/webpage/your-bin-collection-details?context_record_id=86084221"
+            "&webpage_token=7f52fb51019bf0e6bfe9647b1b31000124bd92a9d95781f1557f58b3ed40da52",
+            "w/webpage/bin-day-enter-address",
+            "w/webpage/your-bin-collection-details?context_record_id=86083209"
+            "&webpage_token=de50c265da927336f526d9d9a44947595c3aa38965aa8c495ac2fb73d272ece8",
+            "w/webpage/bin-day-enter-address",
+        ],
+        "last_context_record_id": "86086077",
+    }
+
+
+def get_csrf_token(s: requests.session, base_url: str) -> str:
+    """
+    Gets a CSRF token
+    :rtype: str
+    :param s: requests.session() to use
+    :param base_url: Base URL to use
+    :return: CSRF token
+    """
+    csrf_token = ""
+    response = s.get(
+        base_url + "/wasteservices/w/webpage/bin-day-enter-address",
+        headers=get_headers(base_url, "GET"),
+    )
+    if response.status_code == 200:
+        soup = BeautifulSoup(response.text, features="html.parser")
+        soup.prettify()
+        app_body = soup.find("div", {"class": "app-body"})
+        script = app_body.find("script", {"type": "text/javascript"}).string
+        p = re.compile("var CSRF = ('|\")(.*?)('|\");")
+        m = p.search(script)
+        csrf_token = m.groups()[1]
+    else:
+        raise ValueError(
+            "Code 1: Failed to get a CSRF token. Please ensure the council website is online first,"
+            " then open an issue on GitHub."
+        )
+    return csrf_token
+
+
+def get_address_id(
+    s: requests.session, base_url: str, csrf_token: str, postcode: str, paon: str
+) -> str:
+    """
+    Gets the address ID
+    :rtype: str
+    :param s: requests.session() to use
+    :param base_url: Base URL to use
+    :param csrf_token: CSRF token to use
+    :param postcode: Postcode to use
+    :param paon: House number/address to find
+    :return: address ID
+    """
+    address_id = "0"
+    # Get the addresses for the postcode
+    form_data = {
+        "code_action": "search",
+        "code_params": '{"search_item":"' + postcode + '","is_ss":true}',
+        "fragment_action": "handle_event",
+        "fragment_id": "PCF0020408EECEC1",
+        "fragment_collection_class": "formtable",
+        "fragment_collection_editable_values": '{"PCF0021449EECEC1":"1"}',
+        "_session_storage": json.dumps(
+            {
+                "/wasteservices/w/webpage/bin-day-enter-address": {},
+                "_global": get_session_storage_global(),
+            }
+        ),
+        "action_cell_id": "PCL0005629EECEC1",
+        "action_page_id": "PAG0000898EECEC1",
+        "form_check_ajax": csrf_token,
+    }
+    response = s.post(
+        base_url
+        + "/wasteservices/w/webpage/bin-day-enter-address?webpage_subpage_id=PAG0000898EECEC1"
+        "&webpage_token=faab02e1f62a58f7bad4c2ae5b8622e19846b97dde2a76f546c4bb1230cee044"
+        "&widget_action=fragment_action",
+        headers=get_headers(base_url, "POST"),
+        data=form_data,
+    )
+    if response.status_code == 200:
+        json_response = json.loads(response.text)
+        addresses = json_response["response"]["items"]
+        # Find the matching address id for the paon
+        for address in addresses:
+            # Check for full matches first
+            if address.get("dropdown_display_field") == paon:
+                address_id = address.get("id")
+                break
+        # Check for matching start if no full match found
+        if address_id == "0":
+            for address in addresses:
+                if address.get("dropdown_display_field").split()[0] == paon.strip():
+                    address_id = address.get("id")
+                    break
+        # Check match was found
+        if address_id == "0":
+            raise ValueError(
+                "Code 2: No matching address for house number/full address found."
+            )
+    else:
+        raise ValueError("Code 3: No addresses found for provided postcode.")
+    return address_id
+
+
+def get_collection_data(
+    s: requests.session, base_url: str, csrf_token: str, address_id: str
+) -> str:
+    """
+    Gets the collection data
+    :rtype: str
+    :param s: requests.session() to use
+    :param base_url: Base URL to use
+    :param csrf_token: CSRF token to use
+    :param address_id: Address id to use
+    :param retries: Retries count
+    :return: Collection data
+    """
+    collection_data = ""
+    if address_id != "0":
+        form_data = {
+            "form_check": csrf_token,
+            "submitted_page_id": "PAG0000898EECEC1",
+            "submitted_widget_group_id": "PWG0002644EECEC1",
+            "submitted_widget_group_type": "modify",
+            "submission_token": "63e9126bacd815.12997577",
+            "payload[PAG0000898EECEC1][PWG0002644EECEC1][PCL0005629EECEC1][formtable]"
+            "[C_63e9126bacfb3][PCF0020408EECEC1]": address_id,
+            "payload[PAG0000898EECEC1][PWG0002644EECEC1][PCL0005629EECEC1][formtable]"
+            "[C_63e9126bacfb3][PCF0021449EECEC1]": "1",
+            "payload[PAG0000898EECEC1][PWG0002644EECEC1][PCL0005629EECEC1][formtable]"
+            "[C_63e9126bacfb3][PCF0020072EECEC1]": "Next",
+            "submit_fragment_id": "PCF0020072EECEC1",
+            "_session_storage": json.dumps({"_global": get_session_storage_global()}),
+            "_update_page_content_request": 1,
+            "form_check_ajax": csrf_token,
+        }
+        response = s.post(
+            base_url
+            + "/wasteservices/w/webpage/bin-day-enter-address?webpage_subpage_id=PAG0000898EECEC1"
+            "&webpage_token=faab02e1f62a58f7bad4c2ae5b8622e19846b97dde2a76f546c4bb1230cee044",
+            headers=get_headers(base_url, "POST"),
+            data=form_data,
+        )
+        if response.status_code == 200 and len(response.text) > 0:
+            json_response = json.loads(response.text)
+            form_data = {
+                "_dummy": 1,
+                "_session_storage": json.dumps(
+                    {"_global": get_session_storage_global()}
+                ),
+                "_update_page_content_request": 1,
+                "form_check_ajax": csrf_token,
+            }
+            response = s.post(
+                base_url + json_response["redirect_url"],
+                headers=get_headers(base_url, "POST"),
+                data=form_data,
+            )
+            if response.status_code == 200 and len(response.text) > 0:
+                json_response = json.loads(response.text)
+                collection_data = json_response["data"]
+            else:
+                raise ValueError("Code 4: Failed to get bin data.")
+        else:
+            raise ValueError(
+                "Code 5: Failed to get bin data. Too many requests. Please wait a few minutes before trying again."
+            )
+    return collection_data
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        requests.packages.urllib3.disable_warnings()
+        s = requests.session()
+        base_url = "https://service.croydon.gov.uk"
+        paon = kwargs.get("paon")
+        postcode = kwargs.get("postcode")
+        check_paon(paon)
+        check_postcode(postcode)
+
+        # Firstly, get a CSRF (cross-site request forgery) token
+        csrf_token = get_csrf_token(s, base_url)
+        # Next, get the address_id
+        address_id = get_address_id(s, base_url, csrf_token, postcode, paon)
+        # Finally, use the address_id to get the collection data
+        collection_data = get_collection_data(s, base_url, csrf_token, address_id)
+        if collection_data != "":
+            soup = BeautifulSoup(collection_data, features="html.parser")
+            soup.prettify()
+
+            # Find the list elements
+            collection_record_elements = soup.find_all(
+                "div", {"class": "listing_template_record"}
+            )
+
+            # Form a JSON wrapper
+            data = {"bins": []}
+
+            for e in collection_record_elements:
+                collection_type = e.find_all(
+                    "div", {"class": "fragment_presenter_template_show"}
+                )[0].text.strip()
+                collection_date = (
+                    e.find("div", {"class": "bin-collection-next"})
+                    .attrs["data-current_value"]
+                    .strip()
+                )
+                dict_data = {
+                    "type": collection_type,
+                    "collectionDate": datetime.strptime(
+                        collection_date, "%d/%m/%Y %H:%M"
+                    ).strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            if len(data["bins"]) == 0:
+                raise ValueError(
+                    "Code 5: No bin data found. Please ensure the council website is showing data first,"
+                    " then open an issue on GitHub."
+                )
+
+            return data
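In the Croydon flow above, the CSRF token is scraped out of an inline script with a regular expression whose second capture group holds the token itself. A small illustrative run against a made-up script body (the token value is hypothetical):

import re

# Hypothetical inline script body of the kind get_csrf_token() searches for.
script = "var CSRF = 'faab02e1f62a58f7bad4c2ae5b8622e1';"
match = re.compile("var CSRF = ('|\")(.*?)('|\");").search(script)
print(match.groups()[1])  # -> faab02e1f62a58f7bad4c2ae5b8622e1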