uk_bin_collection 0.138.1__py3-none-any.whl → 0.139.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uk_bin_collection/tests/input.json +28 -0
- uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py +2 -1
- uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py +7 -3
- uk_bin_collection/uk_bin_collection/councils/CeredigionCountyCouncil.py +157 -0
- uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +95 -61
- uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py +4 -1
- uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py +52 -41
- uk_bin_collection/uk_bin_collection/councils/GooglePublicCalendarCouncil.py +3 -4
- uk_bin_collection/uk_bin_collection/councils/LondonBoroughOfRichmondUponThames.py +11 -9
- uk_bin_collection/uk_bin_collection/councils/MiddlesbroughCouncil.py +13 -4
- uk_bin_collection/uk_bin_collection/councils/MonmouthshireCountyCouncil.py +5 -1
- uk_bin_collection/uk_bin_collection/councils/NewForestCouncil.py +1 -3
- uk_bin_collection/uk_bin_collection/councils/NorthDevonCountyCouncil.py +159 -0
- uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py +15 -3
- uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py +873 -871
- uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py +1 -1
- uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py +3 -6
- uk_bin_collection/uk_bin_collection/councils/SouthHollandDistrictCouncil.py +136 -0
- uk_bin_collection/uk_bin_collection/councils/WalsallCouncil.py +6 -2
- uk_bin_collection/uk_bin_collection/councils/WalthamForest.py +1 -1
- uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py +6 -3
- uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py +1 -0
- {uk_bin_collection-0.138.1.dist-info → uk_bin_collection-0.139.0.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.138.1.dist-info → uk_bin_collection-0.139.0.dist-info}/RECORD +31 -28
- {uk_bin_collection-0.138.1.dist-info → uk_bin_collection-0.139.0.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.138.1.dist-info → uk_bin_collection-0.139.0.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.138.1.dist-info → uk_bin_collection-0.139.0.dist-info}/entry_points.txt +0 -0
--- a/uk_bin_collection/tests/input.json
+++ b/uk_bin_collection/tests/input.json

@@ -386,6 +386,14 @@
         "wiki_name": "Castlepoint District Council",
         "wiki_note": "For this council, 'uprn' is actually a 4-digit code for your street. Go [here](https://apps.castlepoint.gov.uk/cpapps/index.cfm?fa=wastecalendar) and inspect the source of the dropdown box to find the 4-digit number for your street."
     },
+    "CeredigionCountyCouncil":{
+        "house_number":"BLAEN CWMMAGWR, TRISANT, CEREDIGION, SY23 4RQ",
+        "postcode":"SY23 4RQ",
+        "url":"https://www.ceredigion.gov.uk/resident/bins-recycling/",
+        "web_driver":"http://selenium:4444",
+        "wiki_name":"Ceredigion County Council",
+        "wiki_note":"House Number is the full address as it appears on the drop-down on the site when you search by postcode. This parser requires a Selenium webdriver."
+    },
     "CharnwoodBoroughCouncil": {
         "url": "https://my.charnwood.gov.uk/location?put=cbc10070067259&rememberme=0&redirect=%2F",
         "wiki_command_url_override": "https://my.charnwood.gov.uk/location?put=cbcXXXXXXXX&rememberme=0&redirect=%2F",

@@ -1327,6 +1335,16 @@
         "wiki_name": "North Ayrshire Council",
         "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
     },
+    "NorthDevonCountyCouncil":{
+        "house_number": "1",
+        "uprn": "100040249471",
+        "postcode": "EX31 2LE",
+        "skip_get_url": true,
+        "url": "https://my.northdevon.gov.uk/service/WasteRecyclingCollectionCalendar",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "North Devon County Council",
+        "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver."
+    },
     "NorthEastDerbyshireDistrictCouncil": {
         "postcode": "S42 5RB",
         "skip_get_url": true,

@@ -1715,6 +1733,16 @@
         "wiki_name": "South Hams District Council",
         "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
     },
+    "SouthHollandDistrictCouncil": {
+        "house_number": "1",
+        "postcode": "PE6 0HE",
+        "uprn": "100030872493",
+        "skip_get_url": true,
+        "url": "https://www.sholland.gov.uk/mycollections",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "South Holland District Council",
+        "wiki_note": "Pass the UPRN and postcode in their respective parameters. This parser requires a Selenium webdriver."
+    },
     "SouthKestevenDistrictCouncil": {
         "house_number": "2 Althorpe Close, Market Deeping, PE6 8BL",
         "postcode": "PE68BL",
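The three `input.json` additions above register new council fixtures: each key names a council parser, and the fields (`postcode`, `house_number`, `uprn`, `url`, `web_driver`, `skip_get_url`) become the arguments handed to that parser. As a rough sketch of how such an entry maps onto a command-line run — the console-script name and flag spellings below are assumptions from the project's conventions, not part of this diff:

```python
# Hypothetical CLI mapping for the new CeredigionCountyCouncil entry.
# The entry-point name and the -p/-n/-w flags are assumed; check your
# installed uk_bin_collection version for the exact spellings.
import subprocess

subprocess.run(
    [
        "uk_bin_collection",
        "CeredigionCountyCouncil",
        "https://www.ceredigion.gov.uk/resident/bins-recycling/",  # "url"
        "-p", "SY23 4RQ",                                          # "postcode"
        "-n", "BLAEN CWMMAGWR, TRISANT, CEREDIGION, SY23 4RQ",     # "house_number"
        "-w", "http://selenium:4444",                              # "web_driver"
    ],
    check=True,
)
```

In the North Devon and South Holland entries, `skip_get_url` tells the framework not to fetch `url` itself before calling the parser, since those parsers drive Selenium directly.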
--- a/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py

@@ -5,6 +5,7 @@ import requests
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

+
 # import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
     """

@@ -12,7 +13,7 @@ class CouncilClass(AbstractGetBinDataClass):
     base class. They can also override some operations with a default
     implementation.
     """
-
+
     def parse_data(self, page: str, **kwargs) -> dict:

         user_uprn = kwargs.get("uprn")
--- a/uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py

@@ -4,6 +4,7 @@ from bs4 import BeautifulSoup
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

+
 # import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
     """
--- a/uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py

@@ -6,6 +6,7 @@ from bs4 import BeautifulSoup
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

+
 class CouncilClass(AbstractGetBinDataClass):
     """
     Concrete classes have to implement all abstract operations of the
--- a/uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py

@@ -43,7 +43,7 @@ class CouncilClass(AbstractGetBinDataClass):
         rows = table.find_all("tr")

         current_year = datetime.now().year
-        current_month = datetime.now().month
+        current_month = datetime.now().month

         # Process each row into a list of dictionaries
         for row in rows[1:]:  # Skip the header row

@@ -56,13 +56,17 @@ class CouncilClass(AbstractGetBinDataClass):
             # Safely try to parse collection date
             if collection_date_text:
                 try:
-                    collection_date = datetime.strptime(
+                    collection_date = datetime.strptime(
+                        collection_date_text, "%a %d %b"
+                    )
                     if collection_date.month == 1 and current_month != 1:
                         collection_date = collection_date.replace(year=current_year + 1)
                     else:
                         collection_date = collection_date.replace(year=current_year)

-                    formatted_collection_date = collection_date.strftime(
+                    formatted_collection_date = collection_date.strftime(
+                        "%d/%m/%Y"
+                    )  # Use your desired date format
                     dict_data = {
                         "type": service,
                         "collectionDate": formatted_collection_date,
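The Broxbourne reflow above touches a date-inference detail worth spelling out: the site's dates carry no year (`"%a %d %b"`), so `strptime` defaults the year to 1900 and the parser back-fills it, bumping January dates into the next year when scraping in any other month. A standalone sketch of that logic, with an invented date and scrape time:

```python
from datetime import datetime

def infer_year(date_text: str, today: datetime) -> datetime:
    """Back-fill the missing year, rolling January dates into next year."""
    parsed = datetime.strptime(date_text, "%a %d %b")  # year defaults to 1900
    if parsed.month == 1 and today.month != 1:
        return parsed.replace(year=today.year + 1)
    return parsed.replace(year=today.year)

# Scraping in December 2025, "Mon 05 Jan" should mean January 2026:
print(infer_year("Mon 05 Jan", datetime(2025, 12, 15)).strftime("%d/%m/%Y"))  # 05/01/2026
```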
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/CeredigionCountyCouncil.py

@@ -0,0 +1,157 @@
+from time import sleep
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the base
+    class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            house_number = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(house_number)
+            check_postcode(user_postcode)
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get("https://www.ceredigion.gov.uk/resident/bins-recycling/")
+
+            try:
+                accept_cookies = WebDriverWait(driver, timeout=10).until(
+                    EC.element_to_be_clickable(
+                        (By.XPATH, "//button[@id='ccc-reject-settings']")
+                    )
+                )
+                accept_cookies.click()
+            except:
+                print(
+                    "Accept cookies banner not found or clickable within the specified time."
+                )
+                pass
+
+            # Wait for postcode entry box
+            postcode_search = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//a[contains(text(), 'Postcode Search')]")
+                )
+            )
+            driver.execute_script("arguments[0].scrollIntoView(true);", postcode_search)
+
+            sleep(2)  # Wait for the element to be in view
+
+            postcode_search.click()
+
+            postcode_entry_box = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//input[@data-ebv-desc='Postcode']")
+                )
+            )
+
+            # Enter postcode
+            postcode_entry_box.send_keys(user_postcode)
+
+            postcode_button = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//input[@value='Find Address']")
+                )
+            )
+
+            postcode_button.click()
+
+            address_dropdown = Select(
+                WebDriverWait(driver, 10).until(
+                    EC.presence_of_element_located(
+                        (By.XPATH, "//select[@data-ebv-desc='Select Address']")
+                    )
+                )
+            )
+
+            address_dropdown.select_by_visible_text(house_number)
+
+            address_next_button = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.XPATH, "//input[@value='Next']"))
+            )
+
+            address_next_button.click()
+
+            result = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//form[contains(., 'Next collection:')]")
+                )
+            )
+
+            # Make a BS4 object
+            soup = BeautifulSoup(
+                result.get_attribute("innerHTML"), features="html.parser"
+            )
+
+            data = {"bins": []}
+
+            # Find all panels containing collection info
+            collection_panels = soup.find_all("div", class_="eb-OL2RoeVH-panel")
+
+            for panel in collection_panels:
+                try:
+                    # Extract the 'Next collection' date string
+                    next_text = panel.find_all("span")[-1].text.strip()
+                    match = re.search(
+                        r"Next collection:\s*(\w+day)\s+(\d{1,2})(?:st|nd|rd|th)?\s+(\w+)",
+                        next_text,
+                    )
+                    if not match:
+                        continue
+
+                    _, day, month = match.groups()
+                    year = (
+                        datetime.now().year
+                    )  # You could enhance this to calculate the correct year if needed
+                    full_date = f"{day} {month} {year}"
+
+                    collection_date = datetime.strptime(full_date, "%d %B %Y").strftime(
+                        date_format
+                    )
+
+                    # Now get all bin types in the sibling image blocks
+                    bin_image_blocks = panel.find_next_siblings(
+                        "div", class_="waste_image"
+                    )
+                    for block in bin_image_blocks:
+                        label = block.find("span")
+                        if label:
+                            bin_type = label.text.strip()
+                            dict_data = {
+                                "type": bin_type,
+                                "collectionDate": collection_date,
+                            }
+                            data["bins"].append(dict_data)
+                except Exception as e:
+                    print(f"Skipping one panel due to: {e}")
+
+            data["bins"].sort(
+                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
+            )
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return data
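The heart of the new parser is the regex that tolerates ordinal suffixes ("1st", "22nd", "23rd", "5th") in the "Next collection" text before the date is rebuilt for `strptime`. A self-contained sketch of that extraction, with an invented panel string:

```python
import re
from datetime import datetime

text = "Next collection: Friday 23rd May"  # hypothetical panel text
match = re.search(
    r"Next collection:\s*(\w+day)\s+(\d{1,2})(?:st|nd|rd|th)?\s+(\w+)", text
)
if match:
    _, day, month = match.groups()  # ("Friday", "23", "May"); suffix discarded
    # The parser pins the current year; strptime then wants "%d %B %Y"
    print(datetime.strptime(f"{day} {month} 2025", "%d %B %Y").date())  # 2025-05-23
```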
--- a/uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py

@@ -36,21 +36,29 @@ class CouncilClass(AbstractGetBinDataClass):
         s = requests.session()

         # Ask for a new SessionId from the server
-        session_id_url =
-            "
+        session_id_url = (
+            "https://maps.cheltenham.gov.uk/map/Aurora.svc/"
+            "RequestSession?userName=guest+CBC&password=&"
             "script=%5CAurora%5CCBC+Waste+Streets.AuroraScript%24"
+        )
         session_id_response = s.get(session_id_url)
         session_id_response.raise_for_status()
         session_id = session_id_response.json().get("Session").get("SessionId")

         # Ask what tasks we can do within the session
-        tasks_url =
+        tasks_url = (
+            f"https://maps.cheltenham.gov.uk/map/Aurora.svc/"
             f"GetWorkflow?sessionId={session_id}&workflowId=wastestreet"
+        )
         tasks_response = s.get(tasks_url)
         tasks_response.raise_for_status()
         # JSON response contained a BOM marker
         tasks = json.loads(tasks_response.text[1:])
-        retrieve_results_task_id, initialise_map_task_id, drilldown_task_id =
+        retrieve_results_task_id, initialise_map_task_id, drilldown_task_id = (
+            None,
+            None,
+            None,
+        )
         # Pull out the ID's of the tasks we will need
         for task in tasks.get("Tasks"):
             if task.get("$type") == "StatMap.Aurora.FetchResultSetTask, StatMapService":
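The pattern running through the Cheltenham changes above and below is wrapping long URL strings in parentheses: adjacent string literals are concatenated at compile time, so the previous continuation-style assignments become ordinary parenthesised expressions. A minimal illustration with a placeholder URL:

```python
# Adjacent string literals inside parentheses are joined at compile time,
# so long query strings wrap cleanly without "\" line continuations.
# (example.invalid is a placeholder host, not from this diff)
url = (
    "https://example.invalid/map/Aurora.svc/"
    "RequestSession?userName=guest&"
    "script=demo"
)
assert url == "https://example.invalid/map/Aurora.svc/RequestSession?userName=guest&script=demo"
```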
@@ -59,64 +67,86 @@ class CouncilClass(AbstractGetBinDataClass):
                 initialise_map_task_id = task.get("Id")
             elif task.get("$type") == "StatMap.Aurora.DrillDownTask, StatMapService":
                 drilldown_task_id = task.get("Id")
-        if not all(
+        if not all(
+            [retrieve_results_task_id, initialise_map_task_id, drilldown_task_id]
+        ):
             raise ValueError("Not all task ID's found")

         # Find the X / Y coordinates for the requested postcode
-        postcode_search_url =
+        postcode_search_url = (
+            "https://maps.cheltenham.gov.uk/map/Aurora.svc/FindLocation?"
             f"sessionId={session_id}&address={postcode}&limit=1000"
+        )
         postcode_search_response = s.get(postcode_search_url)
         postcode_search_response.raise_for_status()
         if len(locations_list := postcode_search_response.json().get("Locations")) == 0:
             raise ValueError("Address locations empty")
         for location in locations_list:
-            location_search_url =
+            location_search_url = (
+                "https://maps.cheltenham.gov.uk/map/Aurora.svc/FindLocation?"
                 f"sessionId={session_id}&locationId={location.get('Id')}"
+            )
             location_search_response = s.get(location_search_url)
             location_search_response.raise_for_status()
             if not (location_list := location_search_response.json().get("Locations")):
                 raise KeyError("Locations wasn't present in results")
             if not (location_detail := location_list[0].get("Details")):
                 raise KeyError("Details wasn't present in location")
-            location_uprn = [
-
+            location_uprn = [
+                detail.get("Value")
+                for detail in location_detail
+                if detail.get("Name") == "UPRN"
+            ][0]
             if str(location_uprn) == uprn:
-                location_usrn = str(
-
+                location_usrn = str(
+                    [
+                        detail.get("Value")
+                        for detail in location_detail
+                        if detail.get("Name") == "USRN"
+                    ][0]
+                )
                 location_x = location_list[0].get("X")
                 location_y = location_list[0].get("Y")
                 break

         # Needed to initialise the server to allow follow on call
-        open_map_url =
+        open_map_url = (
+            "https://maps.cheltenham.gov.uk/map/Aurora.svc/OpenScriptMap?"
             f"sessionId={session_id}"
+        )
         if res := s.get(open_map_url):
             res.raise_for_status()

         # Needed to initialise the server to allow follow on call
-        save_state_map_url =
-
-            "
+        save_state_map_url = (
+            "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"
+            f"sessionId={session_id}&taskId={initialise_map_task_id}&job="
+            "%7BTask%3A+%7B+%24type%3A+%27StatMap.Aurora.SaveStateTask%2C"
             "+StatMapService%27+%7D%7D"
+        )
         if res := s.get(save_state_map_url):
             res.raise_for_status()

         # Start search for address given by x / y coord
-        drilldown_map_url =
-
-            f"
-            "
+        drilldown_map_url = (
+            "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"
+            f"sessionId={session_id}&taskId={drilldown_task_id}&job=%7B%22"
+            f"QueryX%22%3A{location_x}%2C%22QueryY%22%3A{location_y}%2C%22"
+            "Task%22%3A%7B%22Type%22%3A%22StatMap.Aurora.DrillDownTask%2C"
             "+StatMapService%22%7D%7D"
+        )
         if res := s.get(drilldown_map_url):
             res.raise_for_status()

         # Get results from search for address given by x / y coord
-        address_details_url =
-
-            f"&
-            f"
-            "
+        address_details_url = (
+            "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"
+            f"sessionId={session_id}&taskId={retrieve_results_task_id}"
+            f"&job=%7B%22QueryX%22%3A{location_x}%2C%22QueryY%22%3A"
+            f"{location_y}%2C%22Task%22%3A%7B%22Type%22%3A%22"
+            "StatMap.Aurora.FetchResultSetTask%2C+StatMapService"
             "%22%2C%22ResultSetName%22%3A%22inspection%22%7D%7D"
+        )
         address_details_response = s.get(address_details_url)
         address_details_response.raise_for_status()
         # JSON response contained a BOM marker, skip first character
@@ -150,7 +180,9 @@ class CouncilClass(AbstractGetBinDataClass):
         # After we've got the correct result, pull out the week number each bin type is taken on
         if (refuse_week_raw := result_dict.get("New_Refuse_Week".upper())) is not None:
             refuse_week = int(refuse_week_raw)
-        if (
+        if (
+            recycling_week_raw := result_dict.get("New_Recycling_Week".upper())
+        ) is not None:
             recycling_week = int(recycling_week_raw)
         if (garden_week_raw := result_dict.get("Garden_Bin_Week".upper())) is not None:
             garden_week = int(garden_week_raw)
@@ -169,13 +201,17 @@ class CouncilClass(AbstractGetBinDataClass):
         ]

         refuse_day_offset = days_of_week.index(
-            str(result_dict.get("New_Refuse_Day_internal".upper())).upper()
+            str(result_dict.get("New_Refuse_Day_internal".upper())).upper()
+        )
         recycling_day_offset = days_of_week.index(
-            str(result_dict.get("New_Recycling_Day".upper())).upper()
+            str(result_dict.get("New_Recycling_Day".upper())).upper()
+        )
         garden_day_offset = days_of_week.index(
-            str(result_dict.get("New_Garden_Day".upper())).upper()
+            str(result_dict.get("New_Garden_Day".upper())).upper()
+        )
         food_day_offset = days_of_week.index(
-            str(result_dict.get("New_Food_Day".upper())).upper()
+            str(result_dict.get("New_Food_Day".upper())).upper()
+        )

         # Initialise WEEK-1/WEEK-2 based on known details
         week_1_epoch = datetime(2025, 1, 13)
@@ -186,27 +222,20 @@ class CouncilClass(AbstractGetBinDataClass):
         # If there's an even number of weeks between the week-1
         # epoch and this week, then this week is of type week-1
         if (((this_week - week_1_epoch).days // 7)) % 2 == 0:
-            week = {
-                1: this_week,
-                2: this_week + timedelta(days=7)
-            }
+            week = {1: this_week, 2: this_week + timedelta(days=7)}
         else:
-            week = {
-                1: this_week - timedelta(days=7),
-                2: this_week
-            }
+            week = {1: this_week - timedelta(days=7), 2: this_week}

-        refuse_dates: list[str] = get_dates_every_x_days(
-            week[refuse_week], 14, 28)
+        refuse_dates: list[str] = get_dates_every_x_days(week[refuse_week], 14, 28)
         recycling_dates: list[str] = get_dates_every_x_days(
-            week[recycling_week], 14, 28
-
-
+            week[recycling_week], 14, 28
+        )
+        garden_dates: list[str] = get_dates_every_x_days(week[garden_week], 14, 28)

         for refuse_date in refuse_dates:
             collection_date = (
-                datetime.strptime(refuse_date, "%d/%m/%Y")
-                timedelta(days=refuse_day_offset)
+                datetime.strptime(refuse_date, "%d/%m/%Y")
+                + timedelta(days=refuse_day_offset)
             ).strftime("%d/%m/%Y")

             dict_data = {
@@ -218,8 +247,8 @@ class CouncilClass(AbstractGetBinDataClass):
         for recycling_date in recycling_dates:

             collection_date = (
-                datetime.strptime(recycling_date, "%d/%m/%Y")
-                timedelta(days=recycling_day_offset)
+                datetime.strptime(recycling_date, "%d/%m/%Y")
+                + timedelta(days=recycling_day_offset)
             ).strftime("%d/%m/%Y")

             dict_data = {
@@ -231,8 +260,8 @@ class CouncilClass(AbstractGetBinDataClass):
         for garden_date in garden_dates:

             collection_date = (
-                datetime.strptime(garden_date, "%d/%m/%Y")
-                timedelta(days=garden_day_offset)
+                datetime.strptime(garden_date, "%d/%m/%Y")
+                + timedelta(days=garden_day_offset)
             ).strftime("%d/%m/%Y")

             dict_data = {
@@ -241,15 +270,18 @@ class CouncilClass(AbstractGetBinDataClass):
         }
         bindata["bins"].append(dict_data)

-        if (
-
+        if (
+            food_waste_week := str(
+                result_dict.get("FOOD_WASTE_WEEK_EXTERNAL", "")
+            ).upper()
+        ) == "weekly".upper():
             food_dates: list[str] = get_dates_every_x_days(week[1], 7, 56)

             for food_date in food_dates:

                 collection_date = (
-                    datetime.strptime(food_date, "%d/%m/%Y")
-                    timedelta(days=food_day_offset)
+                    datetime.strptime(food_date, "%d/%m/%Y")
+                    + timedelta(days=food_day_offset)
                 ).strftime("%d/%m/%Y")

                 dict_data = {
@@ -266,21 +298,24 @@ class CouncilClass(AbstractGetBinDataClass):
             first_week = int(first_week.strip())

             second_week_day, _, second_week_number = second_week_detail.partition(
-                "WEEK"
+                "WEEK"
+            )
             second_week_number = int(second_week_number.strip())
             second_week_day: str = second_week_day.strip()[:3]

             food_dates_first: list[str] = get_dates_every_x_days(
-                week[first_week], 14, 28
+                week[first_week], 14, 28
+            )
             food_dates_second: list[str] = get_dates_every_x_days(
-                week[second_week_number], 14, 28
+                week[second_week_number], 14, 28
+            )
             second_week_offset = days_of_week.index(second_week_day)

             for food_date in food_dates_first:

                 collection_date = (
-                    datetime.strptime(food_date, "%d/%m/%Y")
-                    timedelta(days=food_day_offset)
+                    datetime.strptime(food_date, "%d/%m/%Y")
+                    + timedelta(days=food_day_offset)
                 ).strftime("%d/%m/%Y")

                 dict_data = {
@@ -291,8 +326,8 @@ class CouncilClass(AbstractGetBinDataClass):
             for food_date in food_dates_second:

                 collection_date = (
-                    datetime.strptime(food_date, "%d/%m/%Y")
-                    timedelta(days=second_week_offset)
+                    datetime.strptime(food_date, "%d/%m/%Y")
+                    + timedelta(days=second_week_offset)
                 ).strftime("%d/%m/%Y")

                 dict_data = {
@@ -302,7 +337,6 @@ class CouncilClass(AbstractGetBinDataClass):
         bindata["bins"].append(dict_data)

         bindata["bins"].sort(
-            key=lambda x: datetime.strptime(
-                x.get("collectionDate", ""), "%d/%m/%Y")
+            key=lambda x: datetime.strptime(x.get("collectionDate", ""), "%d/%m/%Y")
         )
         return bindata
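Worth a worked check: the fortnightly schedule above hinges on the parity of whole weeks elapsed since a known "week 1" Monday, `datetime(2025, 1, 13)`. A standalone sketch of the same arithmetic with two sample Mondays:

```python
from datetime import datetime, timedelta

week_1_epoch = datetime(2025, 1, 13)  # a known week-1 Monday

def classify(monday: datetime) -> dict:
    """Map a Monday to {1: week-1 Monday, 2: week-2 Monday} around it."""
    if ((monday - week_1_epoch).days // 7) % 2 == 0:
        return {1: monday, 2: monday + timedelta(days=7)}
    return {1: monday - timedelta(days=7), 2: monday}

print(classify(datetime(2025, 1, 27)))  # 14 days on: even parity, so a week-1 week
print(classify(datetime(2025, 2, 3)))   # 21 days on: odd parity, so a week-2 week
```

Both calls produce the same {1: 27 Jan, 2: 3 Feb} mapping, which is exactly why the parser can anchor either branch of the schedule off whichever week it is run in.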
--- a/uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py

@@ -6,6 +6,7 @@ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataC
 This module provides bin collection data for Cheshire East Council.
 """

+
 class CouncilClass(AbstractGetBinDataClass):
     """
     A class to fetch and parse bin collection data for Cheshire East Council.
--- a/uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py

@@ -20,7 +20,10 @@ class CouncilClass(AbstractGetBinDataClass):
         curr_date = datetime.today()

         soup = BeautifulSoup(page.content, features="html.parser")
-        button = soup.find(
+        button = soup.find(
+            "a",
+            text="Find out which bin will be collected when and sign up for a free email reminder.",
+        )

         if button["href"]:
             URI = button["href"]
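A side note on the Coventry lookup, as general BeautifulSoup behaviour rather than anything this release changes: the `text=` keyword of `find()` is deprecated in recent bs4 releases in favour of `string=`, so the same match can be written as below (sample markup invented):

```python
from bs4 import BeautifulSoup

html = '<a href="/reminders">Find out which bin will be collected when</a>'  # sample markup
soup = BeautifulSoup(html, features="html.parser")

# "string=" is the non-deprecated spelling of the old "text=" keyword
button = soup.find("a", string="Find out which bin will be collected when")
print(button["href"])  # /reminders
```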