uk_bin_collection 0.144.1__py3-none-any.whl → 0.144.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uk_bin_collection/tests/input.json +19 -1
- uk_bin_collection/uk_bin_collection/common.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/Hillingdon.py +273 -0
- uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py +67 -91
- uk_bin_collection/uk_bin_collection/councils/MidUlsterDistrictCouncil.py +135 -0
- uk_bin_collection/uk_bin_collection/councils/SandwellBoroughCouncil.py +74 -41
- {uk_bin_collection-0.144.1.dist-info → uk_bin_collection-0.144.3.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.144.1.dist-info → uk_bin_collection-0.144.3.dist-info}/RECORD +11 -9
- {uk_bin_collection-0.144.1.dist-info → uk_bin_collection-0.144.3.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.144.1.dist-info → uk_bin_collection-0.144.3.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.144.1.dist-info → uk_bin_collection-0.144.3.dist-info}/entry_points.txt +0 -0
@@ -990,6 +990,15 @@
|
|
990
990
|
"wiki_name": "Highland Council",
|
991
991
|
"wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
|
992
992
|
},
|
993
|
+
"Hillingdon": {
|
994
|
+
"house_number": "1, Milverton Drive, Ickenham, UB10 8PP, Ickenham, Hillingdon",
|
995
|
+
"postcode": "UB10 8PP",
|
996
|
+
"skip_get_url": true,
|
997
|
+
"url": "https://www.hillingdon.gov.uk/collection-day",
|
998
|
+
"web_driver": "http://selenium:4444",
|
999
|
+
"wiki_name": "High Peak Council",
|
1000
|
+
"wiki_note": "Pass the postcode and the full address as it appears in the address pulldown menu."
|
1001
|
+
},
|
993
1002
|
"HinckleyandBosworthBoroughCouncil": {
|
994
1003
|
"uprn": "100030533512",
|
995
1004
|
"url": "https://www.hinckley-bosworth.gov.uk",
|
@@ -1053,7 +1062,7 @@
|
|
1053
1062
|
"wiki_note": "Provide your UPRN. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)."
|
1054
1063
|
},
|
1055
1064
|
"KnowsleyMBCouncil": {
|
1056
|
-
"house_number": "
|
1065
|
+
"house_number": "2 ALTMOOR ROAD HUYTON L36 3UY",
|
1057
1066
|
"postcode": "L36 3UY",
|
1058
1067
|
"skip_get_url": true,
|
1059
1068
|
"url": "https://knowsleytransaction.mendixcloud.com/link/youarebeingredirected?target=bincollectioninformation",
|
@@ -1277,6 +1286,15 @@
|
|
1277
1286
|
"wiki_name": "Midlothian Council",
|
1278
1287
|
"wiki_note": "Pass the house name/number wrapped in double quotes along with the postcode parameter."
|
1279
1288
|
},
|
1289
|
+
"MidUlsterDistrictCouncil": {
|
1290
|
+
"house_number": "20 HILLHEAD, STEWARTSTOWN, BT71 5HY",
|
1291
|
+
"postcode": "BT71 5HY",
|
1292
|
+
"skip_get_url": true,
|
1293
|
+
"url": "https://www.midulstercouncil.org",
|
1294
|
+
"web_driver": "http://selenium:4444",
|
1295
|
+
"wiki_name": "Mid Ulster District Council",
|
1296
|
+
"wiki_note": "Pass the full address of the house postcode as displayed on the site. This parser requires a Selenium webdriver."
|
1297
|
+
},
|
1280
1298
|
"MiltonKeynesCityCouncil": {
|
1281
1299
|
"uprn": "25109551",
|
1282
1300
|
"url": "https://mycouncil.milton-keynes.gov.uk/en/service/Waste_Collection_Round_Checker",
|
@@ -337,6 +337,7 @@ def create_webdriver(
|
|
337
337
|
options.add_argument("--disable-gpu")
|
338
338
|
options.add_argument("--start-maximized")
|
339
339
|
options.add_argument("--disable-dev-shm-usage")
|
340
|
+
options.add_argument("--window-size=1920,1080")
|
340
341
|
if user_agent:
|
341
342
|
options.add_argument(f"--user-agent={user_agent}")
|
342
343
|
options.add_experimental_option("excludeSwitches", ["enable-logging"])
|
@@ -0,0 +1,273 @@
|
|
1
|
+
import json
|
2
|
+
from datetime import datetime, timedelta
|
3
|
+
from typing import Any, Dict
|
4
|
+
|
5
|
+
from bs4 import BeautifulSoup
|
6
|
+
from dateutil.parser import parse
|
7
|
+
from selenium.common.exceptions import (
|
8
|
+
NoSuchElementException,
|
9
|
+
StaleElementReferenceException,
|
10
|
+
TimeoutException,
|
11
|
+
)
|
12
|
+
from selenium.webdriver.common.by import By
|
13
|
+
from selenium.webdriver.common.keys import Keys
|
14
|
+
from selenium.webdriver.remote.webdriver import WebDriver
|
15
|
+
from selenium.webdriver.support import expected_conditions as EC
|
16
|
+
from selenium.webdriver.support.wait import WebDriverWait
|
17
|
+
|
18
|
+
from uk_bin_collection.uk_bin_collection.common import *
|
19
|
+
from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
|
20
|
+
|
21
|
+
# Weekday lookup table: name -> index, matching datetime.weekday() (Monday=0 … Sunday=6).
DAYS_OF_WEEK = {
    name: index
    for index, name in enumerate(
        ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
    )
}
|
31
|
+
|
32
|
+
|
33
|
+
# This function checks for bank holiday collection changes,
# but the page seems manually written so might break easily
def get_bank_holiday_changes(driver: WebDriver) -> Dict[str, str]:
    """Fetch and parse bank holiday collection changes from the council website.

    Returns a mapping of normal collection date -> revised collection date,
    both formatted as dd/mm/YYYY. Rows whose dates cannot be parsed are
    reported and skipped.
    """
    driver.get("https://www.hillingdon.gov.uk/bank-holiday-collections")

    # Give the page up to 10 seconds to render at least one table
    WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.TAG_NAME, "table"))
    )

    page_soup = BeautifulSoup(driver.page_source, features="html.parser")
    date_changes: Dict[str, str] = {}

    # Scan every table, keeping only those with the expected two headers
    for tbl in page_soup.find_all("table"):
        header_texts = [cell.text.strip() for cell in tbl.find_all("th")]
        has_both = (
            "Normal collection day" in header_texts
            and "Revised collection day" in header_texts
        )
        if not has_both:
            continue

        # First row is the header; the rest carry normal/revised date pairs
        for data_row in tbl.find_all("tr")[1:]:
            cells = data_row.find_all("td")
            if len(cells) < 2:
                continue
            try:
                usual = parse(cells[0].text.strip(), fuzzy=True).strftime("%d/%m/%Y")
                moved = parse(cells[1].text.strip(), fuzzy=True).strftime("%d/%m/%Y")
            except Exception as e:
                print(f"Error parsing dates: {e}")
                continue
            date_changes[usual] = moved

    return date_changes
|
75
|
+
|
76
|
+
|
77
|
+
class CouncilClass(AbstractGetBinDataClass):
    """Bin-collection scraper for the London Borough of Hillingdon.

    Drives the council's Selenium-only address-lookup form, reads the weekly
    collection day from the results table, projects it onto the next calendar
    date, then overlays any bank-holiday date changes scraped from a second
    page via get_bank_holiday_changes().
    """

    def parse_data(self, page: str, **kwargs: Any) -> Dict[str, Any]:
        """Scrape collection dates for the address supplied in kwargs.

        Args:
            page: Unused; kept for interface compatibility.
            **kwargs: must contain 'paon' (full address as shown in the site's
                dropdown), 'postcode' and 'url'; 'web_driver' and 'headless'
                are optional Selenium settings.

        Returns:
            dict: {"bins": [{"type": ..., "collectionDate": "dd/mm/YYYY"}, ...]}

        Raises:
            Exception: on lookup timeouts, an unmatched address, or an
                unrecognisable collection-day string.
        """
        driver = None
        try:
            data: Dict[str, Any] = {"bins": []}
            user_paon = kwargs.get("paon")
            user_postcode = kwargs.get("postcode")
            web_driver = kwargs.get("web_driver")
            headless = kwargs.get("headless")
            url = kwargs.get("url")

            check_paon(user_paon)
            check_postcode(user_postcode)

            driver = create_webdriver(web_driver, headless, None, __name__)
            driver.get(url)

            # Handle cookie banner if present
            wait = WebDriverWait(driver, 30)
            try:
                cookie_button = wait.until(
                    EC.element_to_be_clickable(
                        (
                            By.CLASS_NAME,
                            "btn btn--cookiemessage btn--cancel btn--contrast",
                        )
                    )
                )
                cookie_button.click()
            except (TimeoutException, NoSuchElementException):
                pass

            # Enter postcode and trigger the address lookup
            post_code_input = wait.until(
                EC.element_to_be_clickable(
                    (
                        By.ID,
                        "WASTECOLLECTIONDAYLOOKUPINCLUDEGARDEN_ADDRESSLOOKUPPOSTCODE",
                    )
                )
            )
            post_code_input.clear()
            post_code_input.send_keys(user_postcode)
            post_code_input.send_keys(Keys.TAB + Keys.ENTER)

            # Wait for address options to populate
            try:
                address_select = wait.until(
                    EC.presence_of_element_located(
                        (
                            By.ID,
                            "WASTECOLLECTIONDAYLOOKUPINCLUDEGARDEN_ADDRESSLOOKUPADDRESS",
                        )
                    )
                )

                # FIX: scope the option-count wait to the address dropdown.
                # Previously this counted <option> elements anywhere on the
                # page, which could be satisfied by unrelated markup before
                # the address list had actually loaded.
                wait.until(
                    lambda _: len(
                        address_select.find_elements(By.TAG_NAME, "option")
                    )
                    > 1
                )

                # Skip the placeholder option
                options = address_select.find_elements(By.TAG_NAME, "option")[1:]
                if not options:
                    raise Exception(f"No addresses found for postcode: {user_postcode}")

                # Compare addresses on alphanumerics only, case-insensitively,
                # so punctuation/spacing differences don't prevent a match
                normalized_user_input = "".join(
                    c for c in user_paon if c.isalnum()
                ).lower()

                for option in options:
                    normalized_option = "".join(
                        c for c in option.text if c.isalnum()
                    ).lower()
                    if normalized_user_input in normalized_option:
                        option.click()
                        break
                else:
                    # FIX: previously an unmatched address fell through
                    # silently and the scrape continued with no selection,
                    # returning results for the wrong (or no) address.
                    raise Exception(
                        f"Address '{user_paon}' not found for postcode: {user_postcode}"
                    )
            except TimeoutException:
                raise Exception("Timeout waiting for address options to populate")

            # Wait for the collection table to appear...
            wait.until(
                EC.presence_of_element_located(
                    (By.ID, "WASTECOLLECTIONDAYLOOKUPINCLUDEGARDEN_COLLECTIONTABLE")
                )
            )

            # ...and for the collection-day cell to be fully populated
            wait.until(
                lambda driver: len(
                    driver.find_element(
                        By.ID, "WASTECOLLECTIONDAYLOOKUPINCLUDEGARDEN_COLLECTIONTABLE"
                    )
                    .find_elements(By.TAG_NAME, "tr")[2]
                    .find_elements(By.TAG_NAME, "td")[1]
                    .text.strip()
                    .split()
                )
                > 1
            )

            # Parse the rendered table
            soup = BeautifulSoup(driver.page_source, features="html.parser")
            table = soup.find(
                "div", id="WASTECOLLECTIONDAYLOOKUPINCLUDEGARDEN_COLLECTIONTABLE"
            ).find("table")

            # Determine the weekly collection day from the table text
            collection_day_text = table.find_all("tr")[2].find_all("td")[1].text.strip()
            day_of_week = next(
                (
                    day
                    for day in DAYS_OF_WEEK
                    if day.lower() in collection_day_text.lower()
                ),
                None,
            )
            if not day_of_week:
                raise Exception(
                    f"Could not determine collection day from text: '{collection_day_text}'"
                )

            # Project the weekday onto the next calendar date
            today = datetime.now()
            days_ahead = (DAYS_OF_WEEK[day_of_week] - today.weekday()) % 7
            if days_ahead == 0:  # If today is collection day, get next week's date
                days_ahead = 7
            next_collection = today + timedelta(days=days_ahead)

            # Add collection dates for each bin type
            bin_types = ["General Waste", "Recycling", "Food Waste"]
            for bin_type in bin_types:
                data["bins"].append(
                    {
                        "type": bin_type,
                        "collectionDate": next_collection.strftime("%d/%m/%Y"),
                    }
                )

            # Process per-bin collection details rendered below the table
            bin_rows = soup.select("div.bin--row:not(:first-child)")
            for row in bin_rows:
                try:
                    bin_type = row.select_one("div.col-md-3").text.strip()
                    collection_dates_div = row.select("div.col-md-3")[1]
                    # Only direct text nodes: child elements hold other info
                    next_collection_text = "".join(
                        collection_dates_div.find_all(text=True, recursive=False)
                    ).strip()
                    cleaned_date_text = remove_ordinal_indicator_from_date_string(
                        next_collection_text
                    )
                    parsed_date = parse(cleaned_date_text, fuzzy=True)
                    bin_date = parsed_date.strftime("%d/%m/%Y")

                    if bin_type and bin_date:
                        data["bins"].append(
                            {
                                "type": bin_type,
                                "collectionDate": bin_date,
                            }
                        )
                except Exception as e:
                    print(f"Error processing item: {e}")
                    continue

            # Get bank holiday changes
            print("\nChecking for bank holiday collection changes...")
            bank_holiday_changes = get_bank_holiday_changes(driver)

            # Apply any bank holiday changes to collection dates
            for bin_data in data["bins"]:
                original_date = bin_data["collectionDate"]
                if original_date in bank_holiday_changes:
                    new_date = bank_holiday_changes[original_date]
                    print(
                        f"Bank holiday change: {bin_data['type']} collection moved from {original_date} to {new_date}"
                    )
                    bin_data["collectionDate"] = new_date

        except Exception as e:
            print(f"An error occurred: {e}")
            raise
        finally:
            if driver:
                driver.quit()

        # Print the final data dictionary for debugging
        print("\nFinal data dictionary:")
        print(json.dumps(data, indent=2))

        return data
@@ -1,140 +1,116 @@
|
|
1
1
|
import time
|
2
|
-
|
3
2
|
from bs4 import BeautifulSoup
|
3
|
+
from datetime import datetime
|
4
4
|
from selenium.webdriver.common.by import By
|
5
|
+
from selenium.webdriver.common.keys import Keys
|
5
6
|
from selenium.webdriver.support import expected_conditions as EC
|
6
|
-
from selenium.webdriver.support.ui import
|
7
|
-
from selenium.webdriver.support.wait import WebDriverWait
|
7
|
+
from selenium.webdriver.support.ui import WebDriverWait
|
8
8
|
|
9
9
|
from uk_bin_collection.uk_bin_collection.common import *
|
10
10
|
from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
|
11
11
|
|
12
12
|
|
13
|
-
# import the wonderful Beautiful Soup and the URL grabber
|
14
13
|
class CouncilClass(AbstractGetBinDataClass):
    """Bin-collection scraper for Knowsley Metropolitan Borough Council.

    Walks the council's postcode-search journey with Selenium: accept
    cookies, open the postcode search, enter the postcode, pick the address,
    then parse the "<colour> bin next collection date" labels from the
    resulting Mendix data view.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Scrape collection dates for the address supplied in kwargs.

        Args:
            page: Unused; kept for interface compatibility.
            **kwargs: must contain 'paon' (address text as shown in the
                results list) and 'postcode'; 'web_driver' and 'headless'
                are optional Selenium settings.

        Returns:
            dict: {"bins": [{"type": ..., "collectionDate": "dd/mm/YYYY"}, ...]}
                sorted by collection date.
        """
        driver = None
        try:
            bindata = {"bins": []}
            user_paon = kwargs.get("paon")
            user_postcode = kwargs.get("postcode")
            web_driver = kwargs.get("web_driver")
            headless = kwargs.get("headless")

            check_paon(user_paon)
            check_postcode(user_postcode)

            driver = create_webdriver(web_driver, headless, None, __name__)
            driver.set_window_size(1920, 1080)  # ensure full viewport so elements are clickable

            driver.get("https://www.knowsley.gov.uk/bins-waste-and-recycling/your-household-bins/putting-your-bins-out")

            # Dismiss cookie popup if it exists
            try:
                accept_cookies = WebDriverWait(driver, 10).until(
                    EC.element_to_be_clickable((By.XPATH, "//a[contains(@class, 'agree-button') and contains(text(), 'Accept all cookies')]"))
                )
                accept_cookies.click()
                time.sleep(1)
            # FIX: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit; narrow to Exception.
            except Exception:
                pass  # Cookie popup not shown

            # Step 1: Click "Search by postcode"
            search_btn = WebDriverWait(driver, 60).until(
                EC.element_to_be_clickable(
                    (By.XPATH, "//a[contains(text(), 'Search by postcode to find out when your bins are emptied')]")
                )
            )
            search_btn.send_keys(Keys.RETURN)

            # Step 2: Enter postcode
            postcode_box = WebDriverWait(driver, 60).until(
                EC.presence_of_element_located(
                    (By.XPATH, "//label[contains(text(), 'Please enter the post code')]/following-sibling::input")
                )
            )
            postcode_box.send_keys(user_postcode)

            postcode_search_btn = WebDriverWait(driver, 60).until(
                EC.element_to_be_clickable(
                    (By.XPATH, "//label[contains(text(), 'Please enter the post code')]/parent::div/following-sibling::button")
                )
            )
            postcode_search_btn.send_keys(Keys.RETURN)

            # Step 3: Select address from results
            address_selection_button = WebDriverWait(driver, 60).until(
                EC.element_to_be_clickable(
                    (By.XPATH, f"//span[contains(text(), '{user_paon}')]/ancestor::li//button")
                )
            )
            address_selection_button.send_keys(Keys.RETURN)

            # Step 4: Wait until the bin info is present
            WebDriverWait(driver, 60).until(
                EC.presence_of_element_located(
                    (By.XPATH, "//label[contains(text(), 'collection')]")
                )
            )

            bin_info_container = driver.find_element(
                By.XPATH, "//label[contains(text(), 'collection')]/ancestor::div[contains(@class, 'mx-dataview-content')]")

            soup = BeautifulSoup(bin_info_container.get_attribute("innerHTML"), "html.parser")

            # Each form-group pairs a label ("Grey bin next collection date")
            # with its value ("Monday 21/04/2025")
            for group in soup.find_all("div", class_="form-group"):
                label = group.find("label")
                value = group.find("div", class_="form-control-static")
                if not label or not value:
                    continue

                label_text = label.text.strip()
                value_text = value.text.strip()

                if "bin next collection date" in label_text.lower():
                    bin_type = label_text.split(" bin")[0]
                    try:
                        collection_date = datetime.strptime(value_text, "%A %d/%m/%Y").strftime("%d/%m/%Y")
                    except ValueError:
                        # Value wasn't a date (e.g. "Not available") — skip it
                        continue

                    bindata["bins"].append({
                        "type": bin_type,
                        "collectionDate": collection_date,
                    })

            # Soonest collection first
            bindata["bins"].sort(
                key=lambda x: datetime.strptime(x["collectionDate"], "%d/%m/%Y")
            )

        except Exception as e:
            print(f"An error occurred: {e}")
            raise
        finally:
            if driver:
                driver.quit()

        return bindata
|
@@ -0,0 +1,135 @@
|
|
1
|
+
import time
|
2
|
+
|
3
|
+
from bs4 import BeautifulSoup
|
4
|
+
from selenium.webdriver.common.by import By
|
5
|
+
from selenium.webdriver.support import expected_conditions as EC
|
6
|
+
from selenium.webdriver.support.ui import Select, WebDriverWait
|
7
|
+
|
8
|
+
#import selenium keys
|
9
|
+
from selenium.webdriver.common.keys import Keys
|
10
|
+
|
11
|
+
from uk_bin_collection.uk_bin_collection.common import *
|
12
|
+
from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
|
13
|
+
|
14
|
+
# import the wonderful Beautiful Soup and the URL grabber
|
15
|
+
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Scrape Mid Ulster District Council bin collections via Selenium.

        Args:
            page: Unused; the lookup URL is hard-coded below.
            **kwargs: must contain 'paon' (address text as shown in the
                results buttons) and 'postcode'; 'web_driver' and 'headless'
                are optional Selenium settings.

        Returns:
            dict: {"bins": [{"type": ..., "collectionDate": ...}, ...]}
                using the project-wide date_format.
        """
        driver = None
        try:
            user_postcode = kwargs.get("postcode")
            if not user_postcode:
                raise ValueError("No postcode provided.")
            check_postcode(user_postcode)
            user_paon = kwargs.get("paon")
            check_paon(user_paon)

            headless = kwargs.get("headless")
            web_driver = kwargs.get("web_driver")
            driver = create_webdriver(web_driver, headless, None, __name__)
            page = "https://www.midulstercouncil.org/resident/bins-recycling"

            driver.get(page)

            wait = WebDriverWait(driver, 10)

            # Dismiss the cookie banner if present; best-effort only
            try:
                accept_cookies_button = wait.until(
                    EC.element_to_be_clickable(
                        (
                            By.XPATH,
                            "//button/span[contains(text(), 'I Accept Cookies')]",
                        )
                    )
                )
                accept_cookies_button.click()
            except Exception:
                print(
                    "Accept cookies button not found or clickable within the specified time."
                )

            # Enter the postcode and run the search
            postcode_input = wait.until(
                EC.presence_of_element_located((By.ID, "postcode-search-input"))
            )
            postcode_input.send_keys(user_postcode)

            postcode_search_btn = wait.until(
                EC.element_to_be_clickable(
                    (By.XPATH, "//button[contains(text(), 'Go')]")
                )
            )
            postcode_search_btn.click()

            # Pick the button matching the caller's address
            address_btn = wait.until(
                EC.element_to_be_clickable(
                    (By.XPATH, f"//button[contains(text(), '{user_paon}')]")
                )
            )
            address_btn.send_keys(Keys.RETURN)

            # Wait for the results panel to render
            wait.until(
                EC.presence_of_element_located(
                    (By.XPATH, "//h3[contains(text(), 'Collection day:')]")
                )
            )

            results = wait.until(
                EC.presence_of_element_located(
                    (By.XPATH, "//div/h3[contains(text(), 'My address:')]/parent::div")
                )
            )

            soup = BeautifulSoup(
                results.get_attribute("innerHTML"), features="html.parser"
            )
            data = {"bins": []}

            # 1. Extract the date string (site shows e.g. "18 Apr", no year)
            try:
                date_span = soup.select_one("h2.collection-day span.date-text")
                if date_span:
                    date_text = date_span.text.strip()
                    today = datetime.now()
                    parsed = datetime.strptime(
                        f"{date_text} {today.year}", "%d %b %Y"
                    )
                    # FIX: year rollover. The site omits the year, and
                    # stamping the current year unconditionally turned a
                    # January collection scraped in late December into a
                    # date eleven months in the past. Bump the year when
                    # the stamped date is implausibly old.
                    if (today - parsed).days > 300:
                        parsed = parsed.replace(year=today.year + 1)
                    collection_date = parsed.strftime(date_format)
                else:
                    collection_date = None
            except Exception as e:
                print(f"Failed to parse date: {e}")
                collection_date = None

            # 2. Extract bin types — every bin shown shares the one date
            if collection_date:
                bin_blocks = soup.select("div.bin")
                for bin_block in bin_blocks:
                    bin_title_div = bin_block.select_one("div.bin-title")
                    if bin_title_div:
                        bin_type = bin_title_div.get_text(strip=True)
                        data["bins"].append({
                            "type": bin_type,
                            "collectionDate": collection_date,
                        })

            # 3. Optional: sort bins by collectionDate
            data["bins"].sort(
                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
            )

        except Exception as e:
            # Here you can log the exception if needed
            print(f"An error occurred: {e}")
            # Optionally, re-raise the exception if you want it to propagate
            raise
        finally:
            # This block ensures that the driver is closed regardless of an exception
            if driver:
                driver.quit()
        return data
|
@@ -1,10 +1,14 @@
|
|
1
|
+
import logging
|
1
2
|
import time
|
3
|
+
from datetime import datetime
|
2
4
|
|
3
5
|
import requests
|
4
6
|
|
5
7
|
from uk_bin_collection.uk_bin_collection.common import *
|
6
8
|
from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
|
7
9
|
|
10
|
+
logger = logging.getLogger(__name__)
|
11
|
+
|
8
12
|
|
9
13
|
# import the wonderful Beautiful Soup and the URL grabber
|
10
14
|
class CouncilClass(AbstractGetBinDataClass):
|
@@ -14,29 +18,51 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
14
18
|
implementation.
|
15
19
|
"""
|
16
20
|
|
21
|
+
SESSION_URL = "https://my.sandwell.gov.uk/authapi/isauthenticated?uri=https%253A%252F%252Fmy.sandwell.gov.uk%252Fen%252F..."
|
22
|
+
API_URL = "https://my.sandwell.gov.uk/apibroker/runLookup"
|
23
|
+
HEADERS = {
|
24
|
+
"Content-Type": "application/json",
|
25
|
+
"Accept": "application/json",
|
26
|
+
"User-Agent": "Mozilla/5.0",
|
27
|
+
"X-Requested-With": "XMLHttpRequest",
|
28
|
+
"Referer": "https://my.sandwell.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
|
29
|
+
}
|
30
|
+
LOOKUPS = [
|
31
|
+
(
|
32
|
+
"58a1a71694992",
|
33
|
+
"DWDate",
|
34
|
+
[
|
35
|
+
"Recycling (Blue)",
|
36
|
+
"Household Waste (Grey)",
|
37
|
+
"Food Waste (Brown)",
|
38
|
+
"Batteries",
|
39
|
+
],
|
40
|
+
),
|
41
|
+
("56b1cdaf6bb43", "GWDate", ["Garden Waste (Green)"]),
|
42
|
+
]
|
43
|
+
|
17
44
|
def parse_data(self, page: str, **kwargs) -> dict:
|
45
|
+
"""
|
46
|
+
Parse bin collection data for a given UPRN using the Sandwell API.
|
18
47
|
|
48
|
+
Args:
|
49
|
+
page (str): Unused HTML page content.
|
50
|
+
**kwargs: Must include 'uprn'.
|
51
|
+
|
52
|
+
Returns:
|
53
|
+
dict: A dictionary with bin collection types and dates.
|
54
|
+
"""
|
19
55
|
user_uprn = kwargs.get("uprn")
|
20
56
|
check_uprn(user_uprn)
|
21
57
|
bindata = {"bins": []}
|
22
58
|
|
23
|
-
|
24
|
-
|
25
|
-
API_URL = "https://my.sandwell.gov.uk/apibroker/runLookup"
|
26
|
-
|
27
|
-
headers = {
|
28
|
-
"Content-Type": "application/json",
|
29
|
-
"Accept": "application/json",
|
30
|
-
"User-Agent": "Mozilla/5.0",
|
31
|
-
"X-Requested-With": "XMLHttpRequest",
|
32
|
-
"Referer": "https://my.sandwell.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
|
33
|
-
}
|
34
|
-
s = requests.session()
|
59
|
+
session = requests.session()
|
35
60
|
# Establish a session and grab the session ID
|
36
|
-
r =
|
61
|
+
r = session.get(self.SESSION_URL)
|
37
62
|
r.raise_for_status()
|
38
63
|
session_data = r.json()
|
39
64
|
sid = session_data["auth-session"]
|
65
|
+
timestamp = str(int(time.time() * 1000))
|
40
66
|
|
41
67
|
payload = {
|
42
68
|
"formValues": {
|
@@ -45,7 +71,7 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
45
71
|
"value": user_uprn,
|
46
72
|
},
|
47
73
|
"NextCollectionFromDate": {
|
48
|
-
"value": datetime.
|
74
|
+
"value": datetime.today().strftime("%Y-%m-%d")
|
49
75
|
},
|
50
76
|
},
|
51
77
|
},
|
@@ -57,39 +83,46 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
57
83
|
"log_id": "",
|
58
84
|
"app_name": "AF-Renderer::Self",
|
59
85
|
# unix_timestamp
|
60
|
-
"_":
|
86
|
+
"_": timestamp,
|
61
87
|
"sid": sid,
|
62
88
|
}
|
63
89
|
# (request_id, date field to use from response, bin type labels)
|
64
|
-
|
65
|
-
|
66
|
-
"58a1a71694992",
|
67
|
-
"DWDate",
|
68
|
-
[
|
69
|
-
"Recycling (Blue)",
|
70
|
-
"Household Waste (Grey)",
|
71
|
-
"Food Waste (Brown)",
|
72
|
-
"Batteries",
|
73
|
-
],
|
74
|
-
),
|
75
|
-
("56b1cdaf6bb43", "GWDate", ["Garden Waste (Green)"]),
|
76
|
-
]
|
77
|
-
|
78
|
-
for request_id, date_key, bin_types in lookups:
|
90
|
+
|
91
|
+
for request_id, date_key, bin_types in self.LOOKUPS:
|
79
92
|
params = {"id": request_id, **base_params}
|
80
93
|
|
81
|
-
|
82
|
-
|
83
|
-
|
94
|
+
try:
|
95
|
+
resp = session.post(
|
96
|
+
self.API_URL, json=payload, headers=self.HEADERS, params=params
|
97
|
+
)
|
98
|
+
resp.raise_for_status()
|
99
|
+
result = resp.json()
|
84
100
|
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
101
|
+
rows_data = result["integration"]["transformed"]["rows_data"]
|
102
|
+
|
103
|
+
if not isinstance(rows_data, dict):
|
104
|
+
logger.warning("Unexpected rows_data format: %s", rows_data)
|
105
|
+
continue
|
106
|
+
|
107
|
+
for row in rows_data.values():
|
108
|
+
date = row.get(date_key)
|
109
|
+
if not date:
|
110
|
+
logger.warning(
|
111
|
+
"Date key '%s' missing in row: %s", date_key, row
|
112
|
+
)
|
113
|
+
continue
|
89
114
|
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
115
|
+
for bin_type in bin_types:
|
116
|
+
bindata["bins"].append(
|
117
|
+
{"type": bin_type, "collectionDate": date}
|
118
|
+
)
|
119
|
+
|
120
|
+
except requests.RequestException as e:
|
121
|
+
logger.error("API request failed: %s", e)
|
122
|
+
continue
|
123
|
+
except (KeyError, ValueError, TypeError) as e:
|
124
|
+
logger.warning("Unexpected structure in response: %s", e)
|
125
|
+
continue
|
94
126
|
|
127
|
+
logger.info("Parsed bins: %s", bindata["bins"])
|
95
128
|
return bindata
|
@@ -3,7 +3,7 @@ uk_bin_collection/tests/check_selenium_url_in_input.json.py,sha256=Iecdja0I3XIiY
|
|
3
3
|
uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
|
4
4
|
uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
|
5
5
|
uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
|
6
|
-
uk_bin_collection/tests/input.json,sha256=
|
6
|
+
uk_bin_collection/tests/input.json,sha256=zxDyP74ZRcogEGCcRCGeDj1EOU61mpQJXSsv8z5sznE,122869
|
7
7
|
uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
|
8
8
|
uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
|
9
9
|
uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
|
@@ -12,7 +12,7 @@ uk_bin_collection/tests/test_common_functions.py,sha256=cCUwXKGijmsvTLz0KoaedXkp
|
|
12
12
|
uk_bin_collection/tests/test_conftest.py,sha256=qI_zgGjNOnwE9gmZUiuirL1SYz3TFw5yfGFgT4T3aG4,1100
|
13
13
|
uk_bin_collection/tests/test_get_data.py,sha256=sFJz_Fd6o-1r2gdmzY52JGwVi0Of_mDzvYSoc7a3RUw,7239
|
14
14
|
uk_bin_collection/uk_bin_collection/collect_data.py,sha256=dB7wWXsJX4fm5bIf84lexkvHIcO54CZ3JPxqmS-60YY,4654
|
15
|
-
uk_bin_collection/uk_bin_collection/common.py,sha256=
|
15
|
+
uk_bin_collection/uk_bin_collection/common.py,sha256=r3hV7HZv-WKr11nYq-99Dpmw_UPK6359MUFD6vDuFLc,11022
|
16
16
|
uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py,sha256=Je8VwVLK9KnYl9vqf2gWJ7ZYDgUq3A7caDiIzk5Xof8,4194
|
17
17
|
uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py,sha256=aO1CSdyqa8oAD0fB79y1Q9bikAWCP_JFa7CsyTa2j9s,1655
|
18
18
|
uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py,sha256=ppbrmm-MzB1wOulK--CU_0j4P-djNf3ozMhHnmQFqLo,1511
|
@@ -146,6 +146,7 @@ uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py,sha256=JpQh
|
|
146
146
|
uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=ZbSsmqHStd2JtTMAq1Bhcvsj1BYp6ijELyOjZFX2GSw,6435
|
147
147
|
uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt2iGl8qJxHb-uBh4u0knmi9MJ6irOJw9WYA,4805
|
148
148
|
uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
|
149
|
+
uk_bin_collection/uk_bin_collection/councils/Hillingdon.py,sha256=R1enDv5gjwCUT3HKgj8C87xWrwvrutAN6XLu5P7tef8,10532
|
149
150
|
uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py,sha256=51vXTKrstfJhb7cLCcrsvA9qKCsptyNMZvy7ML9DasM,2344
|
150
151
|
uk_bin_collection/uk_bin_collection/councils/HounslowCouncil.py,sha256=LXhJ47rujx7k3naz0tFiTT1l5k6gAYcVdekJN1t_HLY,4564
|
151
152
|
uk_bin_collection/uk_bin_collection/councils/HullCityCouncil.py,sha256=UHcesBoctFVcXDYuwfag43KbcJcopkEDzJ-54NxtK0Q,1851
|
@@ -155,7 +156,7 @@ uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py,sha256=xavzL6ZI
|
|
155
156
|
uk_bin_collection/uk_bin_collection/councils/KingsLynnandWestNorfolkBC.py,sha256=Shj18R-7NW4ivqJJFVJOLmf-EeN6hXP2Of30oI-SeAQ,1932
|
156
157
|
uk_bin_collection/uk_bin_collection/councils/KingstonUponThamesCouncil.py,sha256=iZ7njIxccCGBhUUWWd9Azh7cxUAKaofebCm3lo-TuxA,3543
|
157
158
|
uk_bin_collection/uk_bin_collection/councils/KirkleesCouncil.py,sha256=WPM7koIqK5Wz-iT9Mds6AptihGZtl4KZhkVTcT9cx_c,2762
|
158
|
-
uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py,sha256=
|
159
|
+
uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py,sha256=DsCVBCprCS2u_2E8IRBGOO_NWo-WC8i9jSxUI-rue_s,4654
|
159
160
|
uk_bin_collection/uk_bin_collection/councils/LancasterCityCouncil.py,sha256=FmHT6oyD4BwWuhxA80PHnGA7HPrLuyjP_54Cg8hT6k4,2537
|
160
161
|
uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py,sha256=VWdhw6qvCTj3EhFHf046xPWgc6szeFW2Xbt6W2J0e6w,4371
|
161
162
|
uk_bin_collection/uk_bin_collection/councils/LeicesterCityCouncil.py,sha256=o3kE8sjThQa4_AvSK5NH8VH7jWFO9MMPgoqLOTjyh0w,1851
|
@@ -183,6 +184,7 @@ uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py,s
|
|
183
184
|
uk_bin_collection/uk_bin_collection/councils/MidDevonCouncil.py,sha256=8MxqGgOJVseMkrTmEMT0EyDW7UMbXMoa5ZcJ2nD55Ew,3367
|
184
185
|
uk_bin_collection/uk_bin_collection/councils/MidSuffolkDistrictCouncil.py,sha256=h6M-v5jVYe7OlQ47Vf-0pEgECZLOOacK3_XE6zbpsM4,6329
|
185
186
|
uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py,sha256=AZgC9wmDLEjUOtIFvf0ehF5LHturXTH4DkE3ioPSVBA,6254
|
187
|
+
uk_bin_collection/uk_bin_collection/councils/MidUlsterDistrictCouncil.py,sha256=dBDZvWQRlOQQDhLJVq0OzQy6iazstQ9NnDb6PIj5NOw,5020
|
186
188
|
uk_bin_collection/uk_bin_collection/councils/MiddlesbroughCouncil.py,sha256=BiYexiZj-9PxRnB7sYRy0G-72s3L9jfh2vd1Y2NQwtg,4223
|
187
189
|
uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=-VKvdIhrs859-YqxsNMzRWm2alP1avBR1_J8O9gJnYw,6725
|
188
190
|
uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py,sha256=7e2pGBLCw24pNItHeI9jkxQ3rEOZ4WC4zVlbvKYGdXE,2600
|
@@ -239,7 +241,7 @@ uk_bin_collection/uk_bin_collection/councils/RunnymedeBoroughCouncil.py,sha256=v
|
|
239
241
|
uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py,sha256=nWo8xeER71FEbnMTX8W9bcwZNpLEExWzPvgRT7DmcMc,4221
|
240
242
|
uk_bin_collection/uk_bin_collection/councils/RushmoorCouncil.py,sha256=ZsGnXjoEaOS6U7fI0w7-uqxayAHdNVKsJi2fqIWEls8,3375
|
241
243
|
uk_bin_collection/uk_bin_collection/councils/SalfordCityCouncil.py,sha256=XUGemp2cdzsvkWjnv2m4YKTMcoKDUfIlVy3YucX-_o4,2601
|
242
|
-
uk_bin_collection/uk_bin_collection/councils/SandwellBoroughCouncil.py,sha256=
|
244
|
+
uk_bin_collection/uk_bin_collection/councils/SandwellBoroughCouncil.py,sha256=KePbZ_Ec4mvVEKu7-hd2CDy_qaWjhiNUnmbYh-Ghw2A,4275
|
243
245
|
uk_bin_collection/uk_bin_collection/councils/SeftonCouncil.py,sha256=XUEz2li0oHrRhdkls5qzlZNZ0GuwSG7r0dwsL-qdoFA,2480
|
244
246
|
uk_bin_collection/uk_bin_collection/councils/SevenoaksDistrictCouncil.py,sha256=qqrrRaSVm9CYAtm0rB2ZnyH_nLwaReuacoUxZpo597k,4260
|
245
247
|
uk_bin_collection/uk_bin_collection/councils/SheffieldCityCouncil.py,sha256=9g9AeiackoWyej9EVlKUzywzAtMuBVD0f93ZryAUha8,2016
|
@@ -329,8 +331,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
|
|
329
331
|
uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
|
330
332
|
uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
|
331
333
|
uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
|
332
|
-
uk_bin_collection-0.144.
|
333
|
-
uk_bin_collection-0.144.
|
334
|
-
uk_bin_collection-0.144.
|
335
|
-
uk_bin_collection-0.144.
|
336
|
-
uk_bin_collection-0.144.
|
334
|
+
uk_bin_collection-0.144.3.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
|
335
|
+
uk_bin_collection-0.144.3.dist-info/METADATA,sha256=p836krZfbmZyZq_lEotYfXqB6JdLNtK3y6b4lhFIioM,19858
|
336
|
+
uk_bin_collection-0.144.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
|
337
|
+
uk_bin_collection-0.144.3.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
|
338
|
+
uk_bin_collection-0.144.3.dist-info/RECORD,,
|
File without changes
|
File without changes
|
{uk_bin_collection-0.144.1.dist-info → uk_bin_collection-0.144.3.dist-info}/entry_points.txt
RENAMED
File without changes
|