uk_bin_collection 0.147.2__py3-none-any.whl → 0.148.1__py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
- uk_bin_collection/compare_lad_codes.py +9 -2
- uk_bin_collection/tests/generate_map_test_results.py +4 -1
- uk_bin_collection/tests/input.json +25 -2
- uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py +112 -41
- uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +71 -26
- uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py +4 -4
- uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py +1 -1
- uk_bin_collection/uk_bin_collection/councils/HorshamDistrictCouncil.py +123 -0
- uk_bin_collection/uk_bin_collection/councils/WrexhamCountyBoroughCouncil.py +123 -0
- {uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/RECORD +14 -12
- {uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/entry_points.txt +0 -0
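
The main changes in 0.148.1 are two new council modules (HorshamDistrictCouncil and WrexhamCountyBoroughCouncil) with matching fixtures in tests/input.json, a Selenium rewrite of ArgyllandButeCouncil, bank-holiday handling in CheltenhamBoroughCouncil, and selector fixes for East Lindsey and Harrogate. As a rough illustration (not part of the package) of how a new input.json entry lines up with the parser keyword arguments used later in this diff, assuming the wheel contents are unpacked in the working directory:

    # Illustration only: pull the new HorshamDistrictCouncil fixture out of
    # tests/input.json and build the kwargs that its parse_data() reads.
    import json

    with open("uk_bin_collection/tests/input.json") as f:
        councils = json.load(f)

    entry = councils["HorshamDistrictCouncil"]
    kwargs = {
        "uprn": entry["uprn"],              # "010013792717"
        "postcode": entry["postcode"],      # "RH12 1AA"
        "web_driver": entry["web_driver"],  # "http://selenium:4444"
        "headless": True,                   # assumed value; not stored in input.json
    }
    print(kwargs)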

uk_bin_collection/compare_lad_codes.py

@@ -1,6 +1,7 @@
 import json
 import geopandas as gpd

+
 def extract_lad_codes(input_json_path):
     with open(input_json_path, "r") as f:
         data = json.load(f)
@@ -21,6 +22,7 @@ def extract_lad_codes(input_json_path):

     return lad_codes, lad_code_to_council_input

+
 def compare_with_geojson(input_lad_codes, geojson_path):
     gdf = gpd.read_file(geojson_path)
     geojson_lad_codes = set(gdf["LAD24CD"].dropna().unique())
@@ -37,17 +39,22 @@ def compare_with_geojson(input_lad_codes, geojson_path):

     return matching, missing_in_input, extra_in_input, geojson_lad_map

+
 # --- Run the comparison ---
 input_json_path = "uk_bin_collection/tests/input.json"
 geojson_path = "uk_bin_collection/Local_Authority_Boundaries.geojson"

 input_lad_codes, input_name_map = extract_lad_codes(input_json_path)
-matching, missing, extra, geojson_name_map = compare_with_geojson(
+matching, missing, extra, geojson_name_map = compare_with_geojson(
+    input_lad_codes, geojson_path
+)

 # --- Print results ---
 print(f"✅ Matching LAD24CDs ({len(matching)}):")
 for code in sorted(matching):
-    print(
+    print(
+        f" {code} → input.json: {input_name_map.get(code)} | geojson: {geojson_name_map.get(code)}"
+    )

 print(f"\n🟡 LADs in GeoJSON but missing in input.json ({len(missing)}):")
 for code in sorted(missing):

uk_bin_collection/tests/generate_map_test_results.py

@@ -4,16 +4,18 @@ import xml.etree.ElementTree as ET
 from collections import defaultdict
 import re

+
 def extract_council_name(testname):
     """
     Extracts the council name from the test name.
     E.g. "test_scenario_outline[BarnetCouncil]" => "barnetcouncil"
     """
-    match = re.search(r
+    match = re.search(r"\[(.*?)\]", testname)
     if match:
         return match.group(1).strip().lower()
     return None

+
 def parse_junit_xml(path):
     tree = ET.parse(path)
     root = tree.getroot()
@@ -31,6 +33,7 @@ def parse_junit_xml(path):

     return results

+
 def main():
     if len(sys.argv) != 2:
         print("Usage: python generate_test_results.py <junit.xml path>")

uk_bin_collection/tests/input.json

@@ -45,8 +45,10 @@
     },
     "ArgyllandButeCouncil": {
         "skip_get_url": true,
-        "
-        "
+        "postcode": "PA286LJ",
+        "uprn": "000125011723",
+        "url": "https://www.argyll-bute.gov.uk/rubbish-and-recycling/household-waste/bin-collection",
+        "web_driver": "http://selenium:4444",
         "wiki_name": "Argyll and Bute Council",
         "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search).",
         "LAD24CD": "S12000035"
@@ -1169,6 +1171,16 @@
         "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN.",
         "LAD24CD": "E07000132"
     },
+    "HorshamDistrictCouncil": {
+        "postcode": "RH12 1AA",
+        "LAD24CD": "E07000227",
+        "skip_get_url": true,
+        "uprn": "010013792717",
+        "url": "https://www.horsham.gov.uk/waste-recycling-and-bins/household-bin-collections/check-your-bin-collection-day",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "Horsham District Council",
+        "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search). This parser requires a Selenium webdriver."
+    },
     "HullCityCouncil": {
         "LAD24CD": "E06000010",
         "skip_get_url": true,
@@ -2649,6 +2661,17 @@
         "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN.",
         "LAD24CD": "E07000237"
     },
+    "WrexhamCountyBoroughCouncil": {
+        "house_number": "1",
+        "postcode": "LL12 7RW",
+        "uprn": "200002944225",
+        "skip_get_url": true,
+        "url": "https://www.wrexham.gov.uk/service/when-are-my-bins-collected",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "Wrexham County Borough Council",
+        "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter.",
+        "LAD24CD": "W06000006"
+    },
     "WychavonDistrictCouncil": {
         "postcode": "WR3 7RU",
         "skip_get_url": true,

uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py

@@ -1,13 +1,15 @@
-import time
-
-import requests
 from bs4 import BeautifulSoup
-
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+from datetime import datetime
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass


 # import the wonderful Beautiful Soup and the URL grabber
+
+
 class CouncilClass(AbstractGetBinDataClass):
     """
     Concrete classes have to implement all abstract operations of the
@@ -16,52 +18,121 @@ class CouncilClass(AbstractGetBinDataClass):
     """

     def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            page = "https://www.argyll-bute.gov.uk/rubbish-and-recycling/household-waste/bin-collection"
+
+            user_uprn = kwargs.get("uprn")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            check_postcode(user_postcode)
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(page)
+
+            # Accept cookies
+            try:
+                accept_cookies = WebDriverWait(driver, timeout=10).until(
+                    EC.element_to_be_clickable(
+                        (By.XPATH, "//button[@id='ccc-recommended-settings']")
+                    )
+                )
+                accept_cookies.click()
+            except:
+                print(
+                    "Accept cookies banner not found or clickable within the specified time."
+                )
+                pass
+            # Wait for postcode entry box
+
+            postcode_input = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//input[@id='edit-postcode']")
+                )
+            )
+
+            postcode_input.send_keys(user_postcode)
+
+            search_btn = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located((By.ID, "edit-submit"))
+            )
+            search_btn.click()
+
+            address_results = Select(
+                WebDriverWait(driver, timeout=15).until(
+                    EC.presence_of_element_located(
+                        (By.XPATH, "//select[@id='edit-address']")
+                    )
+                )
+            )

-
-
-
-
+            address_results.select_by_value(user_uprn)
+            submit_btn = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//input[@value='Search for my bin collection details']")
+                )
+            )
+            submit_btn.click()
+
+            results = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (
+                        By.XPATH,
+                        "//th[contains(text(),'Collection date')]/ancestor::table",
+                    )
+                )
+            )

-
+            soup = BeautifulSoup(
+                results.get_attribute("innerHTML"), features="html.parser"
+            )

-
+            today = datetime.today()
+            current_year = today.year
+            current_month = today.month

-
-        r = s.post(URI, data=data)
-        r.raise_for_status()
+            bin_data = {"bins": []}

-
-
+            # Skip header
+            for row in soup.find_all("tr")[1:]:
+                cells = row.find_all("td")
+                if len(cells) < 2:
+                    continue

-
-
-        rows = table.find_all("tr")[1:]  # Skip the header row
+                bin_type = cells[0].get_text(strip=True)
+                raw_date = cells[1].get_text(strip=True)

-
-
-
-
-            bin_type = cells[0].get_text(strip=True)
-            collection_date = cells[1].get_text(strip=True)
+                try:
+                    # Parse day and month first to determine year
+                    partial_date = datetime.strptime(raw_date, "%A %d %B")
+                    month = partial_date.month

-
-
-                "%A %d %B",
-            )
+                    # Determine correct year based on current month
+                    year = current_year + 1 if month < current_month else current_year

-
-
-
-
+                    # Re-parse with the correct year
+                    full_date_str = f"{raw_date} {year}"
+                    parsed_date = datetime.strptime(full_date_str, "%A %d %B %Y")
+                    date_str = parsed_date.strftime(date_format)
+                except ValueError:
+                    continue

-
-                "type": bin_type,
-                "collectionDate": collection_date.strftime(date_format),
-            }
-            bindata["bins"].append(dict_data)
+                bin_data["bins"].append({"type": bin_type, "collectionDate": date_str})

-
-
-
+            # Sort by date
+            bin_data["bins"].sort(
+                key=lambda x: datetime.strptime(x["collectionDate"], date_format)
+            )

-
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bin_data

uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py

@@ -1,6 +1,6 @@
-# import re
-
 import requests
+from bs4 import BeautifulSoup
+from dateutil.parser import parse

 from uk_bin_collection.uk_bin_collection.common import (
     check_postcode,
@@ -232,11 +232,46 @@ class CouncilClass(AbstractGetBinDataClass):
         )
         garden_dates: list[str] = get_dates_every_x_days(week[garden_week], 14, 28)

+        # Build a dictionary of bank holiday changes
+        bank_holiday_bins_url = "https://www.cheltenham.gov.uk/bank-holiday-collections"
+        response = requests.get(bank_holiday_bins_url)
+        soup = BeautifulSoup(response.content, "html.parser")
+        response.close()
+        tables = soup.find_all("table")
+
+        # Build a dictionary to modify any bank holiday collections
+        bh_dict = {}
+        for table in tables:
+            # extract table body
+            for row in table.find_all("tr")[1:]:
+                if row.find_all("td")[1].text.strip() == "Normal collection day":
+                    bh_dict[
+                        parse(
+                            row.find_all("td")[0].text.strip(),
+                            dayfirst=True,
+                            fuzzy=True,
+                        ).date()
+                    ] = parse(
+                        row.find_all("td")[0].text.strip(), dayfirst=True, fuzzy=True
+                    ).date()
+                else:
+                    bh_dict[
+                        parse(
+                            row.find_all("td")[0].text.strip(),
+                            dayfirst=True,
+                            fuzzy=True,
+                        ).date()
+                    ] = parse(
+                        row.find_all("td")[1].text.strip(), dayfirst=True, fuzzy=True
+                    ).date()
+
         for refuse_date in refuse_dates:
-            collection_date = (
-
-
-
+            collection_date = datetime.strptime(refuse_date, "%d/%m/%Y") + timedelta(
+                days=refuse_day_offset
+            )
+            if collection_date in bh_dict:
+                collection_date = bh_dict[collection_date]
+            collection_date = collection_date.strftime("%d/%m/%Y")

             dict_data = {
                 "type": "Refuse Bin",
@@ -246,10 +281,12 @@ class CouncilClass(AbstractGetBinDataClass):

         for recycling_date in recycling_dates:

-            collection_date = (
-
-
-
+            collection_date = datetime.strptime(recycling_date, "%d/%m/%Y") + timedelta(
+                days=recycling_day_offset
+            )
+            if collection_date in bh_dict:
+                collection_date = bh_dict[collection_date]
+            collection_date = collection_date.strftime("%d/%m/%Y")

             dict_data = {
                 "type": "Recycling Bin",
@@ -259,10 +296,12 @@ class CouncilClass(AbstractGetBinDataClass):

         for garden_date in garden_dates:

-            collection_date = (
-
-
-
+            collection_date = datetime.strptime(garden_date, "%d/%m/%Y") + timedelta(
+                days=garden_day_offset
+            )
+            if collection_date in bh_dict:
+                collection_date = bh_dict[collection_date]
+            collection_date = collection_date.strftime("%d/%m/%Y")

             dict_data = {
                 "type": "Garden Waste Bin",
@@ -279,10 +318,12 @@ class CouncilClass(AbstractGetBinDataClass):

         for food_date in food_dates:

-            collection_date = (
-
-
-
+            collection_date = datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
+                days=food_day_offset
+            )
+            if collection_date in bh_dict:
+                collection_date = bh_dict[collection_date]
+            collection_date = collection_date.strftime("%d/%m/%Y")

             dict_data = {
                 "type": "Food Waste Bin",
@@ -313,10 +354,12 @@ class CouncilClass(AbstractGetBinDataClass):

         for food_date in food_dates_first:

-            collection_date = (
-
-
-
+            collection_date = datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
+                days=food_day_offset
+            )
+            if collection_date in bh_dict:
+                collection_date = bh_dict[collection_date]
+            collection_date = collection_date.strftime("%d/%m/%Y")

             dict_data = {
                 "type": "Food Waste Bin",
@@ -325,10 +368,12 @@ class CouncilClass(AbstractGetBinDataClass):
             bindata["bins"].append(dict_data)
         for food_date in food_dates_second:

-            collection_date = (
-
-
-
+            collection_date = datetime.strptime(food_date, "%d/%m/%Y") + timedelta(
+                days=second_week_offset
+            )
+            if collection_date in bh_dict:
+                collection_date = bh_dict[collection_date]
+            collection_date = collection_date.strftime("%d/%m/%Y")

             dict_data = {
                 "type": "Food Waste Bin",

uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py

@@ -33,7 +33,7 @@ class CouncilClass(AbstractGetBinDataClass):
         # Wait for the postcode field to appear then populate it
         inputElement_postcode = WebDriverWait(driver, 30).until(
             EC.presence_of_element_located(
-                (By.ID, "
+                (By.ID, "WASTECOLLECTIONDAYS202526_LOOKUP_ADDRESSLOOKUPPOSTCODE")
             )
         )
         inputElement_postcode.send_keys(user_postcode)
@@ -41,7 +41,7 @@ class CouncilClass(AbstractGetBinDataClass):
         # Click search button
         findAddress = WebDriverWait(driver, 10).until(
             EC.presence_of_element_located(
-                (By.ID, "
+                (By.ID, "WASTECOLLECTIONDAYS202526_LOOKUP_ADDRESSLOOKUPSEARCH")
             )
         )
         findAddress.click()
@@ -51,7 +51,7 @@ class CouncilClass(AbstractGetBinDataClass):
             EC.element_to_be_clickable(
                 (
                     By.XPATH,
-                    "//select[@id='
+                    "//select[@id='WASTECOLLECTIONDAYS202526_LOOKUP_ADDRESSLOOKUPADDRESS']//option[contains(., '"
                     + user_paon
                     + "')]",
                 )
@@ -61,7 +61,7 @@ class CouncilClass(AbstractGetBinDataClass):
         # Wait for the submit button to appear, then click it to get the collection dates
         submit = WebDriverWait(driver, 10).until(
             EC.presence_of_element_located(
-                (By.ID, "
+                (By.ID, "WASTECOLLECTIONDAYS202526_LOOKUP_FIELD2_NEXT")
             )
         )
         submit.click()

uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py

@@ -37,7 +37,7 @@ class CouncilClass(AbstractGetBinDataClass):
         collections = []

         # Find section with bins in
-        table = soup.find_all("table", {"class": "hbcRounds"})[
+        table = soup.find_all("table", {"class": "hbcRounds"})[0]

         # For each bin section, get the text and the list elements
         for row in table.find_all("tr"):

uk_bin_collection/uk_bin_collection/councils/HorshamDistrictCouncil.py

@@ -0,0 +1,123 @@
+from time import sleep
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            page = "https://www.horsham.gov.uk/waste-recycling-and-bins/household-bin-collections/check-your-bin-collection-day"
+
+            bin_data = {"bins": []}
+
+            user_uprn = kwargs.get("uprn")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            check_postcode(user_postcode)
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(page)
+
+            # Accept cookies
+            try:
+                accept_cookies = WebDriverWait(driver, timeout=10).until(
+                    EC.element_to_be_clickable(
+                        (By.XPATH, "//button[@id='ccc-notify-accept']")
+                    )
+                )
+                accept_cookies.click()
+            except:
+                print(
+                    "Accept cookies banner not found or clickable within the specified time."
+                )
+                pass
+            # Wait for postcode entry box
+
+            postcode_input = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//input[@value='Enter your postcode']")
+                )
+            )
+
+            postcode_input.send_keys(user_postcode)
+            search_btn = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located((By.ID, "Submit1"))
+            )
+            search_btn.click()
+
+            address_results = Select(
+                WebDriverWait(driver, timeout=15).until(
+                    EC.presence_of_element_located(
+                        (
+                            By.XPATH,
+                            "//option[contains(text(),'Please select address...')]/parent::select",
+                        )
+                    )
+                )
+            )
+
+            address_results.select_by_value(user_uprn)
+
+            results = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (
+                        By.XPATH,
+                        "//th[contains(text(),'COLLECTION TYPE')]/ancestor::table",
+                    )
+                )
+            )
+
+            soup = BeautifulSoup(
+                results.get_attribute("innerHTML"), features="html.parser"
+            )
+
+            # Skip the header, loop through each row in tbody
+            for row in soup.find_all("tbody")[0].find_all("tr"):
+                cells = row.find_all("td")
+                if len(cells) < 3:
+                    continue
+                date_str = cells[1].get_text(strip=True)
+                collection_type = cells[2].get_text(strip=True)
+
+                try:
+                    date = datetime.strptime(date_str, "%d/%m/%Y").strftime(date_format)
+                except ValueError:
+                    continue  # Skip if date is invalid
+
+                bin_data["bins"].append(
+                    {"type": collection_type, "collectionDate": date}
+                )
+
+            # Sort by date
+            bin_data["bins"].sort(
+                key=lambda x: datetime.strptime(x["collectionDate"], date_format)
+            )
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bin_data

uk_bin_collection/uk_bin_collection/councils/WrexhamCountyBoroughCouncil.py

@@ -0,0 +1,123 @@
+from time import sleep
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            page = "https://www.wrexham.gov.uk/service/when-are-my-bins-collected"
+
+            bin_data = {"bins": []}
+
+            user_uprn = kwargs.get("uprn")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            check_postcode(user_postcode)
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(page)
+
+            start_now_btn = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//a[contains(text(),'Start now')]")
+                )
+            )
+            start_now_btn.click()
+
+            continue_without_signup_btn = WebDriverWait(driver, timeout=15).until(
+                EC.presence_of_element_located(
+                    (
+                        By.XPATH,
+                        "//a[contains(text(),'or, continue without an account')]",
+                    )
+                )
+            )
+            continue_without_signup_btn.click()
+
+            iframe_presense = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located((By.ID, "fillform-frame-1"))
+            )
+
+            driver.switch_to.frame(iframe_presense)
+
+            inputElement_postcodesearch = WebDriverWait(driver, 30).until(
+                EC.element_to_be_clickable((By.ID, "LocationSearch"))
+            )
+
+            inputElement_postcodesearch.send_keys(user_postcode)
+
+            # Wait for the 'Select address' dropdown to be updated
+
+            # Wait for 'Searching for...' to be removed from page
+            WebDriverWait(driver, timeout=15).until(
+                EC.none_of(EC.presence_of_element_located((By.CLASS_NAME, "spinner")))
+            )
+
+            dropdown = WebDriverWait(driver, 30).until(
+                EC.element_to_be_clickable((By.ID, "ChooseAddress"))
+            )
+            # Create a 'Select' for it, then select the first address in the list
+            # (Index 0 is "Select...")
+            dropdownSelect = Select(dropdown)
+            dropdownSelect.select_by_value(str(user_uprn))
+
+            results_wait = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//th[contains(text(),'Collection')]")
+                )
+            )
+
+            results = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//table[@id='wcbc_collection_details']")
+                )
+            )
+
+            soup = BeautifulSoup(
+                results.get_attribute("innerHTML"), features="html.parser"
+            )
+
+            for row in soup.find_all("tr")[1:]:  # Skip the header row
+                date_cell, collection_cell = row.find_all("td")
+                date = datetime.strptime(date_cell.text.strip(), "%d/%m/%Y").strftime(
+                    date_format
+                )
+
+                for bin_item in collection_cell.find_all("li"):
+                    bin_type = bin_item.text.strip()
+                    bin_data["bins"].append({"type": bin_type, "collectionDate": date})
+
+            # Optional: sort by date
+            bin_data["bins"].sort(
+                key=lambda x: datetime.strptime(x["collectionDate"], date_format)
+            )
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bin_data

{uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/RECORD

@@ -1,13 +1,13 @@
 uk_bin_collection/Local_Authority_Boundaries.geojson,sha256=_j-hUiL0--t2ewd_s29-j7_AKRlhagRMmOhXyco-B6I,1175922
 uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-uk_bin_collection/compare_lad_codes.py,sha256=
+uk_bin_collection/compare_lad_codes.py,sha256=0bax1PbwfNp6IOI4XMKhBHF3illSG-NLbOH-DbBFIu8,2237
 uk_bin_collection/map.html,sha256=1xqlWRc2g4poZwT9FVdsSAkXPmkZ3dmQeA_-ikbU9dg,4135
 uk_bin_collection/tests/check_selenium_url_in_input.json.py,sha256=lf-JT7vvaSfvgbrfOhzrhfSzJqL82WajlRqo1GqfcMM,7875
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/generate_map_test_results.py,sha256=
-uk_bin_collection/tests/input.json,sha256
+uk_bin_collection/tests/generate_map_test_results.py,sha256=CKnGK2ZgiSXomRGkomX90DitgMP-X7wkHhyKORDcL2E,1144
+uk_bin_collection/tests/input.json,sha256=oKY8pf4KyZjiZxc0FvlmFdxsgBteImQF0VVkCJQBwmk,134509
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -23,7 +23,7 @@ uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py,sha256=p
 uk_bin_collection/uk_bin_collection/councils/AmberValleyBoroughCouncil.py,sha256=mTeluIIEcuxLxhfDQ95A1fp8RM6AkJT5tRGZPUbYGdk,1853
 uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py,sha256=Hp5pteaC5RjL5ZqPZ564S9WQ6ZTKLMO6Dl_fxip2TUc,1653
 uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py,sha256=iMBldxNErgi-ok1o6xpqdNgMvR6qapaNqoTWDTqMeGo,3824
-uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py,sha256=UnHge6FwigJKYuE6QYiXE659dTaKvs1xhHHJXoAXhSQ,5075
 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
 uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=fhX7S_A3jqoND7NE6qITPMPvdk3FJSKZ3Eoa5RtSg3I,4247
@@ -73,7 +73,7 @@ uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py,sha25
 uk_bin_collection/uk_bin_collection/councils/CeredigionCountyCouncil.py,sha256=np9iLnMVWpMYUiHZ4sJaSaU5pOWfmiCLQ8TIrOlY48o,5924
 uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py,sha256=tXfzMetN6wxahuGGRp2mIyCCDSL4F2aG61HhUxw6COQ,2172
 uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py,sha256=EB88D0MNJwuDZ2GX1ENc5maGYx17mnHTCtNl6s-v11E,5090
-uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py,sha256=B7OgyFKlbvu8SowAALLI8j2EnKlmbXEj65XtB27rCWI,16357
 uk_bin_collection/uk_bin_collection/councils/CherwellDistrictCouncil.py,sha256=VxTe9qk93MFgtELEgVrEz3W0vYaG_32EpPmky_b4j0k,2590
 uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py,sha256=I7Dj8LzG-Q4yrJ99jLRIwKwW5WQ9he8UksvF_YPzTxI,1681
 uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py,sha256=5mKZf22NgdyBY-SqV0c2q8b8IJobkoZrsfGEVUcxUyM,3544
@@ -106,7 +106,7 @@ uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py,sha256=i3AcW
 uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py,sha256=aYUVE5QqTxdj8FHhCB4EiFVDJahWJD9Pq0d1upBEvXg,1501
 uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py,sha256=U0VwSNIldMv5nUoiXtFgjbE0m6Kb-8W2WZQGVCNF_WI,3261
 uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=FsHfejTGPjRUByDz157690LTD8JpqGplD_XVb7pTe3A,4862
-uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=Laf-j0LLr7M4xmKhk8kjPNTtt66oXKYWm0ppxdUX3F0,4326
 uk_bin_collection/uk_bin_collection/councils/EastLothianCouncil.py,sha256=zTp-GDWYeUIlFaqfkqGvo7XMtxJd0VbxdGgqaAwRACk,2792
 uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py,sha256=5giegMCKQ2JhVDR5M4mevVxIdhZtSW7kbuuoSkj3EGk,4361
 uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py,sha256=oL-NqriLVy_NChGASNh8qTqeakLn4iP_XzoMC6VlPGM,5216
@@ -142,7 +142,7 @@ uk_bin_collection/uk_bin_collection/councils/HackneyCouncil.py,sha256=vO3ugk5fcd
 uk_bin_collection/uk_bin_collection/councils/HaltonBoroughCouncil.py,sha256=gq_CPqi6qM2oNiHhKKF1lZC86fyKL4lPhh_DN9pJZ04,5971
 uk_bin_collection/uk_bin_collection/councils/HarboroughDistrictCouncil.py,sha256=uAbCgfrqkIkEKUyLVE8l72s5tzbfMFsw775i0nVRAyc,1934
 uk_bin_collection/uk_bin_collection/councils/HaringeyCouncil.py,sha256=t_6AkAu4wrv8Q0WlDhWh_82I0djl5tk531Pzs-SjWzg,2647
-uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py,sha256=6Sf4l07YvpCmhuOMgD0VWsjfdwHPnEbbsEAsWbl4A44,2050
 uk_bin_collection/uk_bin_collection/councils/HartDistrictCouncil.py,sha256=_llxT4JYYlwm20ZtS3fXwtDs6mwJyLTZBP2wBhvEpWk,2342
 uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py,sha256=MUT1A24iZShT2p55rXEvgYwGUuw3W05Z4ZQAveehv-s,2842
 uk_bin_collection/uk_bin_collection/councils/HastingsBoroughCouncil.py,sha256=9MCuit4awXSZTbZCXWBsQGX2tp2mHZ1eP1wENZdMvgA,1806
@@ -152,6 +152,7 @@ uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt
 uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
 uk_bin_collection/uk_bin_collection/councils/Hillingdon.py,sha256=2OUp0iYO1YeZuTq0XRUalgoay5JRZgfHKKEwYzdMAU0,11291
 uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py,sha256=51vXTKrstfJhb7cLCcrsvA9qKCsptyNMZvy7ML9DasM,2344
+uk_bin_collection/uk_bin_collection/councils/HorshamDistrictCouncil.py,sha256=U8WelJiHivT7CS3meUVcLURWOLRKes1pKZ81tcqKarM,4446
 uk_bin_collection/uk_bin_collection/councils/HullCityCouncil.py,sha256=UHcesBoctFVcXDYuwfag43KbcJcopkEDzJ-54NxtK0Q,1851
 uk_bin_collection/uk_bin_collection/councils/HuntingdonDistrictCouncil.py,sha256=dGyhhG6HRjQ2SPeiRwUPTGlk9dPIslagV2k0GjEOn1s,1587
 uk_bin_collection/uk_bin_collection/councils/IpswichBoroughCouncil.py,sha256=57lmDl_FprG68gUhKQYpOa1M2pudyb1utfoMhUXNwzs,2802
@@ -327,6 +328,7 @@ uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py,sha256=37ig
 uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=H8aFHlacwV07X-6T9RQua4irqDA0cIQrF4O1FfPR7yI,4114
 uk_bin_collection/uk_bin_collection/councils/WolverhamptonCityCouncil.py,sha256=ncXfu5RHPCFsArczXHy7g0l_HEZa7GC-QA1QRReP_00,1801
 uk_bin_collection/uk_bin_collection/councils/WorcesterCityCouncil.py,sha256=dKHB2fPSmOGOwyvfpbdR4U8XW2ctBf63gCPxX06kwKA,1867
+uk_bin_collection/uk_bin_collection/councils/WrexhamCountyBoroughCouncil.py,sha256=MwP9tp6zAD9Xat1ZTKzm8w7iO7nA0HfS7g2OrqRDS8U,4588
 uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py,sha256=YuZdzEW0CZLwusm1VQcGRIKXAab_UDFLaCnN60itt_E,5776
 uk_bin_collection/uk_bin_collection/councils/WyreCouncil.py,sha256=QpCkmRSQmJo0RLsjXoCYPDcoxuDzG_00qNV0AHTDmXo,3000
 uk_bin_collection/uk_bin_collection/councils/WyreForestDistrictCouncil.py,sha256=3b7WzBXdYub6j13sqDL3jlqgICKmNyQaF4KxRxOMHWk,2000
@@ -334,8 +336,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=QD4v4xpsEE0QheR_fGaNOIRMc2FatcUfKkkhAhseyVU,1159
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
+uk_bin_collection-0.148.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.148.1.dist-info/METADATA,sha256=VttvCNYiRX4HrDRXZN4qtx7PnsSIRvM51WhxN0QPsDA,20914
+uk_bin_collection-0.148.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+uk_bin_collection-0.148.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.148.1.dist-info/RECORD,,

{uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/LICENSE
RENAMED, file without changes

{uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/WHEEL
RENAMED, file without changes

{uk_bin_collection-0.147.2.dist-info → uk_bin_collection-0.148.1.dist-info}/entry_points.txt
RENAMED, file without changes