uk_bin_collection 0.101.0__py3-none-any.whl → 0.103.0__py3-none-any.whl
- uk_bin_collection/tests/input.json +1 -1
- uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py +42 -39
- uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py +13 -8
- uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py +24 -21
- uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py +1 -1
- uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py +25 -10
- uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py +21 -20
- uk_bin_collection/uk_bin_collection/councils/EnfieldCouncil.py +16 -18
- uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py +10 -4
- uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py +6 -4
- uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py +37 -20
- uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py +11 -9
- uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py +1 -2
- uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py +8 -6
- uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py +21 -11
- uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py +1 -1
- {uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/METADATA +19 -1
- {uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/RECORD +21 -21
- {uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/entry_points.txt +0 -0
uk_bin_collection/tests/input.json

@@ -367,7 +367,7 @@
     },
     "EastDevonDC": {
         "url": "https://eastdevon.gov.uk/recycling-and-waste/recycling-waste-information/when-is-my-bin-collected/future-collections-calendar/?UPRN=010090909915",
-        "wiki_command_url_override": "https://eastdevon.gov.uk/recycling-waste/recycling-
+        "wiki_command_url_override": "https://eastdevon.gov.uk/recycling-and-waste/recycling-waste-information/when-is-my-bin-collected/future-collections-calendar/?UPRN=XXXXXXXX",
         "wiki_name": "East Devon District Council",
         "wiki_note": "Replace XXXXXXXX with UPRN."
     },
uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py

@@ -1,13 +1,13 @@
-
+import requests
+import json
+from datetime import datetime
+from uk_bin_collection.uk_bin_collection.common import check_uprn, date_format as DATE_FORMAT
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
 
-# import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
     """
-    Concrete
-    base class. They can also override some operations with a default
-    implementation.
+    Concrete class that implements the abstract bin data fetching and parsing logic.
     """
 
     def parse_data(self, page: str, **kwargs) -> dict:
@@ -18,64 +18,67 @@ class CouncilClass(AbstractGetBinDataClass):
         check_uprn(uprn)
 
         payload = {
-            # Add your payload details here (replace this with the actual payload structure if required)
             "uprn": uprn
         }
 
-
-        headers = {
-            "Content-Type": "application/json"
-        }
+        headers = {"Content-Type": "application/json"}
 
         response = requests.post(url_base, data=json.dumps(payload), headers=headers)
 
-        # Ensure the request was successful
         if response.status_code == 200:
             data = response.json()
 
             # Initialize an empty list to store the bin collection details
-
             bins = []
 
             # Function to add collection details to bins list
            def add_collection(service_name, collection_data):
-                bins.append(
-
-
-
+                bins.append(
+                    {
+                        "type": service_name,
+                        "collectionDate": collection_data.get("current_collection_date"),
+                    }
+                )
 
-
-
+            available_services = data.get("refuse", {}).get("available_services", {})
+
+            date_format = "%d-%m-%Y"  # Define the desired date format
 
             for service_name, service_data in available_services.items():
-                #
+                # Handle the different cases of service data
                 match service_data["container"]:
                     case "Green Wheelie Bin":
-                        subscription_status =
-
+                        subscription_status = (
+                            service_data["subscription"]["active"]
+                            if service_data.get("subscription")
+                            else False
+                        )
+                        type_descr = f"Green Wheelie Bin ({'Active' if subscription_status else 'Expired'})"
                     case "N/A":
-                        type_descr = service_data
+                        type_descr = service_data.get("name", "Unknown Service")
                     case _:
-                        type_descr = service_data
-
+                        type_descr = service_data.get("container", "Unknown Container")
 
                 date_str = service_data.get("current_collection_date")
-                #
-
-
-
-
-
-
-
-                "
-
+                if date_str:  # Ensure the date string exists
+                    try:
+                        # Parse and format the date string
+                        date_obj = datetime.strptime(date_str, "%Y-%m-%d")
+                        formatted_date = date_obj.strftime(DATE_FORMAT)
+                    except ValueError:
+                        formatted_date = "Invalid Date"
+                else:
+                    formatted_date = "No Collection Date"
+
+                bins.append(
+                    {
+                        "type": type_descr,  # Use service name from the data
+                        "collectionDate": formatted_date,
+                    }
+                )
 
         else:
             print(f"Failed to fetch data. Status code: {response.status_code}")
+            return {}
 
-
-            "bins": bins
-        }
-
-        return data
+        return {"bins": bins}
uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py

@@ -9,7 +9,6 @@ from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
 
-
 # import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
     """
@@ -34,7 +33,7 @@ class CouncilClass(AbstractGetBinDataClass):
 
         session = requests.Session()
         session.headers.update(headers)
-
+
         user_uprn = kwargs.get("uprn")
         user_postcode = kwargs.get("postcode")
         URL = "https://online.belfastcity.gov.uk/find-bin-collection-day/Default.aspx"
@@ -47,14 +46,16 @@ class CouncilClass(AbstractGetBinDataClass):
             "__EVENTTARGET": "",
             "__EVENTARGUMENT": "",
             "__VIEWSTATE": self.get_session_variable(soup, "__VIEWSTATE"),
-            "__VIEWSTATEGENERATOR": self.get_session_variable(
+            "__VIEWSTATEGENERATOR": self.get_session_variable(
+                soup, "__VIEWSTATEGENERATOR"
+            ),
             "__SCROLLPOSITIONX": "0",
             "__SCROLLPOSITIONY": "0",
             "__EVENTVALIDATION": self.get_session_variable(soup, "__EVENTVALIDATION"),
             "ctl00$MainContent$searchBy_radio": "P",
             "ctl00$MainContent$Street_textbox": "",
             "ctl00$MainContent$Postcode_textbox": user_postcode,
-            "ctl00$MainContent$AddressLookup_button": "Find address"
+            "ctl00$MainContent$AddressLookup_button": "Find address",
         }
 
         # Build intermediate ASP.NET variables for uprn Select address
@@ -65,7 +66,9 @@ class CouncilClass(AbstractGetBinDataClass):
             "__EVENTTARGET": "",
             "__EVENTARGUMENT": "",
             "__VIEWSTATE": self.get_session_variable(soup, "__VIEWSTATE"),
-            "__VIEWSTATEGENERATOR": self.get_session_variable(
+            "__VIEWSTATEGENERATOR": self.get_session_variable(
+                soup, "__VIEWSTATEGENERATOR"
+            ),
             "__SCROLLPOSITIONX": "0",
             "__SCROLLPOSITIONY": "0",
             "__EVENTVALIDATION": self.get_session_variable(soup, "__EVENTVALIDATION"),
@@ -73,14 +76,14 @@ class CouncilClass(AbstractGetBinDataClass):
             "ctl00$MainContent$Street_textbox": "",
             "ctl00$MainContent$Postcode_textbox": user_postcode,
             "ctl00$MainContent$lstAddresses": user_uprn,
-            "ctl00$MainContent$SelectAddress_button": "Select address"
+            "ctl00$MainContent$SelectAddress_button": "Select address",
         }
 
         # Actual http call to get Bins Data
         response = session.post(URL, data=form_data)
         response.raise_for_status()
         soup = BeautifulSoup(response.text, "html.parser")
-
+
         # Find Bins table and data
         table = soup.find("div", {"id": "binsGrid"})
         if table:
@@ -91,7 +94,9 @@ class CouncilClass(AbstractGetBinDataClass):
                 collection_type = columns[0].get_text(strip=True)
                 collection_date_raw = columns[3].get_text(strip=True)
                 # if the month number is a single digit there are 2 spaces, stripping all spaces to make it consistent
-                collection_date = datetime.strptime(
+                collection_date = datetime.strptime(
+                    collection_date_raw.replace(" ", ""), "%a%b%d%Y"
+                )
                 bin_entry = {
                     "type": collection_type,
                     "collectionDate": collection_date.strftime(date_format),
uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py

@@ -45,17 +45,13 @@ class CouncilClass(AbstractGetBinDataClass):
         )
         inputElement_postcodesearch.send_keys(user_postcode)
 
-
-
         find_address_btn = wait.until(
             EC.element_to_be_clickable((By.XPATH, '//*[@id="sub"]'))
         )
         find_address_btn.click()
 
         dropdown_options = wait.until(
-            EC.presence_of_element_located(
-                (By.XPATH, '//*[@id="address"]')
-            )
+            EC.presence_of_element_located((By.XPATH, '//*[@id="address"]'))
         )
         time.sleep(2)
         dropdown_options.click()
@@ -71,11 +67,8 @@ class CouncilClass(AbstractGetBinDataClass):
         # Click the element
         address.click()
 
-
         submit_address = wait.until(
-            EC.presence_of_element_located(
-                (By.XPATH, '//*[@id="go"]')
-            )
+            EC.presence_of_element_located((By.XPATH, '//*[@id="go"]'))
         )
         time.sleep(2)
         submit_address.click()
@@ -83,13 +76,11 @@ class CouncilClass(AbstractGetBinDataClass):
         results_found = wait.until(
             EC.element_to_be_clickable(
                 (By.XPATH, '//h1[contains(text(), "Your bin days")]')
-            )
             )
+        )
 
         final_page = wait.until(
-            EC.presence_of_element_located(
-                (By.CLASS_NAME, "waste__collections")
-            )
+            EC.presence_of_element_located((By.CLASS_NAME, "waste__collections"))
         )
 
         soup = BeautifulSoup(driver.page_source, features="html.parser")
@@ -103,29 +94,41 @@ class CouncilClass(AbstractGetBinDataClass):
         # Loop through each bin field
         for bin_section in bin_sections:
             # Extract the bin type (e.g., "Brown Caddy", "Green Wheelie Bin", etc.)
-            bin_type = bin_section.get_text(strip=True).split("\n")[
+            bin_type = bin_section.get_text(strip=True).split("\n")[
+                0
+            ]  # The first part is the bin type
 
             # Find the next sibling <dl> tag that contains the next collection information
             summary_list = bin_section.find_next("dl", class_="govuk-summary-list")
 
             if summary_list:
                 # Now, instead of finding by class, we'll search by text within the dt element
-                next_collection_dt = summary_list.find(
+                next_collection_dt = summary_list.find(
+                    "dt", string=lambda text: "Next collection" in text
+                )
 
                 if next_collection_dt:
                     # Find the sibling <dd> tag for the collection date
-                    next_collection = next_collection_dt.find_next_sibling(
+                    next_collection = next_collection_dt.find_next_sibling(
+                        "dd"
+                    ).get_text(strip=True)
 
                     if next_collection:
                         try:
                             # Parse the next collection date (assuming the format is like "Tuesday 15 October 2024")
-                            parsed_date = datetime.strptime(
+                            parsed_date = datetime.strptime(
+                                next_collection, "%A %d %B %Y"
+                            )
 
                             # Add the bin information to the data dictionary
-                            data["bins"].append(
-
-
-
+                            data["bins"].append(
+                                {
+                                    "type": bin_type,
+                                    "collectionDate": parsed_date.strftime(
+                                        date_format
+                                    ),
+                                }
+                            )
                         except ValueError as e:
                             print(f"Error parsing date for {bin_type}: {e}")
                     else:
uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py

@@ -82,7 +82,7 @@ class CouncilClass(AbstractGetBinDataClass):
             bin_type = " ".join(words).capitalize()
             date_list = item.find_all("p")
             for d in date_list:
-                clean_date_str = re.sub(r
+                clean_date_str = re.sub(r"[^A-Za-z0-9 ]+", "", d.text.strip())
                 next_collection = datetime.strptime(clean_date_str, "%A %d %B %Y")
                 collections.append((bin_type, next_collection))
 
uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py

@@ -1,26 +1,41 @@
-from
+from typing import Dict, Any, Optional
+from bs4 import BeautifulSoup, Tag, NavigableString
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
-
+"""
+This module provides bin collection data for Cheshire East Council.
+"""
+
+
 class CouncilClass(AbstractGetBinDataClass):
-
+    """
+    A class to fetch and parse bin collection data for Cheshire East Council.
+    """
+
+    def parse_data(self, page: Any, **kwargs: Any) -> Dict[str, Any]:
         soup = BeautifulSoup(page.text, features="html.parser")
 
-        bin_data_dict = {"bins": []}
+        bin_data_dict: Dict[str, Any] = {"bins": []}
 
-        table = soup.find(
-
+        table: Optional[Tag | NavigableString] = soup.find(
+            "table", {"class": "job-details"}
+        )
+        if isinstance(table, Tag):  # Ensure we only proceed if 'table' is a Tag
            rows = table.find_all("tr", {"class": "data-row"})
 
            for row in rows:
                cells = row.find_all(
-                    "td",
+                    "td",
+                    {
+                        "class": lambda L: isinstance(L, str)
+                        and L.startswith("visible-cell")
+                    },  # Explicitly check if L is a string
                )
-                labels = cells[0].find_all("label") if cells else []
+                labels: list[Tag] = cells[0].find_all("label") if cells else []
 
                if len(labels) >= 3:
-                    bin_type = labels[2].get_text(strip=True)
-                    collection_date = labels[1].get_text(strip=True)
+                    bin_type: str = labels[2].get_text(strip=True)
+                    collection_date: str = labels[1].get_text(strip=True)
 
                    bin_data_dict["bins"].append(
                        {
uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py

@@ -4,7 +4,6 @@ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 from dateutil.relativedelta import relativedelta
 
 
-
 # import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
     """
@@ -23,37 +22,39 @@ class CouncilClass(AbstractGetBinDataClass):
         check_uprn(user_uprn)
 
         headers = {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+            "Accept-Language": "en-GB,en;q=0.9",
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive",
+            "Pragma": "no-cache",
+            "Sec-Fetch-Dest": "document",
+            "Sec-Fetch-Mode": "navigate",
+            "Sec-Fetch-Site": "none",
+            "Sec-Fetch-User": "?1",
+            "Upgrade-Insecure-Requests": "1",
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.6422.143 Safari/537.36",
+            "sec-ch-ua": '"Opera GX";v="111", "Chromium";v="125", "Not.A/Brand";v="24"',
+            "sec-ch-ua-mobile": "?0",
+            "sec-ch-ua-platform": '"Windows"',
         }
         params = {
-
+            "uprn": f"{user_uprn}",
             # 'uprn': f'100040128734',
         }
         response = requests.get(
-
+            "https://www.cornwall.gov.uk/umbraco/surface/waste/MyCollectionDays",
             params=params,
-            headers=headers
+            headers=headers,
         )
 
         soup = BeautifulSoup(response.text, features="html.parser")
         soup.prettify()
 
-        for item in soup.find_all(
+        for item in soup.find_all("div", class_="collection text-center service"):
             bin_type = item.contents[1].text + " bin"
-            collection_date = datetime.strptime(item.contents[5].text, "%d %b").replace(
+            collection_date = datetime.strptime(item.contents[5].text, "%d %b").replace(
+                year=curr_date.year
+            )
             if curr_date.month == 12 and collection_date.month == 1:
                 collection_date = collection_date + relativedelta(years=1)
             collections.append((bin_type, collection_date))
uk_bin_collection/uk_bin_collection/councils/EnfieldCouncil.py

@@ -56,9 +56,7 @@ class CouncilClass(AbstractGetBinDataClass):
         postcode_input.send_keys(user_postcode)
 
         find_address_button = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, 'submitButton0')
-            )
+            EC.presence_of_element_located((By.ID, "submitButton0"))
         )
         find_address_button.click()
 
@@ -80,7 +78,7 @@ class CouncilClass(AbstractGetBinDataClass):
         template_parts = first_option.split(", ")
         template_parts[0] = user_paon  # Replace the first part with user_paon
 
-        addr_label =
+        addr_label = ", ".join(template_parts)
         for addr_option in select.options:
             option_name = addr_option.accessible_name[0 : len(addr_label)]
             if option_name == addr_label:
@@ -100,32 +98,27 @@ class CouncilClass(AbstractGetBinDataClass):
         # Find the div with the specified id
         target_div = soup.find("div", {"id": target_div_id})
 
-
         # Check if the div is found
         if target_div:
             bin_data = {"bins": []}
 
-            for bin_div in target_div.find_all(
-                "div"
-            ):
+            for bin_div in target_div.find_all("div"):
                 # Extract the collection date from the message
                 try:
                     bin_collection_message = bin_div.find("p").text.strip()
                     date_pattern = r"\b\d{2}/\d{2}/\d{4}\b"
 
                     collection_date_string = (
-
-
-
-
-
+                        re.search(date_pattern, bin_div.text)
+                        .group(0)
+                        .strip()
+                        .replace(",", "")
+                    )
                 except AttributeError:
                     continue
 
                 current_date = datetime.now()
-                parsed_date = datetime.strptime(
-                    collection_date_string, "%d/%m/%Y"
-                )
+                parsed_date = datetime.strptime(collection_date_string, "%d/%m/%Y")
                 # Check if the parsed date is in the past and not today
                 if parsed_date.date() < current_date.date():
                     # If so, set the year to the next year
@@ -137,9 +130,14 @@ class CouncilClass(AbstractGetBinDataClass):
                 contains_date(formatted_date)
 
                 # Extract the bin type from the message
-                bin_type_match = re.search(
+                bin_type_match = re.search(
+                    r"Your next (.*?) collection", bin_collection_message
+                )
                 if bin_type_match:
-                    bin_info = {
+                    bin_info = {
+                        "type": bin_type_match.group(1),
+                        "collectionDate": formatted_date,
+                    }
                     bin_data["bins"].append(bin_info)
         else:
             raise ValueError("Collection data not found.")
uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py

@@ -16,7 +16,7 @@ class CouncilClass(AbstractGetBinDataClass):
     def parse_data(self, page: str, **kwargs) -> dict:
         data = {"bins": []}
         collections = []
-        selected_collections = kwargs.get("paon").split(
+        selected_collections = kwargs.get("paon").split(",")
         calendar_urls = []
         run_date = datetime.now().date()
 
@@ -25,9 +25,13 @@ class CouncilClass(AbstractGetBinDataClass):
         for item in selected_collections:
             item = item.strip().lower().replace(" ", "_")
             if has_numbers(item):
-                calendar_urls.append(
+                calendar_urls.append(
+                    f"https://www.gbcbincalendars.co.uk/json/gedling_borough_council_{item}_bin_schedule.json"
+                )
             else:
-                calendar_urls.append(
+                calendar_urls.append(
+                    f"https://www.gbcbincalendars.co.uk/json/gedling_borough_council_{item}_garden_bin_schedule.json"
+                )
 
         # Parse each URL and load future data
         for url in calendar_urls:
@@ -36,7 +40,9 @@ class CouncilClass(AbstractGetBinDataClass):
                 raise ConnectionError(f"Could not get response from: {url}")
             json_data = response.json()["collectionDates"]
             for col in json_data:
-                bin_date = datetime.strptime(
+                bin_date = datetime.strptime(
+                    col.get("collectionDate"), "%Y-%m-%d"
+                ).date()
                 if bin_date >= run_date:
                     collections.append((col.get("alternativeName"), bin_date))
 
uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py

@@ -29,9 +29,11 @@ class CouncilClass(AbstractGetBinDataClass):
             waste_type = row.find("th").text.strip()
             next_collection = parse(row.find("td").text.strip()).date()
 
-            data[
-
-
-
+            data["bins"].append(
+                {
+                    "type": waste_type,
+                    "collectionDate": next_collection.strftime(date_format),
+                }
+            )
 
         return data
uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py

@@ -7,6 +7,7 @@ from bs4 import BeautifulSoup
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
+
 class CouncilClass(AbstractGetBinDataClass):
     def parse_data(self, page: str, **kwargs) -> dict:
 
@@ -14,7 +15,9 @@ class CouncilClass(AbstractGetBinDataClass):
         check_postcode(user_postcode)
 
         # Fetch the page content
-        root_url = "https://myproperty.molevalley.gov.uk/molevalley/api/live_addresses/{}?format=json".format(
+        root_url = "https://myproperty.molevalley.gov.uk/molevalley/api/live_addresses/{}?format=json".format(
+            user_postcode
+        )
         response = requests.get(root_url, verify=False)
 
         if not response.ok:
@@ -63,23 +66,27 @@ class CouncilClass(AbstractGetBinDataClass):
         if bins_panel:
             panel = bins_panel.find_parent("div", class_="panel")
             print("Found 'Bins and Recycling' panel.")
-
+
             # Extract bin collection info from the un-commented HTML
             for strong_tag in panel.find_all("strong"):
                 bin_type = strong_tag.text.strip()
                 collection_string = strong_tag.find_next("p").text.strip()
-
+
                 # Debugging output
                 print(f"Processing bin type: {bin_type}")
                 print(f"Collection string: {collection_string}")
-
+
                 match = regex_date.search(collection_string)
                 if match:
-                    collection_date = datetime.strptime(
-
-
-
+                    collection_date = datetime.strptime(
+                        match.group(1), "%d/%m/%Y"
+                    ).date()
+                    data["bins"].append(
+                        {
+                            "type": bin_type,
+                            "collectionDate": collection_date.strftime("%d/%m/%Y"),
+                        }
+                    )
                     all_collection_dates.append(collection_date)
                 else:
                     # Add a debug line to show which collections are missing dates
@@ -88,7 +95,7 @@ class CouncilClass(AbstractGetBinDataClass):
             # Search for additional collections like electrical and textiles
             for p in panel.find_all("p"):
                 additional_match = regex_additional_collection.match(p.text.strip())
-
+
                 # Debugging output for additional collections
                 if additional_match:
                     bin_type = additional_match.group(1)
@@ -96,23 +103,33 @@ class CouncilClass(AbstractGetBinDataClass):
                    if "each collection day" in additional_match.group(2):
                        if all_collection_dates:
                            collection_date = min(all_collection_dates)
-                            data["bins"].append(
-
-
-
+                            data["bins"].append(
+                                {
+                                    "type": bin_type,
+                                    "collectionDate": collection_date.strftime(
+                                        "%d/%m/%Y"
+                                    ),
+                                }
+                            )
                        else:
-                            print(
+                            print(
+                                "No collection dates available for additional collection."
+                            )
                            raise ValueError("No valid bin collection dates found.")
                else:
-                    print(
+                    print(
+                        f"No additional collection found in paragraph: {p.text.strip()}"
+                    )
         else:
-            raise ValueError(
+            raise ValueError(
+                "Unable to find 'Bins and Recycling' panel in the HTML data."
+            )
 
         # Debugging to check collected data
         print(f"Collected bin data: {data}")
-
+
         # Handle the case where no collection dates were found
         if not all_collection_dates:
             raise ValueError("No valid collection dates were found in the data.")
-
-        return data
+
+        return data
uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py

@@ -23,7 +23,7 @@ class CouncilClass(AbstractGetBinDataClass):
         # Get the first form
         response = s.get(
             "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
-            verify
+            verify=False,
         )
 
         # Find the form ID and submit with a postcode
@@ -31,13 +31,13 @@ class CouncilClass(AbstractGetBinDataClass):
         form_build_id = soup.find("input", {"name": "form_build_id"})["value"]
         response = s.post(
             "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
-            data
+            data={
                 "postcode": user_postcode,
                 "op": "Find",
                 "form_build_id": form_build_id,
                 "form_id": "ntc_address_wizard",
             },
-            verify
+            verify=False,
         )
 
         # Find the form ID and submit with the UPRN
@@ -45,18 +45,18 @@ class CouncilClass(AbstractGetBinDataClass):
         form_build_id = soup.find("input", {"name": "form_build_id"})["value"]
         response = s.post(
             "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
-            data
+            data={
                 "house_number": f"0000{user_uprn}",
                 "op": "Use",
                 "form_build_id": form_build_id,
                 "form_id": "ntc_address_wizard",
             },
-            verify
+            verify=False,
         )
 
         # Parse form page and get the day of week and week offsets
         soup = BeautifulSoup(response.text, features="html.parser")
-        info_section
+        info_section = soup.find("section", {"class": "block block-ntc-bins clearfix"})
 
         regular_day, garden_day, special_day = None, None, None
         # Get day of week and week label for refuse, garden and special collections.
@@ -82,7 +82,9 @@ class CouncilClass(AbstractGetBinDataClass):
         weeks_total = math.floor((datetime(2026, 4, 1) - datetime.now()).days / 7)
 
         # The garden calendar only shows until end of November 2024, work out how many weeks that is
-        garden_weeks_total = math.floor(
+        garden_weeks_total = math.floor(
+            (datetime(2024, 12, 1) - datetime.now()).days / 7
+        )
 
         regular_collections, garden_collections, special_collections = [], [], []
         # Convert day text to series of dates using previous calculation
@@ -134,10 +136,10 @@ class CouncilClass(AbstractGetBinDataClass):
 
         return {
             "bins": [
-
+                {
                     "type": item[0],
                     "collectionDate": item[1].strftime(date_format),
                 }
                 for item in sorted(collections, key=lambda x: x[1])
             ]
-        }
+        }
uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py

@@ -36,8 +36,7 @@ class CouncilClass(AbstractGetBinDataClass):
         collection_date = datetime.strptime(
             remove_ordinal_indicator_from_date_string(
                 week_text[0].split(" - ")[0]
-            )
-            .strip(),
+            ).strip(),
             "%A %d %B",
         )
         next_collection = collection_date.replace(year=datetime.now().year)
uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py

@@ -17,7 +17,9 @@ class CouncilClass(AbstractGetBinDataClass):
         check_uprn(user_uprn)
 
         response = requests.post(
-            "https://www.rotherham.gov.uk/bin-collections?address={}&submit=Submit".format(
+            "https://www.rotherham.gov.uk/bin-collections?address={}&submit=Submit".format(
+                user_uprn
+            )
         )
         # Make a BS4 object
         soup = BeautifulSoup(response.text, features="html.parser")
@@ -25,15 +27,15 @@ class CouncilClass(AbstractGetBinDataClass):
 
         data = {"bins": []}
 
-        table = soup.select(
+        table = soup.select("table")[0]
 
         if table:
-            rows = table.select(
+            rows = table.select("tr")
 
             for index, row in enumerate(rows):
-                bin_info_cell = row.select(
-                if bin_info_cell:
-                    bin_type = bin_info_cell[0].get_text(separator=
+                bin_info_cell = row.select("td")
+                if bin_info_cell:
+                    bin_type = bin_info_cell[0].get_text(separator=" ", strip=True)
                     bin_collection = bin_info_cell[1]
 
                     if bin_collection:
uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py

@@ -3,6 +3,7 @@ from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 from datetime import datetime
 
+
 class CouncilClass(AbstractGetBinDataClass):
     """
     Concrete class to scrape bin collection data.
@@ -25,23 +26,23 @@ class CouncilClass(AbstractGetBinDataClass):
         data = {"bins": []}
         # Locate the section with bin collection data
         sections = soup.find_all("div", {"class": "wil_c-content-section_heading"})
-
+
         for s in sections:
             if s.get_text(strip=True).lower() == "bin collections":
                 rows = s.find_next_sibling(
                     "div", {"class": "c-content-section_body"}
                 ).find_all("div", class_="tablet:l-col-fb-4 u-mt-10")
-
+
                 for row in rows:
                     title_elem = row.find("div", class_="u-mb-4")
                     if title_elem:
                         title = title_elem.get_text(strip=True).capitalize()
-
+
                         # Find all collection info in the same section
                         collections = row.find_all("div", class_="u-mb-2")
                         for c in collections:
                             text = c.get_text(strip=True).lower()
-
+
                             if "next collection" in text:
                                 date_text = text.replace("next collection - ", "")
                                 try:
@@ -51,34 +52,43 @@ class CouncilClass(AbstractGetBinDataClass):
 
                                     dict_data = {
                                         "type": title,
-                                        "collectionDate": next_collection_date
+                                        "collectionDate": next_collection_date,
                                     }
                                     data["bins"].append(dict_data)
                                 except ValueError:
                                     # Skip if the date isn't a valid date
                                     print(f"Skipping invalid date: {date_text}")
-
+
                         # Get future collections
                         future_collections_section = row.find("ul", class_="u-mt-4")
                         if future_collections_section:
-                            future_collections =
+                            future_collections = (
+                                future_collections_section.find_all("li")
+                            )
                            for future_collection in future_collections:
-                                future_date_text = future_collection.get_text(
+                                future_date_text = future_collection.get_text(
+                                    strip=True
+                                )
                                try:
                                    future_collection_date = datetime.strptime(
                                        future_date_text, "%A, %d %B %Y"
                                    ).strftime(date_format)
 
                                    # Avoid duplicates of next collection date
-                                    if
+                                    if (
+                                        future_collection_date
+                                        != next_collection_date
+                                    ):
                                        dict_data = {
                                            "type": title,
-                                            "collectionDate": future_collection_date
+                                            "collectionDate": future_collection_date,
                                        }
                                        data["bins"].append(dict_data)
                                except ValueError:
                                    # Skip if the future collection date isn't valid
-                                    print(
+                                    print(
+                                        f"Skipping invalid future date: {future_date_text}"
+                                    )
 
         # Sort the collections by date
         data["bins"].sort(
uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py

@@ -56,7 +56,7 @@ class CouncilClass(AbstractGetBinDataClass):
                 )
             )
         ).click()
-
+
         # Wait for the Show collection dates button to appear, then click it to get the collection dates
         inputElement_show_dates_button = WebDriverWait(driver, timeout).until(
             EC.presence_of_element_located(
{uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.
+Version: 0.103.0
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
@@ -98,6 +98,24 @@ This integration can be installed directly via HACS. To install:
 1. Restart your Home Assistant.
 1. In the Home Assistant UI go to `Settings` > `Devices & Services` click `+ Add Integration` and search for `UK Bin Collection Data`.
 
+### Overriding the Bin Icon and Bin Colour
+We realise it is difficult to set a colour from the councils text for the Bin Type and to keep the integration generic we dont capture colour from a council(not all councils supply this as a field), only bin type and next collection date.
+
+When you configure the componenent on the first screen you can set a JSON string to map the bin type to the colour and icon
+
+Here is an example to set the colour and icon for the type `Empty Standard General Waste`. This type is the type returned from the council for the bin. You can do this for multiple bins.
+
+If you miss this on the first setup you can reconfigure it.
+
+```
+{
+    "Empty Standard General Waste":
+    {
+        "icon": "mdi:trash-can",
+        "color": "blue"
+    }
+}
+```
 ---
 
 ## Standalone Usage
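The README addition above maps each bin type string to an icon and a colour, one JSON object per type, and notes that the same JSON string can hold several entries. A minimal sketch of a multi-type mapping is shown below; the second type name, `Recycling`, and its icon/colour are illustrative assumptions only (JSON cannot carry comments, so the hedging lives in this paragraph). The keys must match the `type` strings your council module actually returns, and the icons are Material Design icon names as used by Home Assistant.

```
{
    "Empty Standard General Waste": {
        "icon": "mdi:trash-can",
        "color": "blue"
    },
    "Recycling": {
        "icon": "mdi:recycle",
        "color": "green"
    }
}
```

If the type names do not match exactly, the integration simply falls back to its defaults for those bins, so it is worth copying the type strings from the sensor output rather than retyping them.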
{uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/RECORD

@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=
+uk_bin_collection/tests/input.json,sha256=_AUgQoj2KfwyLGGCXBX-OkT0fjRVTpSxKcnpzbE2aS8,71114
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=LrOSt_loA1Mw3vTqaO2LpaDMu7rYJy6k5Kr-EOBln7s,3424
@@ -19,16 +19,16 @@ uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=Louq
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
 uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py,sha256=Sd4-pbv0QZsR7soxvXYqsfdOUIqZqS6notyoZthG77s,9182
 uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py,sha256=MgF_7XyIcIoNzFR0OJsjBkLCZKgWxBrV6nTcutMxO1Q,4244
-uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py,sha256=UBHINX8WknQfnHU43Wp5kXAqmHl00aWM0Fh8NQdWBZA,3244
 uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py,sha256=VPWGljnH4C3q8qs5ZmCtqjNjgWQvviALzjk00q3EZeQ,2632
 uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py,sha256=N_TPiIv8VBzN3rY0p3JtLlxSEru-6k1wW4UNIhN5X1M,3709
 uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py,sha256=CvGB7w9HMn7XyEtwfd9MWZE_HlZ75pDcaKMsQJz0xhk,1669
 uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py,sha256=U1HOr9YLMAlFoZysfw5n04E0bVuCliO5Yj1FMiiwcHA,2549
-uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py,sha256=
-uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py,sha256=SGOv9mm3fByyR3TQDyNcSLeidX_7FlHelkxnh-NUTcY,4327
+uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py,sha256=wqqCQZzu_q_pJrxTTlTrGGDgPB5EYgd4RiBy7nKnSHc,5835
 uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py,sha256=now2xgpfshYM33UWC18j6xa6BuBydO5Sl7OrDQOo6b0,4687
 uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py,sha256=jHbCK8sL09vdmdP7Xnh8lIrU5AHTnJLEZfOLephPvWg,4090
-uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=WI68r8jB0IHPUT4CgmZMtng899AAMFTxkyTdPg9yLF8,4117
 uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py,sha256=Llo1rULaAZ8rChVYZqXFFLo7CN6vbT0ULUJD6ActouY,9015
 uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py,sha256=VFrdcqKpHPw8v77Ll9QzBz_4carUfC1XYnxqUvDihkA,4275
 uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py,sha256=k6qt4cds-Ejd97Z-__pw2BYvGVbFdc9SUfF73PPrTNA,5823
@@ -43,13 +43,13 @@ uk_bin_collection/uk_bin_collection/councils/CardiffCouncil.py,sha256=_k3sT_WR-g
 uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py,sha256=JVPYUIlU2ISgbUSr5AOOXNK6IFQFtQmhZyYIMAOedD4,3858
 uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py,sha256=tXfzMetN6wxahuGGRp2mIyCCDSL4F2aG61HhUxw6COQ,2172
 uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py,sha256=EB88D0MNJwuDZ2GX1ENc5maGYx17mnHTCtNl6s-v11E,5090
-uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py,sha256=aMqT5sy1Z1gklFO5Xl893OgeBmpf19OwpizWEKWQ3hg,1680
 uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py,sha256=M58kIHCAZu1K5qBp6_M4sw8nMvHz38YkoxY_sNuhvOI,4780
 uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py,sha256=HxrLcJves7ZsE8FbooymeecTUmScY4R7Oi71vwCePPo,4118
 uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py,sha256=M7HjuUaFq8aSnOf_9m1QS4MmPPMmPhF3mLHSrfDPtV0,5194
 uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py,sha256=Mny-q2rQkWe2Tj1gINwEM1L4AkqQl1EDMAaKY0-deD4,3968
 uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py,sha256=el75qv2QyfWZBU09tJLvD8vLQZ9pCg73u1NBFs6ybo8,1034
-uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py,sha256=WZiz50svwyZgO8QKUCLy7hfFuy2HmAx5h-TG3yAweRA,2836
 uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py,sha256=_BEKZAjlS5Ad5DjyxqAEFSLn8F-KYox0zmn4BXaAD6A,2367
 uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py,sha256=QJH27plySbbmoNcLNUXq-hUiFmZ5zBlRS5mzOJgWSK8,11594
 uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py,sha256=Tm_6pvBPj-6qStbe6-02LXaoCOlnnDvVXAAocGVvf_E,3970
@@ -69,7 +69,7 @@ uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py,sha256=oL-Nqri
 uk_bin_collection/uk_bin_collection/councils/EastSuffolkCouncil.py,sha256=qQ0oOfGd0sWcczse_B22YoeL9uj3og8v3UJLt_Sx29c,4353
 uk_bin_collection/uk_bin_collection/councils/EastleighBoroughCouncil.py,sha256=V4Vso4DvawFiezKlmXbTlJEK9Sjhz9nA8WeYjwtO2e4,2310
 uk_bin_collection/uk_bin_collection/councils/ElmbridgeBoroughCouncil.py,sha256=TgBOaReHWBbm0avV7HqRf0x7cxDe9cacTUcP9TFFprs,3005
-uk_bin_collection/uk_bin_collection/councils/EnfieldCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/EnfieldCouncil.py,sha256=2yR5p-kdApOm1gHiynNECP0jQDvaYHOiT6MAQJAvunE,6144
 uk_bin_collection/uk_bin_collection/councils/EnvironmentFirst.py,sha256=_9QJYDHpdnYK5R6znvZk1w0F9GnPnI8G4b6I_p26h4U,1695
 uk_bin_collection/uk_bin_collection/councils/EppingForestDistrictCouncil.py,sha256=cKFllQ4zt6MGkwiz_HedZvw3iL1kRMLA6Ct2spUE5og,2085
 uk_bin_collection/uk_bin_collection/councils/ErewashBoroughCouncil.py,sha256=QTQA6NjZtTL2baDeerIQW1SQpawwu6kGDMGdVvYQRRo,2501
@@ -78,7 +78,7 @@ uk_bin_collection/uk_bin_collection/councils/FarehamBoroughCouncil.py,sha256=25Q
 uk_bin_collection/uk_bin_collection/councils/FenlandDistrictCouncil.py,sha256=sFrnKzIE2tIcz0YrC6A9HcevzgNdf6E6_HLGMWDKtGw,2513
 uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py,sha256=xO5gqgsN9K-cQsuDoQF7ycZkjNdCPAQwIYOCFWxFJ_Y,4504
 uk_bin_collection/uk_bin_collection/councils/GatesheadCouncil.py,sha256=SRCgYhYs6rv_8C1UEDVORHZgXxcJkoZBjzdYS4Lu-ew,4531
-uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py,sha256=XzfFMCwclh9zAJgsbaj4jywjdiH0wPaFicaVsLrN3ms,2297
 uk_bin_collection/uk_bin_collection/councils/GlasgowCityCouncil.py,sha256=i7BympEhCm7D9yR0p5_QQICtWvNcDYNJIWB19SA0g2k,2303
 uk_bin_collection/uk_bin_collection/councils/GloucesterCityCouncil.py,sha256=8Wjvmdvg5blHVrREaEnhhWZaWhYVP4v_KdDVPLIUxaU,4889
 uk_bin_collection/uk_bin_collection/councils/GuildfordCouncil.py,sha256=9pVrmQhZcK2AD8gX8mNvP--L4L9KaY6L3B822VX6fec,5695
@@ -90,7 +90,7 @@ uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65Qu
 uk_bin_collection/uk_bin_collection/councils/HounslowCouncil.py,sha256=LXhJ47rujx7k3naz0tFiTT1l5k6gAYcVdekJN1t_HLY,4564
 uk_bin_collection/uk_bin_collection/councils/HullCityCouncil.py,sha256=UHcesBoctFVcXDYuwfag43KbcJcopkEDzJ-54NxtK0Q,1851
 uk_bin_collection/uk_bin_collection/councils/HuntingdonDistrictCouncil.py,sha256=dGyhhG6HRjQ2SPeiRwUPTGlk9dPIslagV2k0GjEOn1s,1587
-uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py,sha256=xavzL6ZIU9DG8Xro3vN0CEnYmNU31OGnOvnq78wgpQc,1258
 uk_bin_collection/uk_bin_collection/councils/KingstonUponThamesCouncil.py,sha256=iZ7njIxccCGBhUUWWd9Azh7cxUAKaofebCm3lo-TuxA,3543
 uk_bin_collection/uk_bin_collection/councils/KirkleesCouncil.py,sha256=gtAtM6FappSZ0-BKid7_pnSrnqPPjHn_UYWqMg9wQlM,4499
 uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py,sha256=VdlWDESoHfr_X0r8-UMaLMUQhKZOa2BnpVPkX-1u3EQ,5605
@@ -114,7 +114,7 @@ uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py,s
 uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py,sha256=AZgC9wmDLEjUOtIFvf0ehF5LHturXTH4DkE3ioPSVBA,6254
 uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=3K3X7kv1oOJkZm2ivnT8Lx0GAzPefgFSHjruwcFgO7I,2821
 uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py,sha256=3olsWa77L34vz-c7NgeGK9xmNuR4Ws_oAk5D4UpIkPw,2005
-uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py,sha256=xWR5S0gwQu9gXxjl788Wux1KaC0CT7ZFw0iXuRLZCEM,5599
 uk_bin_collection/uk_bin_collection/councils/NeathPortTalbotCouncil.py,sha256=ychYR2nsyk2UIb8tjWaKrLUT4hxSsHN558l3RqZ0mjw,5635
 uk_bin_collection/uk_bin_collection/councils/NewForestCouncil.py,sha256=ylTn9KmWITtaO9_Z8kJCN2w2ALfhrfGt3SeJ78lgw7M,5391
 uk_bin_collection/uk_bin_collection/councils/NewarkAndSherwoodDC.py,sha256=lAleYfCGUWCKOi7Ye_cjgfpI3pWwTcFctlYmh0hjebM,2140
@@ -130,7 +130,7 @@ uk_bin_collection/uk_bin_collection/councils/NorthLincolnshireCouncil.py,sha256=
 uk_bin_collection/uk_bin_collection/councils/NorthNorfolkDistrictCouncil.py,sha256=VV_zqVZYv8ekXcUHhrBlTX_W5qLYE9IA3mT2xmrZqoI,4315
 uk_bin_collection/uk_bin_collection/councils/NorthNorthamptonshireCouncil.py,sha256=kcMN-5GBjYDM9F1BKfHoYeydub8SuDxHamJbSvJRZ68,2337
 uk_bin_collection/uk_bin_collection/councils/NorthSomersetCouncil.py,sha256=EbFVnPYZIOkru5_Y75kjljM3Cr3HIJgP-SU6hxgf6tk,2754
-uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py,sha256=MHfcSrfWBIiWwVnhOqUrOkr0Ahguu_oU4pHrv_vSa-Y,5929
 uk_bin_collection/uk_bin_collection/councils/NorthWestLeicestershire.py,sha256=gJj0dyQc5QUefqusKGk2LLXfWbG5tlEXUOh8KAPh3RI,4584
 uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py,sha256=2wTrr3VrZDp9-YtDPmWd649gXeWH4hbm2-Hw8Vau5Xs,1933
 uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py,sha256=KEFsxEvQ159fkuFo-fza67YCnnCZ5ElwE80zTrqDEWI,4990
@@ -146,8 +146,8 @@ uk_bin_collection/uk_bin_collection/councils/ReigateAndBansteadBoroughCouncil.py
 uk_bin_collection/uk_bin_collection/councils/RenfrewshireCouncil.py,sha256=VlWm-w4d-UchoENe_hCTCGlfSHiMlS4wNEeMvxuNR2U,5109
 uk_bin_collection/uk_bin_collection/councils/RhonddaCynonTaffCouncil.py,sha256=wInyVG_0wRrX_dRO9qbAzPhlXDseXapj2zQhsISw8gg,3233
 uk_bin_collection/uk_bin_collection/councils/RochdaleCouncil.py,sha256=UTSwSw515VehGn4xkjjRhUlzS4lDj4hgna6y-4VW3uM,2379
-uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py,sha256=
-uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py,sha256=rfhD66A9HfHL46ldF9sbxvV7fPaaoNxzIJbHjVT6A90,2621
+uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py,sha256=LtMPM8lj5bfReDR4buHEo-aRC_HTBIeo1nf8GE5-R80,1790
 uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py,sha256=a5ySLmFvvY56QMA7-bk6MVBxRp5tPBIBg4navH0eYas,4306
 uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py,sha256=wMtiYRirT585vtsEOIyXHugk7aEj3pvyVWBaAePdqtE,4005
 uk_bin_collection/uk_bin_collection/councils/RushmoorCouncil.py,sha256=ZsGnXjoEaOS6U7fI0w7-uqxayAHdNVKsJi2fqIWEls8,3375
@@ -192,7 +192,7 @@ uk_bin_collection/uk_bin_collection/councils/TunbridgeWellsCouncil.py,sha256=s8N
 uk_bin_collection/uk_bin_collection/councils/UttlesfordDistrictCouncil.py,sha256=GSELWbSn5jtznv6FSLIMxK6CyQ27MW9FoY_m5jhTEBA,4175
 uk_bin_collection/uk_bin_collection/councils/ValeofGlamorganCouncil.py,sha256=Phgb_ECiUOOkqOx6OsfsTHMCW5VQfRmOC2zgYIQhuZA,5044
 uk_bin_collection/uk_bin_collection/councils/ValeofWhiteHorseCouncil.py,sha256=KBKGHcWAdPC_8-CfKnLOdP7Ww6RIvlxLIJGqBsq_77g,4208
-uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py,sha256
+uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py,sha256=vRfIU0Uloi1bgXqjOCpdb-EQ4oY-aismcANZRwOIFkc,4914
 uk_bin_collection/uk_bin_collection/councils/WalsallCouncil.py,sha256=_anovUnXMr40lZLHyX3opIP73BwauCllKy-Z2SBrzPw,2076
 uk_bin_collection/uk_bin_collection/councils/WalthamForest.py,sha256=P7MMw0EhpRmDbbnHb25tY5_yvYuZUFwJ1br4TOv24sY,4997
 uk_bin_collection/uk_bin_collection/councils/WarwickDistrictCouncil.py,sha256=3WQrAxzYzKoV4LyOqNTp9xINVsNi1xW9t8etducGeag,1146
@@ -211,15 +211,15 @@ uk_bin_collection/uk_bin_collection/councils/WinchesterCityCouncil.py,sha256=W2k
 uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha256=7Qhznj95ktAQjpWm5C8pbD5UcvfXm7Mwb7_DQxwjGSM,1777
 uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py,sha256=X_e9zXEZAl_Mp6nPORHc9CTmf3QHdoMY3BCnKrXEr1I,2131
 uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py,sha256=37igH9g0xe4XIhRhcJ-ZJBU8MxTp5yzgpadWbdE33Yg,5205
-uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=H8aFHlacwV07X-6T9RQua4irqDA0cIQrF4O1FfPR7yI,4114
 uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py,sha256=YuZdzEW0CZLwusm1VQcGRIKXAab_UDFLaCnN60itt_E,5776
 uk_bin_collection/uk_bin_collection/councils/WyreCouncil.py,sha256=zDDa7n4K_zm5PgDL08A26gD9yOOsOhuexI3x2seaBF4,3511
 uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bIdsvmoSzBjJAvTTi6yPfJa8xjJx1ys2w,1490
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=4s9ODGPAwPqwXc8SrTX5Wlfmizs3_58iXUtHc4Ir86o,1162
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
+uk_bin_collection-0.103.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.103.0.dist-info/METADATA,sha256=a_GcUI_307Qq6PiurZcZ1WUzk4AoPRjQLZPLj9TPKVs,17630
+uk_bin_collection-0.103.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+uk_bin_collection-0.103.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.103.0.dist-info/RECORD,,
{uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/LICENSE
RENAMED - File without changes

{uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/WHEEL
RENAMED - File without changes

{uk_bin_collection-0.101.0.dist-info → uk_bin_collection-0.103.0.dist-info}/entry_points.txt
RENAMED - File without changes