uk_bin_collection-0.114.5-py3-none-any.whl → uk_bin_collection-0.115.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- uk_bin_collection/tests/input.json
+++ uk_bin_collection/tests/input.json
@@ -12,6 +12,12 @@
         "wiki_name": "Adur and Worthing Councils",
         "wiki_note": "Replace XXXXXXXX with your UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find it."
     },
+    "AntrimAndNewtonabbeyCouncil": {
+        "url": "https://antrimandnewtownabbey.gov.uk/residents/bins-recycling/bins-schedule/?Id=643",
+        "wiki_command_url_override": "https://antrimandnewtownabbey.gov.uk/residents/bins-recycling/bins-schedule/?Id=XXXX",
+        "wiki_name": "Antrim & Newtonabbey Council",
+        "wiki_note": "Navigate to [https://antrimandnewtownabbey.gov.uk/residents/bins-recycling/bins-schedule] and search for your street name. Use the URL with the ID to replace XXXXXXXX with your specific ID."
+    },
     "ArdsAndNorthDownCouncil": {
         "url": "https://www.ardsandnorthdown.gov.uk",
         "wiki_command_url_override": "https://www.ardsandnorthdown.gov.uk",
@@ -230,6 +236,13 @@
         "wiki_name": "Bromsgrove District Council",
         "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
     },
+    "BroxbourneCouncil": {
+        "url": "https://www.broxbourne.gov.uk",
+        "uprn": "148048608",
+        "postcode": "EN8 7FL",
+        "wiki_name": "Broxbourne Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "BroxtoweBoroughCouncil": {
         "postcode": "NG16 2LY",
         "skip_get_url": true,
@@ -736,6 +749,15 @@
         "wiki_name": "Harrogate Borough Council",
         "wiki_note": "Pass the UPRN, which can be found at [this site](https://secure.harrogate.gov.uk/inmyarea). URL doesn't need to be passed."
     },
+    "HertsmereBoroughCouncil": {
+        "house_number": "1",
+        "postcode": "WD7 9HZ",
+        "skip_get_url": true,
+        "url": "https://www.hertsmere.gov.uk",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "Hertsmere Borough Council",
+        "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter."
+    },
     "HighlandCouncil": {
         "url": "https://www.highland.gov.uk",
         "wiki_command_url_override": "https://www.highland.gov.uk",
@@ -1664,6 +1686,13 @@
         "wiki_name": "Waltham Forest",
         "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
     },
+    "WarringtonBoroughCouncil": {
+        "url": "https://www.warrington.gov.uk",
+        "wiki_command_url_override": "https://www.warrington.gov.uk",
+        "uprn": "10094964379",
+        "wiki_name": "Warrington Borough Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "WarwickDistrictCouncil": {
         "url": "https://estates7.warwickdc.gov.uk/PropertyPortal/Property/Recycling/100070263793",
         "wiki_command_url_override": "https://estates7.warwickdc.gov.uk/PropertyPortal/Property/Recycling/XXXXXXXX",
@@ -1715,6 +1744,13 @@
         "wiki_name": "West Berkshire Council",
         "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter."
     },
+    "WestLancashireBoroughCouncil": {
+        "url": "https://www.westlancs.gov.uk",
+        "uprn": "10012343339",
+        "postcode": "WN8 0HR",
+        "wiki_name": "West Lancashire Borough Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "WestLindseyDistrictCouncil": {
         "house_number": "PRIVATE ACCOMMODATION",
         "postcode": "LN8 2AR",
--- /dev/null
+++ uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py
@@ -0,0 +1,53 @@
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        bindata = {"bins": []}
+
+        soup = BeautifulSoup(page.content, "html.parser")
+        soup.prettify
+
+        collection_divs = soup.select("div.feature-box.bins")
+        if not collection_divs:
+            raise Exception("No collections found")
+
+        for collection_div in collection_divs:
+            date_p = collection_div.select_one("p.date")
+            if not date_p:
+                continue
+
+            # Thu 22 Aug, 2024
+            date_ = datetime.strptime(date_p.text.strip(), "%a %d %b, %Y").strftime(
+                "%d/%m/%Y"
+            )
+            bins = collection_div.select("li")
+            if not bins:
+                continue
+            for bin in bins:
+                if not bin.text.strip():
+                    continue
+                bin_type = bin.text.strip()
+
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": date_,
+                }
+                bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
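For reference, the new Antrim & Newtownabbey parser hinges on the date wording shown in the inline comment above ("Thu 22 Aug, 2024"). A minimal standalone sketch of that conversion, with an assumed sample value:

    from datetime import datetime

    # Assumed sample of the text found in the page's <p class="date"> element
    raw = "Thu 22 Aug, 2024"

    # "%a %d %b, %Y" matches the page's wording; output uses the DD/MM/YYYY
    # style this project stores in collectionDate
    print(datetime.strptime(raw, "%a %d %b, %Y").strftime("%d/%m/%Y"))  # 22/08/2024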
--- uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py
+++ uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py
@@ -1,3 +1,5 @@
+import re
+
 import requests
 from bs4 import BeautifulSoup
 
@@ -89,12 +91,40 @@ class CouncilClass(AbstractGetBinDataClass):
                 )
             ).strftime(date_format)
 
-            # Build data dict for each entry
-            dict_data = {
-                "type": bin_type,
-                "collectionDate": bin_date,
-            }
-            data["bins"].append(dict_data)
+            # Build data dict for each entry
+            dict_data = {
+                "type": bin_type,
+                "collectionDate": bin_date,
+            }
+            data["bins"].append(dict_data)
+
+        for bin in soup.find_all(attrs={"id": re.compile(r"CTID-D0TUYGxO-\d+-A")}):
+            dict_data = {
+                "type": "General Waste",
+                "collectionDate": datetime.strptime(
+                    bin.text.strip(),
+                    "%a %b %d %Y",
+                ).strftime(date_format),
+            }
+            data["bins"].append(dict_data)
+        for bin in soup.find_all(attrs={"id": re.compile(r"CTID-d3gapLk-\d+-A")}):
+            dict_data = {
+                "type": "Recycling Waste",
+                "collectionDate": datetime.strptime(
+                    bin.text.strip(),
+                    "%a %b %d %Y",
+                ).strftime(date_format),
+            }
+            data["bins"].append(dict_data)
+        for bin in soup.find_all(attrs={"id": re.compile(r"CTID-L8OidMPA-\d+-A")}):
+            dict_data = {
+                "type": "Garden Waste (Subscription Only)",
+                "collectionDate": datetime.strptime(
+                    bin.text.strip(),
+                    "%a %b %d %Y",
+                ).strftime(date_format),
+            }
+            data["bins"].append(dict_data)
 
         data["bins"].sort(
             key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
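The new Bradford loops select elements whose id attribute matches a regular expression rather than an exact string. A small illustration of that BeautifulSoup pattern, using made-up HTML in the same CTID style:

    import re
    from datetime import datetime

    from bs4 import BeautifulSoup

    # Made-up markup; the real page carries ids of the form CTID-<token>-<n>-A
    html = """
    <span id="CTID-D0TUYGxO-1-A">Mon Sep 02 2024</span>
    <span id="CTID-D0TUYGxO-2-A">Mon Sep 16 2024</span>
    """
    soup = BeautifulSoup(html, "html.parser")

    # attrs={"id": re.compile(...)} lets find_all match ids by pattern
    for el in soup.find_all(attrs={"id": re.compile(r"CTID-D0TUYGxO-\d+-A")}):
        print(datetime.strptime(el.text.strip(), "%a %b %d %Y").strftime("%d/%m/%Y"))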
--- /dev/null
+++ uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py
@@ -0,0 +1,71 @@
+from datetime import datetime
+
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        user_uprn = kwargs.get("uprn")
+        user_postcode = kwargs.get("postcode")
+        check_uprn(user_uprn)
+        check_postcode(user_postcode)
+        bindata = {"bins": []}
+
+        API_URL = "https://www.broxbourne.gov.uk/xfp/form/205"
+
+        post_data = {
+            "page": "490",
+            "locale": "en_GB",
+            "qacf7e570cf99fae4cb3a2e14d5a75fd0d6561058_0_0": user_postcode,
+            "qacf7e570cf99fae4cb3a2e14d5a75fd0d6561058_1_0": user_uprn,
+            "next": "Next",
+        }
+
+        r = requests.post(API_URL, data=post_data)
+        r.raise_for_status()
+
+        soup = BeautifulSoup(r.content, features="html.parser")
+        soup.prettify()
+
+        form__instructions = soup.find(attrs={"class": "form__instructions"})
+        table = form__instructions.find("table")
+
+        rows = table.find_all("tr")
+
+        current_year = datetime.now().year
+
+        # Process each row into a list of dictionaries
+        for row in rows[1:]:  # Skip the header row
+            columns = row.find_all("td")
+            collection_date = (
+                columns[0].get_text(separator=" ").replace("\xa0", " ").strip()
+            )
+            service = columns[1].get_text(separator=" ").replace("\xa0", " ").strip()
+
+            collection_date = datetime.strptime(collection_date, "%a %d %b")
+
+            if collection_date.month == 1:
+                collection_date = collection_date.replace(year=current_year + 1)
+            else:
+                collection_date = collection_date.replace(year=current_year)
+
+            dict_data = {
+                "type": service,
+                "collectionDate": (collection_date).strftime(date_format),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
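The Broxbourne result table shows dates without a year (the code parses "%a %d %b"), so the parser above infers the year itself, treating January dates as belonging to the following year. A self-contained sketch of that rollover rule with assumed sample values:

    from datetime import datetime

    def with_inferred_year(day_month: str, today: datetime) -> datetime:
        # "%a %d %b" parses strings such as "Mon 30 Dec" (no year on the page)
        parsed = datetime.strptime(day_month, "%a %d %b")
        # Mirror of the rule above: January dates are pushed into the next year
        if parsed.month == 1:
            return parsed.replace(year=today.year + 1)
        return parsed.replace(year=today.year)

    today = datetime(2024, 12, 20)  # assumed "current" date
    print(with_inferred_year("Wed 01 Jan", today).strftime("%d/%m/%Y"))  # 01/01/2025
    print(with_inferred_year("Mon 30 Dec", today).strftime("%d/%m/%Y"))  # 30/12/2024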
--- /dev/null
+++ uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py
@@ -0,0 +1,161 @@
+import re
+import time
+
+import requests
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_paon = kwargs.get("paon")
+        user_postcode = kwargs.get("postcode")
+        web_driver = kwargs.get("web_driver")
+        headless = kwargs.get("headless")
+        check_paon(user_paon)
+        check_postcode(user_postcode)
+        bindata = {"bins": []}
+
+        URI_1 = "https://www.hertsmere.gov.uk/Environment-Refuse-and-Recycling/Recycling--Waste/Bin-collections/Collections-and-calendar.aspx"
+        URI_2 = "https://hertsmere-services.onmats.com/w/webpage/round-search"
+
+        # Create Selenium webdriver
+        driver = create_webdriver(web_driver, headless, None, __name__)
+        driver.get(URI_1)
+
+        soup = BeautifulSoup(driver.page_source, "html.parser")
+
+        current_week = (soup.find("li", class_="current")).text.strip()
+
+        strong = soup.find_all("strong", text=re.compile(r"^Week"))
+
+        bin_weeks = []
+        for tag in strong:
+            parent = tag.parent
+            bin_type = (
+                (parent.text).split("-")[1].strip().replace("\xa0", " ").split(" and ")
+            )
+            for bin in bin_type:
+                dict_data = {
+                    "week": tag.text.replace("\xa0", " "),
+                    "bin_type": bin,
+                }
+                bin_weeks.append(dict_data)
+
+        driver.get(URI_2)
+
+        # Wait for the postcode field to appear then populate it
+        inputElement_postcode = WebDriverWait(driver, 30).until(
+            EC.presence_of_element_located(
+                (
+                    By.CLASS_NAME,
+                    "relation_path_type_ahead_search",
+                )
+            )
+        )
+        inputElement_postcode.send_keys(user_postcode)
+
+        WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.XPATH,
+                    f"//ul[@class='result_list']/li[starts-with(@aria-label, '{user_paon}')]",
+                )
+            )
+        ).click()
+
+        WebDriverWait(driver, timeout=10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.CSS_SELECTOR,
+                    "input.fragment_presenter_template_edit.btn.bg-primary.btn-medium[type='submit']",
+                )
+            )
+        ).click()
+
+        WebDriverWait(driver, timeout=10).until(
+            EC.presence_of_element_located(
+                (By.XPATH, "//h3[contains(text(), 'Collection days')]")
+            )
+        )
+
+        soup = BeautifulSoup(driver.page_source, "html.parser")
+
+        table = soup.find("table", class_="table listing table-striped")
+
+        # Check if the table was found
+        if table:
+            # Extract table rows and cells
+            table_data = []
+            for row in table.find("tbody").find_all("tr"):
+                # Extract cell data from each <td> tag
+                row_data = [cell.get_text(strip=True) for cell in row.find_all("td")]
+                table_data.append(row_data)
+
+        else:
+            print("Table not found.")
+
+        collection_day = (table_data[0])[1]
+
+        current_week_bins = [bin for bin in bin_weeks if bin["week"] == current_week]
+        next_week_bins = [bin for bin in bin_weeks if bin["week"] != current_week]
+
+        days_of_week = [
+            "Monday",
+            "Tuesday",
+            "Wednesday",
+            "Thursday",
+            "Friday",
+            "Saturday",
+            "Sunday",
+        ]
+
+        today = datetime.now()
+        today_idx = today.weekday()  # Monday is 0 and Sunday is 6
+        target_idx = days_of_week.index(collection_day)
+
+        days_until_target = (target_idx - today_idx) % 7
+        if days_until_target == 0:
+            next_day = today
+        else:
+            next_day = today + timedelta(days=days_until_target)
+
+        current_week_dates = get_dates_every_x_days(next_day, 14, 7)
+        next_week_date = next_day + timedelta(days=7)
+        next_week_dates = get_dates_every_x_days(next_week_date, 14, 7)
+
+        for date in current_week_dates:
+            for bin in current_week_bins:
+                dict_data = {
+                    "type": bin["bin_type"],
+                    "collectionDate": date,
+                }
+                bindata["bins"].append(dict_data)
+
+        for date in next_week_dates:
+            for bin in next_week_bins:
+                dict_data = {
+                    "type": bin["bin_type"],
+                    "collectionDate": date,
+                }
+                bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
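The Hertsmere pages appear to expose only a collection weekday plus an alternating week rota, so the scraper has to turn that into concrete dates. The sketch below reproduces the weekday arithmetic; the fortnightly generator is a stand-in for the project's get_dates_every_x_days helper, whose exact signature is assumed here:

    from datetime import datetime, timedelta

    DAYS = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]

    def next_weekday(today: datetime, day_name: str) -> datetime:
        # 0..6 days until the next occurrence; today counts if it already matches
        return today + timedelta(days=(DAYS.index(day_name) - today.weekday()) % 7)

    def fortnightly(start: datetime, count: int) -> list:
        # Stand-in for get_dates_every_x_days(start, 14, count): one date every 14 days
        return [(start + timedelta(days=14 * i)).strftime("%d/%m/%Y") for i in range(count)]

    today = datetime(2024, 9, 2)             # assumed "current" date (a Monday)
    first = next_weekday(today, "Thursday")  # e.g. collection_day read from the table
    print(fortnightly(first, 3))                      # this week's fortnightly stream
    print(fortnightly(first + timedelta(days=7), 3))  # the alternating week's stream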
--- uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py
+++ uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py
@@ -36,7 +36,7 @@ class CouncilClass(AbstractGetBinDataClass):
             for date in v:
                 dict_data = {
                     "type": k,
-                    "collectionDate": date
+                    "collectionDate": datetime.strptime(date, "%Y-%m-%d").strftime(date_format)
                 }
                 data["bins"].append(dict_data)
 
--- /dev/null
+++ uk_bin_collection/uk_bin_collection/councils/WarringtonBoroughCouncil.py
@@ -0,0 +1,50 @@
+import requests
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = f"https://www.warrington.gov.uk/bin-collections/get-jobs/{user_uprn}"
+
+        # Make the GET request
+        response = requests.get(URI)
+
+        # Parse the JSON response
+        bin_collection = response.json()
+
+        # Loop through each collection in bin_collection
+        for collection in bin_collection["schedule"]:
+            bin_type = collection["Name"]
+            collection_dates = collection["ScheduledStart"]
+
+            print(f"Bin Type: {bin_type}")
+            print(f"Collection Date: {collection_dates}")
+
+            dict_data = {
+                "type": bin_type,
+                "collectionDate": datetime.strptime(
+                    collection_dates,
+                    "%Y-%m-%dT%H:%M:%S",
+                ).strftime(date_format),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
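Warrington's endpoint returns JSON whose ScheduledStart values are ISO-style timestamps; the sketch below shows the same conversion and sort on an illustrative payload (not a real response):

    from datetime import datetime

    # Illustrative payload shaped like the get-jobs response
    schedule = [
        {"Name": "Recycling", "ScheduledStart": "2024-09-16T07:00:00"},
        {"Name": "Household waste", "ScheduledStart": "2024-09-09T07:00:00"},
    ]

    bins = [
        {
            "type": job["Name"],
            # "%Y-%m-%dT%H:%M:%S" parses the timestamp; output is DD/MM/YYYY
            "collectionDate": datetime.strptime(
                job["ScheduledStart"], "%Y-%m-%dT%H:%M:%S"
            ).strftime("%d/%m/%Y"),
        }
        for job in schedule
    ]
    bins.sort(key=lambda x: datetime.strptime(x["collectionDate"], "%d/%m/%Y"))
    print(bins)  # earliest collection first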
--- /dev/null
+++ uk_bin_collection/uk_bin_collection/councils/WestLancashireBoroughCouncil.py
@@ -0,0 +1,114 @@
+import re
+from datetime import datetime
+
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        user_uprn = kwargs.get("uprn")
+        user_postcode = kwargs.get("postcode")
+        check_uprn(user_uprn)
+        check_postcode(user_postcode)
+        bindata = {"bins": []}
+
+        user_postcode = user_postcode.replace(" ", "+")
+
+        API_URL = (
+            f"https://your.westlancs.gov.uk/yourwestlancs.aspx?address={user_postcode}"
+        )
+
+        session = requests.Session()
+        response = session.get(API_URL)
+        soup = BeautifulSoup(response.content, "html.parser")
+
+        soup = BeautifulSoup(response.content, features="html.parser")
+        soup.prettify()
+
+        pattern = r"SELECT\$\d+"
+
+        # Loop through each row to find the one with the target UPRN
+        for row in soup.find("table", class_="striped-table").find_all("tr"):
+            cells = row.find_all("td")
+            if len(cells) > 2 and cells[2].get_text(strip=True) == user_uprn:
+                link = row.find("a", href=True)
+                if link:
+                    match = re.search(pattern, link["href"])
+
+                    # Extract important form data like __VIEWSTATE and __EVENTVALIDATION
+                    viewstate = soup.find("input", {"name": "__VIEWSTATE"})["value"]
+                    eventvalidation = soup.find("input", {"name": "__EVENTVALIDATION"})[
+                        "value"
+                    ]
+
+                    # Parameters for the "click" - usually __EVENTTARGET and __EVENTARGUMENT
+                    post_data = {
+                        "__VIEWSTATE": viewstate,
+                        "__EVENTVALIDATION": eventvalidation,
+                        "__EVENTTARGET": "ctl00$MainContent$GridView1",
+                        "__EVENTARGUMENT": match.group(
+                            0
+                        ),  # Modify as needed for the specific link
+                    }
+
+                    post_response = session.post(API_URL, data=post_data)
+
+                    soup = BeautifulSoup(post_response.text, features="html.parser")
+                    StreetSceneTable = soup.find("table", {"id": "StreetSceneTable"})
+
+                    if StreetSceneTable:
+
+                        # Extract each collection date or information by locating the span elements
+                        refuse_collection = soup.find(
+                            "span", id="ctl00_MainContent_lbNextDomRoundZones"
+                        ).text.strip()
+                        recycling_collection = soup.find(
+                            "span", id="ctl00_MainContent_lbNextRecRoundZones"
+                        ).text.strip()
+                        garden_waste_collection = soup.find(
+                            "span", id="ctl00_MainContent_lbNextGardenRoundZones"
+                        ).text.strip()
+
+                        # Structure the extracted data in a dictionary
+                        bin_schedule = [
+                            {
+                                "Service": "Refuse Collection",
+                                "Date": refuse_collection,
+                            },
+                            {
+                                "Service": "Recycling Collection",
+                                "Date": recycling_collection,
+                            },
+                            {
+                                "Service": "Garden Waste Collection",
+                                "Date": garden_waste_collection,
+                            },
+                        ]
+
+                        if bin_schedule:
+                            for service in bin_schedule:
+                                if service["Date"] != "Not subscribed":
+                                    dict_data = {
+                                        "type": service["Service"],
+                                        "collectionDate": service["Date"],
+                                    }
+                                    bindata["bins"].append(dict_data)
+
+                else:
+                    print("No link found in the row with the target UPRN.")
+                break
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
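The West Lancashire page is an ASP.NET WebForms application, so selecting an address row is simulated by posting the page's hidden state fields back to the same URL. A minimal sketch of assembling that postback from a made-up page fragment (the field values and control names below are illustrative; the real ones are read from the live page as in the parser above):

    from bs4 import BeautifulSoup

    # Made-up fragment of a WebForms page; real values come from the fetched page
    html = """
    <form action="yourwestlancs.aspx">
      <input type="hidden" name="__VIEWSTATE" value="dDwtMTIzNDU2Nzg5...">
      <input type="hidden" name="__EVENTVALIDATION" value="dDwxMjM0NTY3ODk...">
      <a href="javascript:__doPostBack('ctl00$MainContent$GridView1','SELECT$0')">Select</a>
    </form>
    """
    page = BeautifulSoup(html, "html.parser")

    # WebForms keeps its page state in hidden inputs, which must be echoed back
    # verbatim; __EVENTTARGET / __EVENTARGUMENT name the control and row "clicked"
    postback = {
        "__VIEWSTATE": page.find("input", {"name": "__VIEWSTATE"})["value"],
        "__EVENTVALIDATION": page.find("input", {"name": "__EVENTVALIDATION"})["value"],
        "__EVENTTARGET": "ctl00$MainContent$GridView1",
        "__EVENTARGUMENT": "SELECT$0",
    }
    # POSTing this dict back to the same .aspx URL (e.g. requests.Session().post(url, data=postback))
    # returns the page as it looks after the simulated row selection.
    print(postback)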
--- uk_bin_collection-0.114.5.dist-info/METADATA
+++ uk_bin_collection-0.115.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.114.5
+Version: 0.115.0
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
--- uk_bin_collection-0.114.5.dist-info/RECORD
+++ uk_bin_collection-0.115.0.dist-info/RECORD
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=5zb0mHkcv77mRLrlJknlb_oA2P8b5NXZn2WIRJQRJlo,98458
+uk_bin_collection/tests/input.json,sha256=5mBEtJS5JmJUe4kjQfZt5A1YOgqkZJ9Fcp44_lLVm3U,100381
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -14,6 +14,7 @@ uk_bin_collection/uk_bin_collection/collect_data.py,sha256=dB7wWXsJX4fm5bIf84lex
 uk_bin_collection/uk_bin_collection/common.py,sha256=fJG9ruqsCYOaYm-fzRb_l5kTeeB7i9k7qphWt3t7kks,10107
 uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py,sha256=aO1CSdyqa8oAD0fB79y1Q9bikAWCP_JFa7CsyTa2j9s,1655
 uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py,sha256=ppbrmm-MzB1wOulK--CU_0j4P-djNf3ozMhHnmQFqLo,1511
+uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py,sha256=Hp5pteaC5RjL5ZqPZ564S9WQ6ZTKLMO6Dl_fxip2TUc,1653
 uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py,sha256=iMBldxNErgi-ok1o6xpqdNgMvR6qapaNqoTWDTqMeGo,3824
 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
@@ -34,12 +35,13 @@ uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py,sha256=I8LN
 uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py,sha256=jHbCK8sL09vdmdP7Xnh8lIrU5AHTnJLEZfOLephPvWg,4090
 uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=WI68r8jB0IHPUT4CgmZMtng899AAMFTxkyTdPg9yLF8,4117
 uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py,sha256=Llo1rULaAZ8rChVYZqXFFLo7CN6vbT0ULUJD6ActouY,9015
-uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py,sha256=VFrdcqKpHPw8v77Ll9QzBz_4carUfC1XYnxqUvDihkA,4275
+uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py,sha256=BEWS2c62cOsf26jqn1AkNUvVmc5AlUADYLaQuPn9RY4,5456
 uk_bin_collection/uk_bin_collection/councils/BrecklandCouncil.py,sha256=PX6A_pDvaN109aSNWmEhm88GFKfkClIkmbwGURWvsks,1744
 uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py,sha256=k6qt4cds-Ejd97Z-__pw2BYvGVbFdc9SUfF73PPrTNA,5823
 uk_bin_collection/uk_bin_collection/councils/BristolCityCouncil.py,sha256=kJmmDJz_kQ45DHmG7ocrUpNJonEn0kuXYEDQyZaf9ks,5576
 uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py,sha256=_bAFykZWZkEVUB-QKeVLfWO8plG6nRgn71QF2BUN2rk,4329
 uk_bin_collection/uk_bin_collection/councils/BromsgroveDistrictCouncil.py,sha256=PUfxP8j5Oh9wFHkdjbrJzQli9UzMHZzwrZ2hkThrvhI,1781
+uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py,sha256=JC6Qqou1Rj4awn2VP3iuvwFpYayKDTt2_JNuNitjSoY,2393
 uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py,sha256=-Facq-ToQkcWUePpKBwq90LZUFxgUSydNL2sYaLX4yw,4473
 uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py,sha256=_ELVUM5VLp1nwDxRpvpsp6n8SzLJvp_UyMp-i_MXYuo,4383
 uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py,sha256=H7wAxO1nfxkewVoRRolumq8bBJG04siE3jieFH3RGpQ,2632
@@ -101,6 +103,7 @@ uk_bin_collection/uk_bin_collection/councils/HaltonBoroughCouncil.py,sha256=gq_C
 uk_bin_collection/uk_bin_collection/councils/HarboroughDistrictCouncil.py,sha256=uAbCgfrqkIkEKUyLVE8l72s5tzbfMFsw775i0nVRAyc,1934
 uk_bin_collection/uk_bin_collection/councils/HaringeyCouncil.py,sha256=t_6AkAu4wrv8Q0WlDhWh_82I0djl5tk531Pzs-SjWzg,2647
 uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py,sha256=_g3fP5Nq-OUjgNrfRf4UEyFKzq0x8QK-4enh5RP1efA,2050
+uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=-ThSG6NIJP_wf2GmGL7SAvxbOujdhanZ8ECP4VSQCBs,5415
 uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=oqF8M0lcT3KsrG6W6I6JJX07E6Sc_-_sr7MybfIMab8,4626
 uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
 uk_bin_collection/uk_bin_collection/councils/HounslowCouncil.py,sha256=LXhJ47rujx7k3naz0tFiTT1l5k6gAYcVdekJN1t_HLY,4564
@@ -155,7 +158,7 @@ uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py,sha256=2wTrr3VrZD
 uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py,sha256=KEFsxEvQ159fkuFo-fza67YCnnCZ5ElwE80zTrqDEWI,4990
 uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py,sha256=At-9dEcKBUZSrtJ2RncwvMnV0OVU3pE6kxEYbLL-Av8,2437
 uk_bin_collection/uk_bin_collection/councils/NottinghamCityCouncil.py,sha256=panTCjnsBOQ98-TBO9xVZk_jcT_gjMhx3Gg5oWxBRLo,1254
-uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py,sha256=QikiWSIYMjFzXMjLeXls35roJUNYn_RMpDjgkyimIu8,19134
+uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py,sha256=KqjQczn214ctkTsm-zimlDSDtgvcLQ23fte3kHiCEsg,19187
 uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py,sha256=9dlesCxNoVXlmQaqZj7QFh00smnJbm1Gnjkr_Uvzurs,1771
 uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py,sha256=d_bY0cXRDH4kSoWGGCTNN61MNErapSOf2WSTYDJr2r8,2318
 uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py,sha256=Kos5GzN2co3Ij3tSHOXB9S71Yt78RROCfVRtnh7M1VU,3657
@@ -221,12 +224,14 @@ uk_bin_collection/uk_bin_collection/councils/ValeofWhiteHorseCouncil.py,sha256=K
 uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py,sha256=vRfIU0Uloi1bgXqjOCpdb-EQ4oY-aismcANZRwOIFkc,4914
 uk_bin_collection/uk_bin_collection/councils/WalsallCouncil.py,sha256=_anovUnXMr40lZLHyX3opIP73BwauCllKy-Z2SBrzPw,2076
 uk_bin_collection/uk_bin_collection/councils/WalthamForest.py,sha256=P7MMw0EhpRmDbbnHb25tY5_yvYuZUFwJ1br4TOv24sY,4997
+uk_bin_collection/uk_bin_collection/councils/WarringtonBoroughCouncil.py,sha256=AB9mrV1v4pKKhfsBS8MpjO8XXBifqojSk53J9Q74Guk,1583
 uk_bin_collection/uk_bin_collection/councils/WarwickDistrictCouncil.py,sha256=3WQrAxzYzKoV4LyOqNTp9xINVsNi1xW9t8etducGeag,1146
 uk_bin_collection/uk_bin_collection/councils/WatfordBoroughCouncil.py,sha256=zFkXmF1X5g8pjv7II_jXBdrHJu16gy_PowVWVdaDg7A,2657
 uk_bin_collection/uk_bin_collection/councils/WaverleyBoroughCouncil.py,sha256=tp9l7vdgSGRzNNG0pDfnNuFj4D2bpRJUJmAiTJ6bM0g,4662
 uk_bin_collection/uk_bin_collection/councils/WealdenDistrictCouncil.py,sha256=SvSSaLkx7iJjzypAwKkaJwegXkSsIQtUOS2V605kz1A,3368
 uk_bin_collection/uk_bin_collection/councils/WelhatCouncil.py,sha256=ikUft37dYNJghfe-_6Fskiq1JihqpLmLNj38QkKSUUA,2316
 uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py,sha256=2eHRlalZyY9jv_UsCWM9IYzOpRdhce2sEW5NtygEnpw,5513
+uk_bin_collection/uk_bin_collection/councils/WestLancashireBoroughCouncil.py,sha256=iohI2NZSjsEnCpSGsCPZFC2EQZL5pyzcb9Ng_H0tMUE,4718
 uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py,sha256=JFWUy4w0CKulGq16PfbRDKAdQEbokVEuabwlZYigdEU,4606
 uk_bin_collection/uk_bin_collection/councils/WestLothianCouncil.py,sha256=dq0jimtARvRkZiGbVFrXXZgY-BODtz3uYZ5UKn0bf64,4114
 uk_bin_collection/uk_bin_collection/councils/WestMorlandAndFurness.py,sha256=jbqV3460rn9D0yTBGWjpSe1IvWWcdGur5pzgj-hJcQ4,2513
@@ -247,8 +252,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.114.5.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
-uk_bin_collection-0.114.5.dist-info/METADATA,sha256=LNZH_sVWVQa1WxvXeHt7pXAgglb0bKWj0i3LXU0jnLE,17574
-uk_bin_collection-0.114.5.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-uk_bin_collection-0.114.5.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
-uk_bin_collection-0.114.5.dist-info/RECORD,,
+uk_bin_collection-0.115.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.115.0.dist-info/METADATA,sha256=cHozMBPjD1SsD5IQeuL4wJJuYuV0_-3ZZWAZ5uAiALU,17574
+uk_bin_collection-0.115.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+uk_bin_collection-0.115.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.115.0.dist-info/RECORD,,