uk_bin_collection 0.129.0__py3-none-any.whl → 0.130.1__py3-none-any.whl

uk_bin_collection/tests/input.json

@@ -882,6 +882,12 @@
         "wiki_name": "Hartlepool Borough Council",
         "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
     },
+    "HerefordshireCouncil": {
+        "url": "https://www.herefordshire.gov.uk/rubbish-recycling/check-bin-collection-day?blpu_uprn=10096232662",
+        "wiki_command_url_override": "https://www.herefordshire.gov.uk/rubbish-recycling/check-bin-collection-day?blpu_uprn=XXXXXXXXXXXX",
+        "wiki_name": "Herefordshire Council",
+        "wiki_note": "Replace 'XXXXXXXXXXXX' with your property's UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)."
+    },
     "HertsmereBoroughCouncil": {
         "house_number": "1",
         "postcode": "WD7 9HZ",

uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py

@@ -19,87 +19,97 @@ class CouncilClass(AbstractGetBinDataClass):
     """

     def parse_data(self, page: str, **kwargs) -> dict:
-        # Get and check UPRN
-        user_postcode = kwargs.get("postcode")
-        user_paon = kwargs.get("paon")
-        check_paon(user_paon)
-        check_postcode(user_postcode)
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        bindata = {"bins": []}
-
-        API_URL = "https://portal.digital.ashfield.gov.uk/w/webpage/raise-case?service=bin_calendar"
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(API_URL)
-
-        title = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located((By.ID, "sub_page_title"))
-        )
-
-        # Wait for the postcode field to appear then populate it
-        WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
-            )
-        )
-
-        inputElement_postcode = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
-            )
-        )
-        inputElement_postcode.clear()
-        inputElement_postcode.send_keys(user_postcode)
-
-        # Wait for the 'Select your property' dropdown to appear and select the first result
-        dropdown = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.CLASS_NAME,
-                    "result_list ",
-                )
-            )
-        )
-
-        address_element = (
-            WebDriverWait(driver, 10)
-            .until(
-                EC.element_to_be_clickable(
-                    (By.XPATH, f"//li[starts-with(@aria-label, '{user_paon}')]")
-                )
-            )
-            .click()
-        )
-
-        search_button = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (By.XPATH, "//input[@type='submit' and @value='Search']")
-            )
-        )
-        search_button.click()
-
-        time.sleep(10)
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-        soup.prettify()
-
-        # Find the table by class name
-        table = soup.find("table", {"class": "table listing table-striped"})
-
-        # Iterate over each row in the tbody of the table
-        for row in table.find("tbody").find_all("tr"):
-            # Extract the service, day, and date for each row
-            service = row.find_all("td")[0].get_text(strip=True)
-            date = row.find_all("td")[2].get_text(strip=True)
-
-            dict_data = {
-                "type": service,
-                "collectionDate": datetime.strptime(date, "%a, %d %b %Y").strftime(
-                    date_format
-                ),
-            }
-            bindata["bins"].append(dict_data)
-
+        driver = None
+        try:
+            # Get and check UPRN
+            user_postcode = kwargs.get("postcode")
+            user_paon = kwargs.get("paon")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            bindata = {"bins": []}
+
+            API_URL = "https://portal.digital.ashfield.gov.uk/w/webpage/raise-case?service=bin_calendar"
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(API_URL)
+
+            title = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "sub_page_title"))
+            )
+
+            # Wait for the postcode field to appear then populate it
+            WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
+                )
+            )
+
+            inputElement_postcode = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
+                )
+            )
+            inputElement_postcode.clear()
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Wait for the 'Select your property' dropdown to appear and select the first result
+            dropdown = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.CLASS_NAME,
+                        "result_list ",
+                    )
+                )
+            )
+
+            address_element = (
+                WebDriverWait(driver, 10)
+                .until(
+                    EC.element_to_be_clickable(
+                        (By.XPATH, f"//li[starts-with(@aria-label, '{user_paon}')]")
+                    )
+                )
+                .click()
+            )
+
+            search_button = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (By.XPATH, "//input[@type='submit' and @value='Search']")
+                )
+            )
+            search_button.click()
+
+            time.sleep(10)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            # Find the table by class name
+            table = soup.find("table", {"class": "table listing table-striped"})
+
+            # Iterate over each row in the tbody of the table
+            for row in table.find("tbody").find_all("tr"):
+                # Extract the service, day, and date for each row
+                service = row.find_all("td")[0].get_text(strip=True)
+                date = row.find_all("td")[2].get_text(strip=True)
+
+                dict_data = {
+                    "type": service,
+                    "collectionDate": datetime.strptime(date, "%a, %d %b %Y").strftime(
+                        date_format
+                    ),
+                }
+                bindata["bins"].append(dict_data)
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
         return bindata
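
The substance of this change (repeated in the Powys and Teignbridge hunks below) is wrapping the whole Selenium session in try/except/finally so that driver.quit() always runs and no orphaned browser processes are left behind on failure. A minimal sketch of the pattern in isolation, using a stock Selenium webdriver as a stand-in for the project's create_webdriver helper; the function and variable names are illustrative:

from selenium import webdriver

def fetch_page_source(url: str) -> str:
    driver = None  # bind before the try so the finally block can test it safely
    try:
        driver = webdriver.Chrome()  # stand-in for create_webdriver(...)
        driver.get(url)
        return driver.page_source
    except Exception as e:
        print(f"An error occurred: {e}")
        raise  # propagate so callers still see the failure
    finally:
        if driver:  # runs on success and on error alike
            driver.quit()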

uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py (new file)

@@ -0,0 +1,53 @@
+import logging
+
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Make a BS4 object
+        soup = BeautifulSoup(page.text, features="html.parser")
+        soup.prettify()
+
+        data = {"bins": []}
+
+        checkValid = soup.find("p", id="selectedAddressResult")
+        if checkValid is None:
+            raise ValueError("Address/UPRN not found")
+
+        collections = soup.find("div", id="wasteCollectionDates")
+
+        for bins in collections.select('div[class*="hc-island"]'):
+            bin_type = bins.h4.get_text(strip=True)
+
+            # Last div.hc-island is the calendar link, skip it
+            if bin_type == "Calendar":
+                continue
+
+            # Next collection date is in a span under the second p.hc-no-margin of the div.
+            bin_collection = re.search(
+                r"(.*) \(.*\)", bins.select("div > p > span")[0].get_text(strip=True)
+            ).group(1)
+            if bin_collection:
+                logging.info(
+                    f"Bin type: {bin_type} - Collection date: {bin_collection}"
+                )
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": datetime.strptime(
+                        bin_collection, "%A %d %B %Y"
+                    ).strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+        return data
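
The new scraper pulls the date out of span text shaped like "<weekday> <day> <month> <year> (<relative day>)" before parsing it. A small, self-contained sketch of that step; the sample string is an assumption about the page's wording, and "%d/%m/%Y" is assumed to match the library's date_format:

import re
from datetime import datetime

text = "Friday 14 March 2025 (in 3 days)"  # assumed span text for illustration
date_part = re.search(r"(.*) \(.*\)", text).group(1)  # -> "Friday 14 March 2025"
parsed = datetime.strptime(date_part, "%A %d %B %Y")
print(parsed.strftime("%d/%m/%Y"))  # 14/03/2025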

uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py

@@ -21,121 +21,130 @@ class CouncilClass(AbstractGetBinDataClass):

     def parse_data(self, page: str, **kwargs) -> dict:
         driver = None
-        data = {"bins": []}
-        user_paon = kwargs.get("paon")
-        user_postcode = kwargs.get("postcode")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_paon(user_paon)
-        check_postcode(user_postcode)
-
-        user_paon = user_paon.upper()
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get("https://en.powys.gov.uk/binday")
-
-        accept_button = WebDriverWait(driver, timeout=10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.NAME,
-                    "acceptall",
-                )
-            )
-        )
-        accept_button.click()
-
-        # Wait for the postcode field to appear then populate it
-        inputElement_postcode = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPPOSTCODE")
-            )
-        )
-        inputElement_postcode.send_keys(user_postcode)
-
-        # Click search button
-        findAddress = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPSEARCH")
-            )
-        )
-        findAddress.click()
-
-        # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
-        WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.XPATH,
-                    "//select[@id='BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPADDRESS']//option[contains(., '"
-                    + user_paon
-                    + "')]",
-                )
-            )
-        ).click()
-
-        # Wait for the submit button to appear, then click it to get the collection dates
-        WebDriverWait(driver, 30).until(
-            EC.element_to_be_clickable(
-                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPBUTTONS_NEXT")
-            )
-        ).click()
-
-        # Wait for the collections table to appear
-        WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "BINDAYLOOKUP_COLLECTIONDATES_COLLECTIONDATES")
-            )
-        )
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-        # General rubbish collection dates
-        general_rubbish_section = soup.find(
-            "h3", string="General Rubbish / Wheelie bin"
-        )
-        general_rubbish_dates = [
-            li.text for li in general_rubbish_section.find_next("ul").find_all("li")
-        ]
-
-        for date in general_rubbish_dates:
-            dict_data = {
-                "type": "General Rubbish / Wheelie bin",
-                "collectionDate": datetime.strptime(
-                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        # Recycling and food waste collection dates
-        recycling_section = soup.find("h3", string="Recycling and Food Waste")
-        recycling_dates = [
-            li.text for li in recycling_section.find_next("ul").find_all("li")
-        ]
-
-        for date in recycling_dates:
-            dict_data = {
-                "type": "Recycling and Food Waste",
-                "collectionDate": datetime.strptime(
-                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        # Garden waste collection dates
-        garden_waste_section = soup.find("h3", string="Garden Waste")
-        garden_waste_dates = [
-            li.text for li in garden_waste_section.find_next("ul").find_all("li")
-        ]
-        for date in garden_waste_dates:
-            try:
-                dict_data = {
-                    "type": "Garden Waste",
-                    "collectionDate": datetime.strptime(
-                        remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
-                    ).strftime(date_format),
-                }
-                data["bins"].append(dict_data)
-            except:
-                continue
-
+        try:
+            data = {"bins": []}
+            user_paon = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+
+            user_paon = user_paon.upper()
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get("https://en.powys.gov.uk/binday")
+
+            accept_button = WebDriverWait(driver, timeout=10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.NAME,
+                        "acceptall",
+                    )
+                )
+            )
+            accept_button.click()
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPPOSTCODE")
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPSEARCH")
+                )
+            )
+            findAddress.click()
+
+            # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
+            WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.XPATH,
+                        "//select[@id='BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPADDRESS']//option[contains(., '"
+                        + user_paon
+                        + "')]",
+                    )
+                )
+            ).click()
+
+            # Wait for the submit button to appear, then click it to get the collection dates
+            WebDriverWait(driver, 30).until(
+                EC.element_to_be_clickable(
+                    (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPBUTTONS_NEXT")
+                )
+            ).click()
+
+            # Wait for the collections table to appear
+            WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "BINDAYLOOKUP_COLLECTIONDATES_COLLECTIONDATES")
+                )
+            )
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            # General rubbish collection dates
+            general_rubbish_section = soup.find(
+                "h3", string="General Rubbish / Wheelie bin"
+            )
+            general_rubbish_dates = [
+                li.text for li in general_rubbish_section.find_next("ul").find_all("li")
+            ]
+
+            for date in general_rubbish_dates:
+                dict_data = {
+                    "type": "General Rubbish / Wheelie bin",
+                    "collectionDate": datetime.strptime(
+                        remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                    ).strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            # Recycling and food waste collection dates
+            recycling_section = soup.find("h3", string="Recycling and Food Waste")
+            recycling_dates = [
+                li.text for li in recycling_section.find_next("ul").find_all("li")
+            ]
+
+            for date in recycling_dates:
+                dict_data = {
+                    "type": "Recycling and Food Waste",
+                    "collectionDate": datetime.strptime(
+                        remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                    ).strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            # Garden waste collection dates
+            garden_waste_section = soup.find("h3", string="Garden Waste")
+            garden_waste_dates = [
+                li.text for li in garden_waste_section.find_next("ul").find_all("li")
+            ]
+            for date in garden_waste_dates:
+                try:
+                    dict_data = {
+                        "type": "Garden Waste",
+                        "collectionDate": datetime.strptime(
+                            remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                        ).strftime(date_format),
+                    }
+                    data["bins"].append(dict_data)
+                except:
+                    continue
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
         return data
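
Powys renders its dates with ordinal suffixes (e.g. "3rd March 2025"), which strptime's %d cannot digest, hence the remove_ordinal_indicator_from_date_string call before parsing. A sketch of that helper's assumed behaviour; the regex below is an illustration of the idea, not the library's actual implementation:

import re
from datetime import datetime

def strip_ordinal(date_string: str) -> str:
    # Assumed equivalent of remove_ordinal_indicator_from_date_string:
    # drop the st/nd/rd/th suffix that follows the day number.
    return re.sub(r"(\d{1,2})(st|nd|rd|th)", r"\1", date_string)

print(datetime.strptime(strip_ordinal("3rd March 2025"), "%d %B %Y").date())  # 2025-03-03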

uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py

@@ -16,44 +16,53 @@ class CouncilClass(AbstractGetBinDataClass):
     """

     def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = f"https://www.teignbridge.gov.uk/repositories/hidden-pages/bin-finder?uprn={user_uprn}"
-
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(URI)
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-        collection_dates = soup.find_all(
-            "h3"
-        )  # Assuming collection dates are inside <h3> tags
-        bin_type_headers = soup.find_all(
-            "div", {"class": "binInfoContainer"}
-        )  # Assuming bin types are inside the binInfoContainer divs
-
-        # Iterate over the results and extract bin type and collection dates
-        for i, date in enumerate(collection_dates):
-            collection_date = date.get_text(strip=True)
-
-            bin_types = bin_type_headers[i].find_all("div")
-            for bin_type in bin_types:
-                dict_data = {
-                    "type": bin_type.text.strip(),
-                    "collectionDate": datetime.strptime(
-                        collection_date,
-                        "%d %B %Y%A",
-                    ).strftime("%d/%m/%Y"),
-                }
-                bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
+        driver = None
+        try:
+            user_uprn = kwargs.get("uprn")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            bindata = {"bins": []}
+
+            URI = f"https://www.teignbridge.gov.uk/repositories/hidden-pages/bin-finder?uprn={user_uprn}"
+
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(URI)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            collection_dates = soup.find_all(
+                "h3"
+            )  # Assuming collection dates are inside <h3> tags
+            bin_type_headers = soup.find_all(
+                "div", {"class": "binInfoContainer"}
+            )  # Assuming bin types are inside the binInfoContainer divs
+
+            # Iterate over the results and extract bin type and collection dates
+            for i, date in enumerate(collection_dates):
+                collection_date = date.get_text(strip=True)
+
+                bin_types = bin_type_headers[i].find_all("div")
+                for bin_type in bin_types:
+                    dict_data = {
+                        "type": bin_type.text.strip(),
+                        "collectionDate": datetime.strptime(
+                            collection_date,
+                            "%d %B %Y%A",
+                        ).strftime("%d/%m/%Y"),
+                    }
+                    bindata["bins"].append(dict_data)
+
+            bindata["bins"].sort(
+                key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+            )
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
         return bindata
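
Teignbridge's <h3> headings run the date and the weekday together with no separator, which is why the format string is the unusual "%d %B %Y%A". A quick sketch of that parse; the sample heading text is an assumption about the page's markup:

from datetime import datetime

heading = "10 March 2025Monday"  # assumed <h3> text: date immediately followed by weekday
parsed = datetime.strptime(heading, "%d %B %Y%A")  # %Y stops at the digits, %A takes the rest
print(parsed.strftime("%d/%m/%Y"))  # 10/03/2025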

uk_bin_collection-0.129.0.dist-info/METADATA → uk_bin_collection-0.130.1.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.129.0
+Version: 0.130.1
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com

uk_bin_collection-0.129.0.dist-info/RECORD → uk_bin_collection-0.130.1.dist-info/RECORD

@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=FYCsAsuo_x_eRxdI9GRehnzWQMG2yN2tP_Sw7XqlS4Y,115347
+uk_bin_collection/tests/input.json,sha256=EUx3biB0uK4bYrQEM5YTMa3xXTxKVfb0-mcw8bupyqM,115835
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -20,7 +20,7 @@ uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py,sha256=E
 uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py,sha256=fJ0UvuSCbzFE9CPoxt1U9CJeFsbTKts_5GRBc3E9Eno,2201
 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
-uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=2kZt9HGCVK-n0aq2VFWG6yiWihXjRf8MnksdQLMj4LU,3555
+uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=fhX7S_A3jqoND7NE6qITPMPvdk3FJSKZ3Eoa5RtSg3I,4247
 uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=yC-8UMQHSbvze43PJ2_F4Z3cu7M7cynKTojipBJU7Ug,4307
 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
@@ -129,6 +129,7 @@ uk_bin_collection/uk_bin_collection/councils/HaringeyCouncil.py,sha256=t_6AkAu4w
 uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py,sha256=_g3fP5Nq-OUjgNrfRf4UEyFKzq0x8QK-4enh5RP1efA,2050
 uk_bin_collection/uk_bin_collection/councils/HartDistrictCouncil.py,sha256=_llxT4JYYlwm20ZtS3fXwtDs6mwJyLTZBP2wBhvEpWk,2342
 uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py,sha256=MUT1A24iZShT2p55rXEvgYwGUuw3W05Z4ZQAveehv-s,2842
+uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py,sha256=JpQhkWM6Jeuzf1W7r0HqvtVnEqNi18nhwJX70YucdsI,1848
 uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=-ThSG6NIJP_wf2GmGL7SAvxbOujdhanZ8ECP4VSQCBs,5415
 uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt2iGl8qJxHb-uBh4u0knmi9MJ6irOJw9WYA,4805
 uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
@@ -203,7 +204,7 @@ uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py,sha256=d_bY0cX
 uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py,sha256=Kos5GzN2co3Ij3tSHOXB9S71Yt78RROCfVRtnh7M1VU,3657
 uk_bin_collection/uk_bin_collection/councils/PlymouthCouncil.py,sha256=FJqpJ0GJhpjYeyZ9ioZPkKGl-zrqMD3y5iKa07e_i30,3202
 uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py,sha256=xogNgVvwM5FljCziiNLgZ_wzkOnrQkifi1dkPMDRMtg,5588
-uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=FYdENaJ1ekGAPxd75b0wrizhlDU0SOu03jXoyon6M7Y,5094
+uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=db3Y5FJz-LFDqmVZqPdzcBxh0Q26OFPrbUxlQ7r4vsQ,5896
 uk_bin_collection/uk_bin_collection/councils/PrestonCityCouncil.py,sha256=3Nuin2hQsiEsbJR_kHldtzRhzmnPFctH7C7MFG7thj8,3838
 uk_bin_collection/uk_bin_collection/councils/ReadingBoroughCouncil.py,sha256=ZlQjU0IeKylGE9VlivSMh4XKwoLgntESPiylSOYkuD4,1009
 uk_bin_collection/uk_bin_collection/councils/RedditchBoroughCouncil.py,sha256=8QmcpStCT7c-CLhmiQ8ZeEyvtysU110VDiMQdfQTErk,2469
@@ -256,7 +257,7 @@ uk_bin_collection/uk_bin_collection/councils/SwanseaCouncil.py,sha256=nmVPoPhnFg
 uk_bin_collection/uk_bin_collection/councils/SwindonBoroughCouncil.py,sha256=lSIykpkBjVwQSf3rrnrNuh7YRepgnkKQLbf1iErMuJs,1932
 uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py,sha256=k2TAAZG7n2S1BWVyxbE_-4-lZuzhOimCNz4yimUCOGk,1995
 uk_bin_collection/uk_bin_collection/councils/TandridgeDistrictCouncil.py,sha256=KLVvM2NNq_DQylVe5dwO2l7qPahLHg08jJGLCv1MBQ4,2324
-uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py,sha256=vSnQ7UyKEKozhRg3B6BtUhgv8yo719aqAeoXhRP2CW8,2164
+uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py,sha256=-NowMNcxsnktzUxTk-XUfzFJgXKSSerCmdZ7cN4cE1s,2703
 uk_bin_collection/uk_bin_collection/councils/TelfordAndWrekinCouncil.py,sha256=p1ZS5R4EGxbEWlRBrkGXgKwE_lkyBT-R60yKFFhVObc,1844
 uk_bin_collection/uk_bin_collection/councils/TendringDistrictCouncil.py,sha256=DJbYI8m6lIISDrK5h8V5Jo-9kGG7kr9dz7GD8St4nc8,4274
 uk_bin_collection/uk_bin_collection/councils/TestValleyBoroughCouncil.py,sha256=Dtfkyrwt795W7gqFJxVGRR8t3R5WMNQZwTWJckLpZWE,8480
@@ -305,8 +306,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.129.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
-uk_bin_collection-0.129.0.dist-info/METADATA,sha256=1SICPtPXrWAhEM-GkWQRRW2yw0cINhmXdBfpp1kwES4,19549
-uk_bin_collection-0.129.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-uk_bin_collection-0.129.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
-uk_bin_collection-0.129.0.dist-info/RECORD,,
+uk_bin_collection-0.130.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.130.1.dist-info/METADATA,sha256=hJOq8xXrfHC1KyQ9OutxVsvz57Mi8aXuBCjC8scAFEI,19549
+uk_bin_collection-0.130.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+uk_bin_collection-0.130.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.130.1.dist-info/RECORD,,