uk_bin_collection-0.130.0-py3-none-any.whl → uk_bin_collection-0.131.0-py3-none-any.whl

--- a/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py
@@ -12,6 +12,7 @@ class CouncilClass(AbstractGetBinDataClass):
     base class. They can also override some operations with a default
     implementation.
     """
+
     def parse_data(self, page: str, **kwargs) -> dict:

         user_uprn = kwargs.get("uprn")
--- a/uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py
@@ -19,87 +19,97 @@ class CouncilClass(AbstractGetBinDataClass):
     """

     def parse_data(self, page: str, **kwargs) -> dict:
-        # Get and check UPRN
-        user_postcode = kwargs.get("postcode")
-        user_paon = kwargs.get("paon")
-        check_paon(user_paon)
-        check_postcode(user_postcode)
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        bindata = {"bins": []}
-
-        API_URL = "https://portal.digital.ashfield.gov.uk/w/webpage/raise-case?service=bin_calendar"
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(API_URL)
-
-        title = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located((By.ID, "sub_page_title"))
-        )
-
-        # Wait for the postcode field to appear then populate it
-        WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
-            )
-        )
-
-        inputElement_postcode = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
-            )
-        )
-        inputElement_postcode.clear()
-        inputElement_postcode.send_keys(user_postcode)
-
-        # Wait for the 'Select your property' dropdown to appear and select the first result
-        dropdown = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.CLASS_NAME,
-                    "result_list ",
-                )
-            )
-        )
-
-        address_element = (
-            WebDriverWait(driver, 10)
-            .until(
-                EC.element_to_be_clickable(
-                    (By.XPATH, f"//li[starts-with(@aria-label, '{user_paon}')]")
-                )
-            )
-            .click()
-        )
-
-        search_button = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (By.XPATH, "//input[@type='submit' and @value='Search']")
-            )
-        )
-        search_button.click()
-
-        time.sleep(10)
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-        soup.prettify()
-
-        # Find the table by class name
-        table = soup.find("table", {"class": "table listing table-striped"})
-
-        # Iterate over each row in the tbody of the table
-        for row in table.find("tbody").find_all("tr"):
-            # Extract the service, day, and date for each row
-            service = row.find_all("td")[0].get_text(strip=True)
-            date = row.find_all("td")[2].get_text(strip=True)
-
-            dict_data = {
-                "type": service,
-                "collectionDate": datetime.strptime(date, "%a, %d %b %Y").strftime(
-                    date_format
-                ),
-            }
-            bindata["bins"].append(dict_data)
-
+        driver = None
+        try:
+            # Get and check UPRN
+            user_postcode = kwargs.get("postcode")
+            user_paon = kwargs.get("paon")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            bindata = {"bins": []}
+
+            API_URL = "https://portal.digital.ashfield.gov.uk/w/webpage/raise-case?service=bin_calendar"
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(API_URL)
+
+            title = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "sub_page_title"))
+            )
+
+            # Wait for the postcode field to appear then populate it
+            WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
+                )
+            )
+
+            inputElement_postcode = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.CSS_SELECTOR, "input.relation_path_type_ahead_search")
+                )
+            )
+            inputElement_postcode.clear()
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Wait for the 'Select your property' dropdown to appear and select the first result
+            dropdown = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.CLASS_NAME,
+                        "result_list ",
+                    )
+                )
+            )
+
+            address_element = (
+                WebDriverWait(driver, 10)
+                .until(
+                    EC.element_to_be_clickable(
+                        (By.XPATH, f"//li[starts-with(@aria-label, '{user_paon}')]")
+                    )
+                )
+                .click()
+            )
+
+            search_button = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (By.XPATH, "//input[@type='submit' and @value='Search']")
+                )
+            )
+            search_button.click()
+
+            time.sleep(10)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            # Find the table by class name
+            table = soup.find("table", {"class": "table listing table-striped"})
+
+            # Iterate over each row in the tbody of the table
+            for row in table.find("tbody").find_all("tr"):
+                # Extract the service, day, and date for each row
+                service = row.find_all("td")[0].get_text(strip=True)
+                date = row.find_all("td")[2].get_text(strip=True)
+
+                dict_data = {
+                    "type": service,
+                    "collectionDate": datetime.strptime(date, "%a, %d %b %Y").strftime(
+                        date_format
+                    ),
+                }
+                bindata["bins"].append(dict_data)
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+
         return bindata
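
All three Selenium-based council scrapers in this release gain the same hardening: `driver` is initialised to `None` before the `try`, and the `finally` block quits it, so a failed lookup can no longer leak a browser process. A minimal sketch of the pattern, assuming the project's `create_webdriver` helper lives in `uk_bin_collection.uk_bin_collection.common` (its call signature matches the hunks above); the URL and scraping body are placeholders, not real council logic:

```python
# Minimal sketch of the driver-cleanup pattern applied in this release.
# Assumption: create_webdriver is importable from the project's common module.
from uk_bin_collection.uk_bin_collection.common import create_webdriver


def parse_data(page: str, **kwargs) -> dict:
    driver = None
    bindata = {"bins": []}
    try:
        driver = create_webdriver(
            kwargs.get("web_driver"), kwargs.get("headless"), None, __name__
        )
        driver.get("https://example.org/bin-calendar")  # placeholder URL
        # ... locate elements and append entries to bindata["bins"] ...
    except Exception as e:
        print(f"An error occurred: {e}")
        raise  # re-raise so callers still see the failure
    finally:
        # Runs on success and failure alike, so the browser never
        # outlives the request, even if a selector times out mid-scrape.
        if driver:
            driver.quit()
    return bindata
```

Initialising `driver = None` before the `try` matters: if `create_webdriver` itself raises, the `finally` block can still evaluate `if driver:` without a `NameError`.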
--- a/uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py
@@ -21,121 +21,130 @@ class CouncilClass(AbstractGetBinDataClass):

     def parse_data(self, page: str, **kwargs) -> dict:
         driver = None
-        data = {"bins": []}
-        user_paon = kwargs.get("paon")
-        user_postcode = kwargs.get("postcode")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_paon(user_paon)
-        check_postcode(user_postcode)
-
-        user_paon = user_paon.upper()
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get("https://en.powys.gov.uk/binday")
-
-        accept_button = WebDriverWait(driver, timeout=10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.NAME,
-                    "acceptall",
-                )
-            )
-        )
-        accept_button.click()
-
-        # Wait for the postcode field to appear then populate it
-        inputElement_postcode = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPPOSTCODE")
-            )
-        )
-        inputElement_postcode.send_keys(user_postcode)
-
-        # Click search button
-        findAddress = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPSEARCH")
-            )
-        )
-        findAddress.click()
-
-        # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
-        WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.XPATH,
-                    "//select[@id='BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPADDRESS']//option[contains(., '"
-                    + user_paon
-                    + "')]",
-                )
-            )
-        ).click()
-
-        # Wait for the submit button to appear, then click it to get the collection dates
-        WebDriverWait(driver, 30).until(
-            EC.element_to_be_clickable(
-                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPBUTTONS_NEXT")
-            )
-        ).click()
-
-        # Wait for the collections table to appear
-        WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "BINDAYLOOKUP_COLLECTIONDATES_COLLECTIONDATES")
-            )
-        )
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-        # General rubbish collection dates
-        general_rubbish_section = soup.find(
-            "h3", string="General Rubbish / Wheelie bin"
-        )
-        general_rubbish_dates = [
-            li.text for li in general_rubbish_section.find_next("ul").find_all("li")
-        ]
-
-        for date in general_rubbish_dates:
-            dict_data = {
-                "type": "General Rubbish / Wheelie bin",
-                "collectionDate": datetime.strptime(
-                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        # Recycling and food waste collection dates
-        recycling_section = soup.find("h3", string="Recycling and Food Waste")
-        recycling_dates = [
-            li.text for li in recycling_section.find_next("ul").find_all("li")
-        ]
-
-        for date in recycling_dates:
-            dict_data = {
-                "type": "Recycling and Food Waste",
-                "collectionDate": datetime.strptime(
-                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        # Garden waste collection dates
-        garden_waste_section = soup.find("h3", string="Garden Waste")
-        garden_waste_dates = [
-            li.text for li in garden_waste_section.find_next("ul").find_all("li")
-        ]
-        for date in garden_waste_dates:
-            try:
-                dict_data = {
-                    "type": "Garden Waste",
-                    "collectionDate": datetime.strptime(
-                        remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
-                    ).strftime(date_format),
-                }
-                data["bins"].append(dict_data)
-            except:
-                continue
-
+        try:
+            data = {"bins": []}
+            user_paon = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+
+            user_paon = user_paon.upper()
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get("https://en.powys.gov.uk/binday")
+
+            accept_button = WebDriverWait(driver, timeout=10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.NAME,
+                        "acceptall",
+                    )
+                )
+            )
+            accept_button.click()
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPPOSTCODE")
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPSEARCH")
+                )
+            )
+            findAddress.click()
+
+            # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
+            WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.XPATH,
+                        "//select[@id='BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPADDRESS']//option[contains(., '"
+                        + user_paon
+                        + "')]",
+                    )
+                )
+            ).click()
+
+            # Wait for the submit button to appear, then click it to get the collection dates
+            WebDriverWait(driver, 30).until(
+                EC.element_to_be_clickable(
+                    (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPBUTTONS_NEXT")
+                )
+            ).click()
+
+            # Wait for the collections table to appear
+            WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "BINDAYLOOKUP_COLLECTIONDATES_COLLECTIONDATES")
+                )
+            )
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            # General rubbish collection dates
+            general_rubbish_section = soup.find(
+                "h3", string="General Rubbish / Wheelie bin"
+            )
+            general_rubbish_dates = [
+                li.text for li in general_rubbish_section.find_next("ul").find_all("li")
+            ]
+
+            for date in general_rubbish_dates:
+                dict_data = {
+                    "type": "General Rubbish / Wheelie bin",
+                    "collectionDate": datetime.strptime(
+                        remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                    ).strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            # Recycling and food waste collection dates
+            recycling_section = soup.find("h3", string="Recycling and Food Waste")
+            recycling_dates = [
+                li.text for li in recycling_section.find_next("ul").find_all("li")
+            ]
+
+            for date in recycling_dates:
+                dict_data = {
+                    "type": "Recycling and Food Waste",
+                    "collectionDate": datetime.strptime(
+                        remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                    ).strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            # Garden waste collection dates
+            garden_waste_section = soup.find("h3", string="Garden Waste")
+            garden_waste_dates = [
+                li.text for li in garden_waste_section.find_next("ul").find_all("li")
+            ]
+            for date in garden_waste_dates:
+                try:
+                    dict_data = {
+                        "type": "Garden Waste",
+                        "collectionDate": datetime.strptime(
+                            remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                        ).strftime(date_format),
+                    }
+                    data["bins"].append(dict_data)
+                except:
+                    continue
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+
         return data
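
The Powys scraper runs every list entry through `remove_ordinal_indicator_from_date_string` before `strptime`, because the site publishes dates with ordinal suffixes that `%d %B %Y` cannot match. The real helper lives in `uk_bin_collection/uk_bin_collection/common.py`; the regex stand-in below and the sample date are illustrative only:

```python
import re
from datetime import datetime


def strip_ordinal(date_string: str) -> str:
    # Drop the "st"/"nd"/"rd"/"th" suffix after day numbers:
    # "1st August 2024" -> "1 August 2024"
    return re.sub(r"(\d+)(st|nd|rd|th)", r"\1", date_string)


# strptime has no directive for ordinal suffixes, hence the pre-processing.
print(datetime.strptime(strip_ordinal("1st August 2024"), "%d %B %Y").strftime("%d/%m/%Y"))
# -> 01/08/2024
```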
--- a/uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py
@@ -16,44 +16,53 @@ class CouncilClass(AbstractGetBinDataClass):
     """

     def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = f"https://www.teignbridge.gov.uk/repositories/hidden-pages/bin-finder?uprn={user_uprn}"
-
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(URI)
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-        collection_dates = soup.find_all(
-            "h3"
-        )  # Assuming bin types are inside <h3> tags
-        bin_type_headers = soup.find_all(
-            "div", {"class": "binInfoContainer"}
-        )  # Assuming collection dates are inside <p> tags
-
-        # Iterate over the results and extract bin type and collection dates
-        for i, date in enumerate(collection_dates):
-            collection_date = date.get_text(strip=True)
-
-            bin_types = bin_type_headers[i].find_all("div")
-            for bin_type in bin_types:
-                dict_data = {
-                    "type": bin_type.text.strip(),
-                    "collectionDate": datetime.strptime(
-                        collection_date,
-                        "%d %B %Y%A",
-                    ).strftime("%d/%m/%Y"),
-                }
-                bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
+        driver = None
+        try:
+            user_uprn = kwargs.get("uprn")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            bindata = {"bins": []}
+
+            URI = f"https://www.teignbridge.gov.uk/repositories/hidden-pages/bin-finder?uprn={user_uprn}"
+
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(URI)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            collection_dates = soup.find_all(
+                "h3"
+            )  # Assuming bin types are inside <h3> tags
+            bin_type_headers = soup.find_all(
+                "div", {"class": "binInfoContainer"}
+            )  # Assuming collection dates are inside <p> tags
+
+            # Iterate over the results and extract bin type and collection dates
+            for i, date in enumerate(collection_dates):
+                collection_date = date.get_text(strip=True)
+
+                bin_types = bin_type_headers[i].find_all("div")
+                for bin_type in bin_types:
+                    dict_data = {
+                        "type": bin_type.text.strip(),
+                        "collectionDate": datetime.strptime(
+                            collection_date,
+                            "%d %B %Y%A",
+                        ).strftime("%d/%m/%Y"),
+                    }
+                    bindata["bins"].append(dict_data)
+
+            bindata["bins"].sort(
+                key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+            )
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
         return bindata
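
The unusual `%d %B %Y%A` format here is worth a note: the Teignbridge page renders each heading as the date immediately followed by the weekday with no separator, and `strptime` copes because `%Y` consumes exactly four digits before `%A` takes the weekday name. The subsequent sort then has to re-parse the formatted strings, since `dd/mm/yyyy` values do not sort chronologically as text. A small stdlib-only illustration (the sample heading text is assumed, not taken from the live page):

```python
from datetime import datetime

# "%Y" matches exactly four digits, so the weekday glued onto the end is
# left for "%A" to consume. Sample value assumed from the page markup.
heading = "16 December 2024Monday"
print(datetime.strptime(heading, "%d %B %Y%A").strftime("%d/%m/%Y"))  # 16/12/2024

# "dd/mm/yyyy" strings don't sort chronologically as text, so the diff
# re-parses them inside the sort key:
bins = [{"collectionDate": "02/01/2025"}, {"collectionDate": "16/12/2024"}]
bins.sort(key=lambda x: datetime.strptime(x["collectionDate"], "%d/%m/%Y"))
print([b["collectionDate"] for b in bins])  # ['16/12/2024', '02/01/2025']
```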
--- a/uk_bin_collection-0.130.0.dist-info/METADATA
+++ b/uk_bin_collection-0.131.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.130.0
+Version: 0.131.0
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
@@ -80,6 +80,8 @@ This integration can be installed directly via HACS. To install:
 * [Add the repository](https://my.home-assistant.io/redirect/hacs_repository/?owner=robbrad&repository=UKBinCollectionData&category=integration) to your HACS installation
 * Click `Download`

+For details on how to set up the custom component integration, see [this documentation](https://github.com/robbrad/UKBinCollectionData/tree/master/custom_components/uk_bin_collection).
+
 #### Manual
 1. Ensure you have [HACS](https://hacs.xyz/) installed
 1. In the Home Assistant UI go to `HACS` > `Integrations` > `⋮` > `Custom repositories`.
@@ -354,11 +356,10 @@ Since the Raspberry Pi 4 uses an ARM64-based architecture, use the `seleniarm/st

 ## Reports

-### Nightly Full Integration Test Reports:
-- [3.12 Full](https://robbrad.github.io/UKBinCollectionData/3.12/full)
+All integration test results are in [CodeCov](https://app.codecov.io/gh/robbrad/UKBinCollectionData/)

-### Partial Pull Request Test Reports
-- [3.12 Partial](https://robbrad.github.io/UKBinCollectionData/3.12/partial)
+### Nightly Full Integration Test Reports:
+- [Nightly Council Test](https://app.codecov.io/gh/robbrad/UKBinCollectionData/tests/master)

 ---
 ## Docker API Server
--- a/uk_bin_collection-0.130.0.dist-info/RECORD
+++ b/uk_bin_collection-0.131.0.dist-info/RECORD
@@ -12,7 +12,7 @@ uk_bin_collection/tests/test_conftest.py,sha256=qI_zgGjNOnwE9gmZUiuirL1SYz3TFw5y
 uk_bin_collection/tests/test_get_data.py,sha256=sFJz_Fd6o-1r2gdmzY52JGwVi0Of_mDzvYSoc7a3RUw,7239
 uk_bin_collection/uk_bin_collection/collect_data.py,sha256=dB7wWXsJX4fm5bIf84lexkvHIcO54CZ3JPxqmS-60YY,4654
 uk_bin_collection/uk_bin_collection/common.py,sha256=Wj6o2NaxYDE1lkpYzMFDIZiPARX0K3xmt-5bnt2kjSI,10970
-uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py,sha256=-MtmZfivccyxy52bJ0uw7brm9I4ZtE9Fxom4iWL_Xtc,4192
+uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py,sha256=l8OahAm9w9AG61VjCu0B_i51TrwB4vEl_KO0907uJPE,4197
 uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py,sha256=xhAdPFEQrPpQxQQ6Q5JYy2GS0QQ10-_y7nxoC74ARc4,1654
 uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py,sha256=ppbrmm-MzB1wOulK--CU_0j4P-djNf3ozMhHnmQFqLo,1511
 uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py,sha256=Hp5pteaC5RjL5ZqPZ564S9WQ6ZTKLMO6Dl_fxip2TUc,1653
@@ -20,7 +20,7 @@ uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py,sha256=E
 uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py,sha256=fJ0UvuSCbzFE9CPoxt1U9CJeFsbTKts_5GRBc3E9Eno,2201
 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
-uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=2kZt9HGCVK-n0aq2VFWG6yiWihXjRf8MnksdQLMj4LU,3555
+uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=fhX7S_A3jqoND7NE6qITPMPvdk3FJSKZ3Eoa5RtSg3I,4247
 uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=yC-8UMQHSbvze43PJ2_F4Z3cu7M7cynKTojipBJU7Ug,4307
 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
@@ -204,7 +204,7 @@ uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py,sha256=d_bY0cX
 uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py,sha256=Kos5GzN2co3Ij3tSHOXB9S71Yt78RROCfVRtnh7M1VU,3657
 uk_bin_collection/uk_bin_collection/councils/PlymouthCouncil.py,sha256=FJqpJ0GJhpjYeyZ9ioZPkKGl-zrqMD3y5iKa07e_i30,3202
 uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py,sha256=xogNgVvwM5FljCziiNLgZ_wzkOnrQkifi1dkPMDRMtg,5588
-uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=FYdENaJ1ekGAPxd75b0wrizhlDU0SOu03jXoyon6M7Y,5094
+uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=db3Y5FJz-LFDqmVZqPdzcBxh0Q26OFPrbUxlQ7r4vsQ,5896
 uk_bin_collection/uk_bin_collection/councils/PrestonCityCouncil.py,sha256=3Nuin2hQsiEsbJR_kHldtzRhzmnPFctH7C7MFG7thj8,3838
 uk_bin_collection/uk_bin_collection/councils/ReadingBoroughCouncil.py,sha256=ZlQjU0IeKylGE9VlivSMh4XKwoLgntESPiylSOYkuD4,1009
 uk_bin_collection/uk_bin_collection/councils/RedditchBoroughCouncil.py,sha256=8QmcpStCT7c-CLhmiQ8ZeEyvtysU110VDiMQdfQTErk,2469
@@ -257,7 +257,7 @@ uk_bin_collection/uk_bin_collection/councils/SwanseaCouncil.py,sha256=nmVPoPhnFg
 uk_bin_collection/uk_bin_collection/councils/SwindonBoroughCouncil.py,sha256=lSIykpkBjVwQSf3rrnrNuh7YRepgnkKQLbf1iErMuJs,1932
 uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py,sha256=k2TAAZG7n2S1BWVyxbE_-4-lZuzhOimCNz4yimUCOGk,1995
 uk_bin_collection/uk_bin_collection/councils/TandridgeDistrictCouncil.py,sha256=KLVvM2NNq_DQylVe5dwO2l7qPahLHg08jJGLCv1MBQ4,2324
-uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py,sha256=vSnQ7UyKEKozhRg3B6BtUhgv8yo719aqAeoXhRP2CW8,2164
+uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py,sha256=-NowMNcxsnktzUxTk-XUfzFJgXKSSerCmdZ7cN4cE1s,2703
 uk_bin_collection/uk_bin_collection/councils/TelfordAndWrekinCouncil.py,sha256=p1ZS5R4EGxbEWlRBrkGXgKwE_lkyBT-R60yKFFhVObc,1844
 uk_bin_collection/uk_bin_collection/councils/TendringDistrictCouncil.py,sha256=DJbYI8m6lIISDrK5h8V5Jo-9kGG7kr9dz7GD8St4nc8,4274
 uk_bin_collection/uk_bin_collection/councils/TestValleyBoroughCouncil.py,sha256=Dtfkyrwt795W7gqFJxVGRR8t3R5WMNQZwTWJckLpZWE,8480
@@ -306,8 +306,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.130.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
-uk_bin_collection-0.130.0.dist-info/METADATA,sha256=BnCUzurUgw3yydA2WY7gFgwqUBQ2R3h6HGT5RC4wXEo,19549
-uk_bin_collection-0.130.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-uk_bin_collection-0.130.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
-uk_bin_collection-0.130.0.dist-info/RECORD,,
+uk_bin_collection-0.131.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.131.0.dist-info/METADATA,sha256=5iyUTdBk4UF-jJtqD9XHWc4kOaN4M1vxTo599wZVibM,19741
+uk_bin_collection-0.131.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+uk_bin_collection-0.131.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.131.0.dist-info/RECORD,,