uk_bin_collection 0.84.1__py3-none-any.whl → 0.85.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -352,6 +352,14 @@
         "url": "https://www.eastleigh.gov.uk/waste-bins-and-recycling/collection-dates/your-waste-bin-and-recycling-collections?uprn=",
         "wiki_name": "Eastleigh Borough Council"
     },
+    "EnfieldCouncil": {
+        "url": "https://www.enfield.gov.uk/services/rubbish-and-recycling/find-my-collection-day",
+        "wiki_name": "Enfield Council",
+        "skip_get_url": true,
+        "postcode": "N13 5AJ",
+        "house_number": "111",
+        "web_driver": "http://selenium:4444"
+    },
     "EnvironmentFirst": {
         "url": "https://environmentfirst.co.uk/house.php?uprn=100060055444",
         "wiki_command_url_override": "https://environmentfirst.co.uk/house.php?uprn=XXXXXXXXXX",
@@ -0,0 +1,156 @@
+import time
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+import pdb
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            user_postcode = kwargs.get("postcode")
+            if not user_postcode:
+                raise ValueError("No postcode provided.")
+            check_postcode(user_postcode)
+
+            user_paon = kwargs.get("paon")
+            check_paon(user_paon)
+            headless = kwargs.get("headless")
+            web_driver = kwargs.get("web_driver")
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            page = "https://www.enfield.gov.uk/services/rubbish-and-recycling/find-my-collection-day"
+            driver.get(page)
+
+            time.sleep(5)
+
+            try:
+                accept_cookies = WebDriverWait(driver, timeout=10).until(
+                    EC.presence_of_element_located((By.ID, "ccc-notify-reject"))
+                )
+                accept_cookies.click()
+            except:
+                print(
+                    "Accept cookies banner not found or clickable within the specified time."
+                )
+                pass
+
+            postcode_input = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.CSS_SELECTOR, '[aria-label="Enter your address"]')
+                )
+            )
+
+            postcode_input.send_keys(user_postcode)
+
+            find_address_button = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, 'submitButton0')
+                )
+            )
+            find_address_button.click()
+
+            time.sleep(15)
+            # Wait for address box to be visible
+            select_address_input = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.CSS_SELECTOR,
+                        '[aria-label="Select full address"]',
+                    )
+                )
+            )
+
+            # Select address based
+            select = Select(select_address_input)
+            # Grab the first option as a template
+            first_option = select.options[0].accessible_name
+            template_parts = first_option.split(", ")
+            template_parts[0] = user_paon  # Replace the first part with user_paon
+
+            addr_label = ", ".join(template_parts)
+            for addr_option in select.options:
+                option_name = addr_option.accessible_name[0 : len(addr_label)]
+                if option_name == addr_label:
+                    break
+            select.select_by_value(addr_option.text)
+
+            time.sleep(10)
+            # Wait for the specified div to be present
+            target_div_id = "FinalResults"
+            target_div = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, target_div_id))
+            )
+
+            time.sleep(5)
+            soup = BeautifulSoup(driver.page_source, "html.parser")
+
+            # Find the div with the specified id
+            target_div = soup.find("div", {"id": target_div_id})
+
+
+            # Check if the div is found
+            if target_div:
+                bin_data = {"bins": []}
+
+                for bin_div in target_div.find_all(
+                    "div"
+                ):
+                    # Extract the collection date from the message
+                    try:
+                        bin_collection_message = bin_div.find("p").text.strip()
+                        date_pattern = r"\b\d{2}/\d{2}/\d{4}\b"
+
+                        collection_date_string = (
+                            re.search(date_pattern, bin_div.text)
+                            .group(0)
+                            .strip()
+                            .replace(",", "")
+                        )
+                    except AttributeError:
+                        continue
+
+                    current_date = datetime.now()
+                    parsed_date = datetime.strptime(
+                        collection_date_string, "%d/%m/%Y"
+                    )
+                    # Check if the parsed date is in the past and not today
+                    if parsed_date.date() < current_date.date():
+                        # If so, set the year to the next year
+                        parsed_date = parsed_date.replace(year=current_date.year + 1)
+                    else:
+                        # If not, set the year to the current year
+                        parsed_date = parsed_date.replace(year=current_date.year)
+                    formatted_date = parsed_date.strftime("%d/%m/%Y")
+                    contains_date(formatted_date)
+
+                    # Extract the bin type from the message
+                    bin_type_match = re.search(r"Your next (.*?) collection", bin_collection_message)
+                    if bin_type_match:
+                        bin_info = {"type": bin_type_match.group(1), "collectionDate": formatted_date}
+                        bin_data["bins"].append(bin_info)
+            else:
+                raise ValueError("Collection data not found.")
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bin_data
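
The core of the parser above is the per-paragraph extraction: a dd/mm/yyyy date is pulled out with a regex, nudged to the following year if it appears to be in the past, and the bin type is taken from the "Your next ... collection" wording. A standalone sketch of that logic (the sample message text is an assumption inferred from the regexes):

import re
from datetime import datetime

def extract_bin(message, today):
    # Mirror of the extraction in EnfieldCouncil.parse_data, reduced to a pure function
    date_match = re.search(r"\b\d{2}/\d{2}/\d{4}\b", message)
    type_match = re.search(r"Your next (.*?) collection", message)
    if not (date_match and type_match):
        return None
    parsed = datetime.strptime(date_match.group(0), "%d/%m/%Y")
    # Dates that have already passed are assumed to belong to next year
    if parsed.date() < today.date():
        parsed = parsed.replace(year=today.year + 1)
    else:
        parsed = parsed.replace(year=today.year)
    return {"type": type_match.group(1), "collectionDate": parsed.strftime("%d/%m/%Y")}

# Hypothetical message text, roughly what the FinalResults panel contains
print(extract_bin("Your next recycling collection is on 14/03/2024.", datetime(2024, 3, 1)))
# -> {'type': 'recycling', 'collectionDate': '14/03/2024'}
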
@@ -8,214 +8,136 @@ from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
 
 
-# import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
 
     def parse_data(self, page: str, **kwargs) -> dict:
-        requests.packages.urllib3.disable_warnings()
-        data = {"bins": []}
+
         user_uprn = kwargs.get("uprn")
         user_postcode = kwargs.get("postcode")
         check_uprn(user_uprn)
         check_postcode(user_postcode)
 
-        # Get form data
+        requests.packages.urllib3.disable_warnings()
         s = requests.Session()
-        cookies = {
-            "ntc-cookie-policy": "1",
-            "SSESS6ec6d5d2d471c0357053d5993a839bce": "qBdR7XhmSMd5_PDBIqG0It2R0Fq67igrejRY-WOcskE",
-            "has_js": "1",
-        }
-        headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
-            "Accept-Language": "en-GB,en;q=0.7",
-            "Cache-Control": "max-age=0",
-            "Connection": "keep-alive",
-            "Origin": "https://my.northtyneside.gov.uk",
-            "Referer": "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
-            "Sec-Fetch-Dest": "document",
-            "Sec-Fetch-Mode": "navigate",
-            "Sec-Fetch-Site": "same-origin",
-            "Sec-Fetch-User": "?1",
-            "Sec-GPC": "1",
-            "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
-            "sec-ch-ua": '"Not?A_Brand";v="8", "Chromium";v="108", "Brave";v="108"',
-            "sec-ch-ua-mobile": "?0",
-            "sec-ch-ua-platform": '"Windows"',
-        }
-        ajax_data = {
-            "postcode": user_postcode,
-            "form_build_id": "form-BQ47tM0NKADE0s8toYkdSef3QBn6lDM-yBseqIOho80",
-            "form_id": "ntc_address_wizard",
-            "_triggering_element_name": "op",
-            "_triggering_element_value": "Find",
-            "ajax_html_ids[]": [
-                "ntc-web-my",
-                "skip-link",
-                "navbar",
-                "navbar-collapse",
-                "search-block-form",
-                "ntc-web-search-input-label",
-                "ntc-web-search-input",
-                "ui-id-1",
-                "ntc-web-main",
-                "main-content",
-                "block-system-main",
-                "web-drupal-content",
-                "web-drupal-content-main",
-                "node-4024",
-                "block-ntc-address-ntc-address-finder",
-                "wizard-form-wrapper",
-                "ntc-address-wizard",
-                "edit-postcode",
-                "edit-find",
-                "backtotop",
-            ],
-            "ajax_page_state[theme]": "ntc_bootstrap",
-            "ajax_page_state[theme_token]": "LN05JIzI6rocWDiBpDyVeywYveuS4jlxD_N0_hhp2Ko",
-            "ajax_page_state[css][0]": "1",
-            "ajax_page_state[css][modules/system/system.base.css]": "1",
-            "ajax_page_state[css][misc/ui/jquery.ui.core.css]": "1",
-            "ajax_page_state[css][misc/ui/jquery.ui.theme.css]": "1",
-            "ajax_page_state[css][misc/ui/jquery.ui.menu.css]": "1",
-            "ajax_page_state[css][misc/ui/jquery.ui.autocomplete.css]": "1",
-            "ajax_page_state[css][sites/all/modules/calendar/css/calendar_multiday.css]": "1",
-            "ajax_page_state[css][sites/all/modules/date/date_repeat_field/date_repeat_field.css]": "1",
-            "ajax_page_state[css][modules/field/theme/field.css]": "1",
-            "ajax_page_state[css][modules/node/node.css]": "1",
-            "ajax_page_state[css][sites/all/modules/youtube/css/youtube.css]": "1",
-            "ajax_page_state[css][sites/all/modules/views/css/views.css]": "1",
-            "ajax_page_state[css][sites/all/modules/back_to_top/css/back_to_top.css]": "1",
-            "ajax_page_state[css][sites/all/modules/ckeditor/css/ckeditor.css]": "1",
-            "ajax_page_state[css][sites/all/modules/ctools/css/ctools.css]": "1",
-            "ajax_page_state[css][sites/all/modules/panels/css/panels.css]": "1",
-            "ajax_page_state[css][sites/all/modules/taxonomy_access/taxonomy_access.css]": "1",
-            "ajax_page_state[css][sites/all/modules/search_autocomplete/css/themes/minimal.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/bootstrap.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/generic.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/custom.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/components.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/modules.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/fostering.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/responsive.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/ie10.css]": "1",
-            "ajax_page_state[css][sites/all/themes/ntc_bootstrap/css/ie.css]": "1",
-            "ajax_page_state[js][0]": "1",
-            "ajax_page_state[js][1]": "1",
-            "ajax_page_state[js][sites/all/themes/bootstrap/js/bootstrap.js]": "1",
-            "ajax_page_state[js][//ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js]": "1",
-            "ajax_page_state[js][misc/jquery-extend-3.4.0.js]": "1",
-            "ajax_page_state[js][misc/jquery-html-prefilter-3.5.0-backport.js]": "1",
-            "ajax_page_state[js][misc/jquery.once.js]": "1",
-            "ajax_page_state[js][misc/drupal.js]": "1",
-            "ajax_page_state[js][//ajax.googleapis.com/ajax/libs/jqueryui/1.10.2/jquery-ui.min.js]": "1",
-            "ajax_page_state[js][sites/all/modules/jquery_update/replace/ui/external/jquery.cookie.js]": "1",
-            "ajax_page_state[js][sites/all/modules/jquery_update/replace/misc/jquery.form.min.js]": "1",
-            "ajax_page_state[js][misc/ajax.js]": "1",
-            "ajax_page_state[js][sites/all/modules/jquery_update/js/jquery_update.js]": "1",
-            "ajax_page_state[js][sites/all/modules/back_to_top/js/back_to_top.js]": "1",
-            "ajax_page_state[js][sites/all/themes/bootstrap/js/misc/_progress.js]": "1",
-            "ajax_page_state[js][sites/all/modules/field_group/field_group.js]": "1",
-            "ajax_page_state[js][sites/all/modules/search_autocomplete/js/jquery.autocomplete.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/NTC.jquery.contentMenuScroller.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/NTC.jquery.alertClose.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/NTC.jquery.activeTrail.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/NTC.jquery.expandLinkToDiv.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/NTC.jquery.events.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/cookieconsent.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/google-analytics.js]": "1",
-            "ajax_page_state[js][sites/all/themes/ntc_bootstrap/scripts/ios-orientationchange-fix.js]": "1",
-            "ajax_page_state[js][sites/all/themes/bootstrap/js/misc/ajax.js]": "1",
-            "ajax_page_state[jquery_version]": "1.10",
-        }
-        uprn_data = {
-            "house_number": "0000" + f"{user_uprn}",
-            "op": "Use",
-            "form_build_id": "form-BQ47tM0NKADE0s8toYkdSef3QBn6lDM-yBseqIOho80",
-            "form_id": "ntc_address_wizard",
-        }
-        collections = []
 
-        response = s.post(
-            "https://my.northtyneside.gov.uk/system/ajax",
-            # cookies=cookies,
-            headers=headers,
-            data=ajax_data,
-            verify=False,
+        # Get the first form
+        response = s.get(
+            "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
+            verify = False,
         )
+
+        # Find the form ID and submit with a postcode
+        soup = BeautifulSoup(response.text, features="html.parser")
+        form_build_id = soup.find("input", {"name": "form_build_id"})["value"]
         response = s.post(
             "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
-            # cookies=cookies,
-            headers=headers,
-            data=uprn_data,
-            verify=False,
+            data = {
+                "postcode": user_postcode,
+                "op": "Find",
+                "form_build_id": form_build_id,
+                "form_id": "ntc_address_wizard",
+            },
+            verify = False,
         )
-        response = s.get(
+
+        # Find the form ID and submit with the UPRN
+        soup = BeautifulSoup(response.text, features="html.parser")
+        form_build_id = soup.find("input", {"name": "form_build_id"})["value"]
+        response = s.post(
             "https://my.northtyneside.gov.uk/category/81/bin-collection-dates",
-            # cookies=cookies,
-            headers=headers,
-            data=uprn_data,
-            verify=False,
+            data = {
+                "house_number": f"0000{user_uprn}",
+                "op": "Use",
+                "form_build_id": form_build_id,
+                "form_id": "ntc_address_wizard",
+            },
+            verify = False,
         )
 
-        # Parse form page and get the day of week text
+        # Parse form page and get the day of week and week offsets
         soup = BeautifulSoup(response.text, features="html.parser")
-        soup.prettify()
-        bin_text = soup.find("section", {"class": "block block-ntc-bins clearfix"})
-        regular_text = bin_text.select("p:nth-child(2) > strong")[0].text.strip()
-        x = bin_text.select("p:nth-child(4) > strong")
-        if len(bin_text.select("p:nth-child(4) > strong")) == 1:
-            special_text = bin_text.select("p:nth-child(4) > strong")[0].text.strip()
-        else:
-            special_text = bin_text.select("p:nth-child(5) > strong")[0].text.strip()
-
-        # Since calendar only shows until end of March 2024, work out how many weeks that is
-        weeks_total = math.floor((datetime(2024, 4, 1) - datetime.now()).days / 7)
+        info_section = soup.find("section", {"class": "block block-ntc-bins clearfix"})
+
+        regular_day, garden_day, special_day = None, None, None
+        # Get day of week and week label for refuse, garden and special collections.
+        # Week label is A or B. Convert that to an int to use as an offset.
+        for anchor in info_section.findAll("a"):
+            if anchor.text.startswith("Refuse and Recycling"):
+                regular_day = anchor.text.strip().split()[-3]
+                if anchor.text.strip().split()[-1] == "A":
+                    regular_week = 0
+                else:
+                    regular_week = 1
+            elif anchor.text.startswith("Garden Waste"):
+                garden_day = anchor.text.strip().split()[-3]
+                if anchor.text.strip().split()[-1] == "A":
+                    garden_week = 0
+                else:
+                    garden_week = 1
+        for para in info_section.findAll("p"):
+            if para.text.startswith("Your special collections day"):
+                special_day = para.find("strong").text.strip()
+
+        # The regular calendar only shows until end of March 2026, work out how many weeks that is
+        weeks_total = math.floor((datetime(2026, 4, 1) - datetime.now()).days / 7)
 
+        # The garden calendar only shows until end of November 2024, work out how many weeks that is
+        garden_weeks_total = math.floor((datetime(2024, 12, 1) - datetime.now()).days / 7)
+
+        regular_collections, garden_collections, special_collections = [], [], []
         # Convert day text to series of dates using previous calculation
-        regular_collections = get_weekday_dates_in_period(
-            datetime.today(),
-            days_of_week.get(regular_text.capitalize()),
-            amount=weeks_total,
-        )
-        special_collections = get_weekday_dates_in_period(
-            datetime.today(), days_of_week.get(special_text.capitalize())
-        )
+        if regular_day is not None:
+            regular_collections = get_weekday_dates_in_period(
+                datetime.today(),
+                days_of_week.get(regular_day.capitalize()),
+                amount=weeks_total,
+            )
+        if garden_day is not None:
+            garden_collections = get_weekday_dates_in_period(
+                datetime.today(),
+                days_of_week.get(garden_day.capitalize()),
+                amount=garden_weeks_total,
+            )
+        if special_day is not None:
+            special_collections = get_weekday_dates_in_period(
+                datetime.today(),
+                days_of_week.get(special_day.capitalize()),
+                amount=weeks_total,
+            )
 
-        # Differentiate between regular and recycling bins
+        collections = []
+
+        # Add regular collections, and differentiate between regular and recycling bins
         for item in regular_collections:
             item_as_date = datetime.strptime(item, date_format)
             # Check if holiday (calendar only has one day that's a holiday, and it's moved to the next day)
             if is_holiday(item_as_date, Region.ENG):
                 item_as_date += timedelta(days=1)
-            # Use the isoweek number to separate collections - at the time of writing 11th Jan is week 2, which
-            # is for the grey bin
-            if (item_as_date.date().isocalendar()[1] % 2) == 0:
-                collections.append(("Regular bin (green)", item_as_date))
-
+            # Use the isoweek number to separate collections based on week label.
+            if (item_as_date.date().isocalendar()[1] % 2) == regular_week:
+                collections.append(("Refuse (green)", item_as_date))
             else:
-                collections.append(("Recycling bin (grey)", item_as_date))
+                collections.append(("Recycling (grey)", item_as_date))
+
+        # Add garden collections
+        for item in garden_collections:
+            item_as_date = datetime.strptime(item, date_format)
+            # Garden collections do not move for bank holidays
+            if (item_as_date.date().isocalendar()[1] % 2) == garden_week:
+                collections.append(("Garden Waste (brown)", item_as_date))
 
-        # Add the special collection dates to the collection tuple
+        # Add special collections
         collections += [
-            ("Special collection (bookable)", datetime.strptime(item, date_format))
+            ("Special Collection (bookable)", datetime.strptime(item, date_format))
             for item in special_collections
         ]
 
-        # Sort the collections tuple by date, the add to dictionary and return
-        ordered_data = sorted(collections, key=lambda x: x[1])
-        data = {"bins": []}
-        for item in ordered_data:
-            dict_data = {
-                "type": item[0],
-                "collectionDate": item[1].strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        return data
+        return {
+            "bins": [
+                {
+                    "type": item[0],
+                    "collectionDate": item[1].strftime(date_format),
+                }
+                for item in sorted(collections, key=lambda x: x[1])
+            ]
+        }
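
The rewrite above replaces the hard-coded Drupal AJAX payload (static form_build_id, a copied session cookie and a full set of browser headers) with a simpler flow: fetch the form, scrape the current form_build_id, post the postcode, scrape the regenerated form_build_id, then post the zero-padded UPRN. The result page then yields a collection day plus an A/B week label per stream, which the new code turns into a 0/1 iso-week parity offset. A condensed, hypothetical sketch of the request sequence outside the council class:

# Sketch only; the real logic lives in NorthTynesideCouncil.CouncilClass.parse_data.
import requests
from bs4 import BeautifulSoup

URL = "https://my.northtyneside.gov.uk/category/81/bin-collection-dates"

def form_build_id(html):
    # Drupal regenerates form_build_id per response, so scrape it each time
    return BeautifulSoup(html, "html.parser").find("input", {"name": "form_build_id"})["value"]

requests.packages.urllib3.disable_warnings()
s = requests.Session()

page = s.get(URL, verify=False)
page = s.post(URL, verify=False, data={
    "postcode": "NE26 2TE",  # example postcode (hypothetical)
    "op": "Find",
    "form_build_id": form_build_id(page.text),
    "form_id": "ntc_address_wizard",
})
page = s.post(URL, verify=False, data={
    "house_number": "0000" + "47097627",  # UPRN padded with "0000" (example value, hypothetical)
    "op": "Use",
    "form_build_id": form_build_id(page.text),
    "form_id": "ntc_address_wizard",
})
# page.text now contains the "block block-ntc-bins clearfix" section parsed above
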
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.84.1
+Version: 0.85.0
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=OVMlr2sTqDMgQGlSgTQ1_LeuxXpVgXPvqRb_zGV95r8,59090
+uk_bin_collection/tests/input.json,sha256=540NA9Lt73cWYw8a4mzf0LiN0HbgbA1rcY8t7gvg0eE,59397
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=LrOSt_loA1Mw3vTqaO2LpaDMu7rYJy6k5Kr-EOBln7s,3424
@@ -58,6 +58,7 @@ uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha25
 uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py,sha256=CsYdkmL-8Ty-Kz7uNdlnJnhiDMgOPah_swYgSKbaFqA,5218
 uk_bin_collection/uk_bin_collection/councils/EastSuffolkCouncil.py,sha256=qQ0oOfGd0sWcczse_B22YoeL9uj3og8v3UJLt_Sx29c,4353
 uk_bin_collection/uk_bin_collection/councils/EastleighBoroughCouncil.py,sha256=V4Vso4DvawFiezKlmXbTlJEK9Sjhz9nA8WeYjwtO2e4,2310
+uk_bin_collection/uk_bin_collection/councils/EnfieldCouncil.py,sha256=HhKHlLciZKXViqcgkWme-wBUKlGhAs5LIpkKuRETvXM,6119
 uk_bin_collection/uk_bin_collection/councils/EnvironmentFirst.py,sha256=_9QJYDHpdnYK5R6znvZk1w0F9GnPnI8G4b6I_p26h4U,1695
 uk_bin_collection/uk_bin_collection/councils/EppingForestDistrictCouncil.py,sha256=cKFllQ4zt6MGkwiz_HedZvw3iL1kRMLA6Ct2spUE5og,2085
 uk_bin_collection/uk_bin_collection/councils/ErewashBoroughCouncil.py,sha256=QTQA6NjZtTL2baDeerIQW1SQpawwu6kGDMGdVvYQRRo,2501
@@ -107,7 +108,7 @@ uk_bin_collection/uk_bin_collection/councils/NorthLincolnshireCouncil.py,sha256=
 uk_bin_collection/uk_bin_collection/councils/NorthNorfolkDistrictCouncil.py,sha256=VV_zqVZYv8ekXcUHhrBlTX_W5qLYE9IA3mT2xmrZqoI,4315
 uk_bin_collection/uk_bin_collection/councils/NorthNorthamptonshireCouncil.py,sha256=kcMN-5GBjYDM9F1BKfHoYeydub8SuDxHamJbSvJRZ68,2337
 uk_bin_collection/uk_bin_collection/councils/NorthSomersetCouncil.py,sha256=EbFVnPYZIOkru5_Y75kjljM3Cr3HIJgP-SU6hxgf6tk,2754
-uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py,sha256=waH6_PyJaiEF-v9HR3C_aUdiPTcdHlxfAuuuKBsvkII,11353
+uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py,sha256=N_ZRk3LZbIin7K4Vr8Rx-tExMW6Av7-WNQu4OOG3cfE,5921
 uk_bin_collection/uk_bin_collection/councils/NorthWestLeicestershire.py,sha256=gJj0dyQc5QUefqusKGk2LLXfWbG5tlEXUOh8KAPh3RI,4584
 uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py,sha256=2wTrr3VrZDp9-YtDPmWd649gXeWH4hbm2-Hw8Vau5Xs,1933
 uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py,sha256=KEFsxEvQ159fkuFo-fza67YCnnCZ5ElwE80zTrqDEWI,4990
@@ -182,8 +183,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=4s9ODGPAwPqwXc8SrTX5Wlfmizs3_58iXUtHc4Ir86o,1162
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.84.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
-uk_bin_collection-0.84.1.dist-info/METADATA,sha256=nW0yhLZt4-z3n5GsVUfl4_-6pae8DiR67zXNcKuhel8,16231
-uk_bin_collection-0.84.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-uk_bin_collection-0.84.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
-uk_bin_collection-0.84.1.dist-info/RECORD,,
+uk_bin_collection-0.85.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.85.0.dist-info/METADATA,sha256=6NrMstgZCRauGjucrc08jacZVh3RdoBwU9z50VW1OTA,16231
+uk_bin_collection-0.85.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+uk_bin_collection-0.85.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.85.0.dist-info/RECORD,,