uk_bin_collection 0.140.0__py3-none-any.whl → 0.141.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1493,6 +1493,15 @@
         "wiki_name": "Oxford City Council",
         "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
     },
+    "PeterboroughCityCouncil": {
+        "house_number": "7 Arundel Road, Peterborough, PE4 6JJ",
+        "postcode": "PE4 6JJ",
+        "skip_get_url": true,
+        "url": "https://report.peterborough.gov.uk/waste",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "Peterborough City Council",
+        "wiki_note": "Pass the full address as it appears on the Peterborough website, and the postcode, in their respective parameters. This parser requires a Selenium webdriver."
+    },
     "PerthAndKinrossCouncil": {
         "uprn": "124032322",
         "url": "https://www.pkc.gov.uk",
@@ -19,106 +19,117 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
     def parse_data(self, page: str, **kwargs) -> dict:
-        # Get and check UPRN
-        user_uprn = kwargs.get("uprn")
-        user_postcode = kwargs.get("postcode")
-        check_uprn(user_uprn)
-        check_postcode(user_postcode)
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        bindata = {"bins": []}
-
-        API_URL = "https://secure.ashford.gov.uk/waste/collectiondaylookup/"
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(API_URL)
-
-        # Wait for the postcode field to appear then populate it
-        inputElement_postcode = WebDriverWait(driver, 30).until(
-            EC.presence_of_element_located(
-                (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_TextBox_PostCode")
-            )
-        )
-        inputElement_postcode.send_keys(user_postcode)
-
-        # Click search button
-        findAddress = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (
-                    By.ID,
-                    "ContentPlaceHolder1_CollectionDayLookup2_Button_PostCodeSearch",
-                )
-            )
-        )
-        findAddress.click()
-
-        # Wait for the 'Select your property' dropdown to appear and select the first result
-        dropdown = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.ID,
-                    "ContentPlaceHolder1_CollectionDayLookup2_DropDownList_Addresses",
-                )
-            )
-        )
-
-        # Create a 'Select' for it, then select the first address in the list
-        # (Index 0 is "Make a selection from the list")
-        dropdownSelect = Select(dropdown)
-        dropdownSelect.select_by_value(str(user_uprn))
-
-        # Click search button
-        findAddress = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_Button_SelectAddress")
-            )
-        )
-        findAddress.click()
-
-        h4_element = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located(
-                (By.XPATH, "//h4[contains(text(), 'Collection Dates')]")
-            )
-        )
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-        bin_tables = soup.find_all("table")
-
-        for bin_table in bin_tables:
-            bin_text = bin_table.find("td", id=re.compile("CollectionDayLookup2_td_"))
-            if not bin_text:
-                continue
-
-            bin_type_soup = bin_text.find("b")
-
-            if not bin_type_soup:
-                continue
-            bin_type: str = bin_type_soup.text.strip().split(" (")[0]
-
-            date_soup = bin_text.find(
-                "span", id=re.compile(r"CollectionDayLookup2_Label_\w*_Date")
-            )
-            if not date_soup or (
-                " " not in date_soup.text.strip()
-                and date_soup.text.strip().lower() != "today"
-            ):
-                continue
-            date_str: str = date_soup.text.strip()
-            try:
-                if date_soup.text.strip().lower() == "today":
-                    date = datetime.now().date()
-                else:
-                    date = datetime.strptime(date_str.split(" ")[1], "%d/%m/%Y").date()
-
-            except ValueError:
-                continue
-
-            dict_data = {
-                "type": bin_type,
-                "collectionDate": date.strftime("%d/%m/%Y"),
-            }
-            bindata["bins"].append(dict_data)
-
-        return bindata
+        driver = None
+        try:
+            # Get and check UPRN
+            user_uprn = kwargs.get("uprn")
+            user_postcode = kwargs.get("postcode")
+            check_uprn(user_uprn)
+            check_postcode(user_postcode)
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            bindata = {"bins": []}
+
+            API_URL = "https://secure.ashford.gov.uk/waste/collectiondaylookup/"
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(API_URL)
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_TextBox_PostCode")
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "ContentPlaceHolder1_CollectionDayLookup2_Button_PostCodeSearch",
+                    )
+                )
+            )
+            findAddress.click()
+
+            # Wait for the 'Select your property' dropdown to appear and select the first result
+            dropdown = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.ID,
+                        "ContentPlaceHolder1_CollectionDayLookup2_DropDownList_Addresses",
+                    )
+                )
+            )
+
+            # Create a 'Select' for it, then select the first address in the list
+            # (Index 0 is "Make a selection from the list")
+            dropdownSelect = Select(dropdown)
+            dropdownSelect.select_by_value(str(user_uprn))
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_Button_SelectAddress")
+                )
+            )
+            findAddress.click()
+
+            h4_element = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//h4[contains(text(), 'Collection Dates')]")
+                )
+            )
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            bin_tables = soup.find_all("table")
+
+            for bin_table in bin_tables:
+                bin_text = bin_table.find("td", id=re.compile("CollectionDayLookup2_td_"))
+                if not bin_text:
+                    continue
+
+                bin_type_soup = bin_text.find("b")
+
+                if not bin_type_soup:
+                    continue
+                bin_type: str = bin_type_soup.text.strip().split(" (")[0]
+
+                date_soup = bin_text.find(
+                    "span", id=re.compile(r"CollectionDayLookup2_Label_\w*_Date")
+                )
+                if not date_soup or (
+                    " " not in date_soup.text.strip()
+                    and date_soup.text.strip().lower() != "today"
+                ):
+                    continue
+                date_str: str = date_soup.text.strip()
+                try:
+                    if date_soup.text.strip().lower() == "today":
+                        date = datetime.now().date()
+                    else:
+                        date = datetime.strptime(date_str.split(" ")[1], "%d/%m/%Y").date()
+
+                except ValueError:
+                    continue
+
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": date.strftime("%d/%m/%Y"),
+                }
+                bindata["bins"].append(dict_data)
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bindata
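The same structural change repeats in each council module below: the whole Selenium flow now runs inside try/except/finally, with driver pre-initialised to None, so the webdriver is shut down whether the scrape succeeds or raises. A minimal sketch of the pattern on its own (create_webdriver is the library helper used above; the scraping body is elided):

    def parse_data(self, page: str, **kwargs) -> dict:
        driver = None
        bindata = {"bins": []}
        try:
            # driver only becomes non-None once creation succeeds
            driver = create_webdriver(
                kwargs.get("web_driver"), kwargs.get("headless"), None, __name__
            )
            # ... navigate, scrape, and fill bindata["bins"] ...
        except Exception as e:
            print(f"An error occurred: {e}")
            raise  # propagate so callers still see the failure
        finally:
            if driver:  # skip quit() if create_webdriver itself failed
                driver.quit()
        return bindata

The None guard matters: if create_webdriver itself raises, the finally block still runs, and quit() must not be called on a driver that was never assigned.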
@@ -16,102 +16,113 @@ class CouncilClass(AbstractGetBinDataClass):
 
     def parse_data(self, page: str, **kwargs) -> dict:
         # Get and check UPRN
-        user_postcode = kwargs.get("postcode")
-        user_paon = kwargs.get("paon")
-        check_paon(user_paon)
-        check_postcode(user_postcode)
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        bindata = {"bins": []}
-
-        API_URL = "https://uhte-wrp.whitespacews.com"
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(API_URL)
-
-        # Click Find my bin collection day button
-        collectionButton = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable((By.LINK_TEXT, "Find my bin collection day"))
-        )
-        collectionButton.click()
-
-        main_content = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located((By.ID, "main-content"))
-        )
-
-        # Wait for the property number field to appear then populate it
-        inputElement_number = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.ID,
-                    "address_name_number",
-                )
-            )
-        )
-        inputElement_number.send_keys(user_paon)
-
-        # Wait for the postcode field to appear then populate it
-        inputElement_postcode = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.ID,
-                    "address_postcode",
-                )
-            )
-        )
-        inputElement_postcode.send_keys(user_postcode)
-
-        # Click search button
-        continueButton = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.ID,
-                    "Submit",
-                )
-            )
-        )
-        continueButton.click()
-
-        # Wait for the 'Search Results' to appear and select the first result
-        property = WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.CSS_SELECTOR,
-                    "li.app-subnav__section-item a",
-                    # "app-subnav__link govuk-link clicker colordarkblue fontfamilyArial fontsize12rem",
-                    # "//a[starts-with(@aria-label, '{user_paon}')]",
-                )
-            )
-        )
-        property.click()
-
-        upcoming_scheduled_collections = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located((By.ID, "upcoming-scheduled-collections"))
-        )
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-        collections = []
-        for collection in soup.find_all(
-            "u1",
-            class_="displayinlineblock justifycontentleft alignitemscenter margin0 padding0",
-        ):
-            date = collection.find(
-                "p", string=lambda text: text and "/" in text
-            ).text.strip()  # Extract date
-            service = collection.find(
-                "p", string=lambda text: text and "Collection Service" in text
-            ).text.strip()  # Extract service type
-            collections.append({"date": date, "service": service})
-
-        # Print the parsed data
-        for item in collections:
-
-            dict_data = {
-                "type": item["service"],
-                "collectionDate": item["date"],
-            }
-            bindata["bins"].append(dict_data)
-
-        return bindata
+        driver = None
+        try:
+            user_postcode = kwargs.get("postcode")
+            user_paon = kwargs.get("paon")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            bindata = {"bins": []}
+
+            API_URL = "https://uhte-wrp.whitespacews.com"
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(API_URL)
+
+            # Click Find my bin collection day button
+            collectionButton = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable((By.LINK_TEXT, "Find my bin collection day"))
+            )
+            collectionButton.click()
+
+            main_content = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "main-content"))
+            )
+
+            # Wait for the property number field to appear then populate it
+            inputElement_number = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.ID,
+                        "address_name_number",
+                    )
+                )
+            )
+            inputElement_number.send_keys(user_paon)
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.ID,
+                        "address_postcode",
+                    )
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            continueButton = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.ID,
+                        "Submit",
+                    )
+                )
+            )
+            continueButton.click()
+
+            # Wait for the 'Search Results' to appear and select the first result
+            property = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.CSS_SELECTOR,
+                        "li.app-subnav__section-item a",
+                        # "app-subnav__link govuk-link clicker colordarkblue fontfamilyArial fontsize12rem",
+                        # "//a[starts-with(@aria-label, '{user_paon}')]",
+                    )
+                )
+            )
+            property.click()
+
+            upcoming_scheduled_collections = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "upcoming-scheduled-collections"))
+            )
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+            collections = []
+            for collection in soup.find_all(
+                "u1",
+                class_="displayinlineblock justifycontentleft alignitemscenter margin0 padding0",
+            ):
+                date = collection.find(
+                    "p", string=lambda text: text and "/" in text
+                ).text.strip()  # Extract date
+                service = collection.find(
+                    "p", string=lambda text: text and "Collection Service" in text
+                ).text.strip()  # Extract service type
+                collections.append({"date": date, "service": service})
+
+            # Print the parsed data
+            for item in collections:
+
+                dict_data = {
+                    "type": item["service"],
+                    "collectionDate": item["date"],
+                }
+                bindata["bins"].append(dict_data)
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bindata
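The East Herts scrape above pairs each collection's date and service by probing <p> tags with callable string filters. A self-contained illustration of that BeautifulSoup idiom, using invented markup shaped like what the selectors above target:

    from bs4 import BeautifulSoup

    # Invented markup mirroring the structure the selectors above expect.
    html = (
        '<u1 class="displayinlineblock justifycontentleft alignitemscenter margin0 padding0">'
        "<p>28/04/2025</p>"
        "<p>Domestic Waste Collection Service</p>"
        "</u1>"
    )
    soup = BeautifulSoup(html, "html.parser")
    for collection in soup.find_all("u1"):
        # string= accepts a callable; the lambda guards against tags with no text
        date = collection.find("p", string=lambda t: t and "/" in t).text.strip()
        service = collection.find(
            "p", string=lambda t: t and "Collection Service" in t
        ).text.strip()
        print(date, service)  # 28/04/2025 Domestic Waste Collection Service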
@@ -21,141 +21,151 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_paon = kwargs.get("paon")
-        user_postcode = kwargs.get("postcode")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_paon(user_paon)
-        check_postcode(user_postcode)
-        bindata = {"bins": []}
-
-        URI_1 = "https://www.hertsmere.gov.uk/Environment-Refuse-and-Recycling/Recycling--Waste/Bin-collections/Collections-and-calendar.aspx"
-        URI_2 = "https://hertsmere-services.onmats.com/w/webpage/round-search"
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(URI_1)
-
-        soup = BeautifulSoup(driver.page_source, "html.parser")
-
-        current_week = (soup.find("li", class_="current")).text.strip()
-
-        strong = soup.find_all("strong", text=re.compile(r"^Week"))
-
-        bin_weeks = []
-        for tag in strong:
-            parent = tag.parent
-            bin_type = (
-                (parent.text).split("-")[1].strip().replace("\xa0", " ").split(" and ")
-            )
-            for bin in bin_type:
-                dict_data = {
-                    "week": tag.text.replace("\xa0", " "),
-                    "bin_type": bin,
-                }
-                bin_weeks.append(dict_data)
-
-        driver.get(URI_2)
-
-        # Wait for the postcode field to appear then populate it
-        inputElement_postcode = WebDriverWait(driver, 30).until(
-            EC.presence_of_element_located(
-                (
-                    By.CLASS_NAME,
-                    "relation_path_type_ahead_search",
-                )
-            )
-        )
-        inputElement_postcode.send_keys(user_postcode)
-
-        WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.XPATH,
-                    f"//ul[@class='result_list']/li[starts-with(@aria-label, '{user_paon}')]",
-                )
-            )
-        ).click()
-
-        WebDriverWait(driver, timeout=10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.CSS_SELECTOR,
-                    "input.fragment_presenter_template_edit.btn.bg-primary.btn-medium[type='submit']",
-                )
-            )
-        ).click()
-
-        WebDriverWait(driver, timeout=10).until(
-            EC.presence_of_element_located(
-                (By.XPATH, "//h3[contains(text(), 'Collection days')]")
-            )
-        )
-
-        soup = BeautifulSoup(driver.page_source, "html.parser")
-
-        table = soup.find("table", class_="table listing table-striped")
-
-        # Check if the table was found
-        if table:
-            # Extract table rows and cells
-            table_data = []
-            for row in table.find("tbody").find_all("tr"):
-                # Extract cell data from each <td> tag
-                row_data = [cell.get_text(strip=True) for cell in row.find_all("td")]
-                table_data.append(row_data)
-
-        else:
-            print("Table not found.")
-
-        collection_day = (table_data[0])[1]
-
-        current_week_bins = [bin for bin in bin_weeks if bin["week"] == current_week]
-        next_week_bins = [bin for bin in bin_weeks if bin["week"] != current_week]
-
-        days_of_week = [
-            "Monday",
-            "Tuesday",
-            "Wednesday",
-            "Thursday",
-            "Friday",
-            "Saturday",
-            "Sunday",
-        ]
-
-        today = datetime.now()
-        today_idx = today.weekday()  # Monday is 0 and Sunday is 6
-        target_idx = days_of_week.index(collection_day)
-
-        days_until_target = (target_idx - today_idx) % 7
-        if days_until_target == 0:
-            next_day = today
-        else:
-            next_day = today + timedelta(days=days_until_target)
-
-        current_week_dates = get_dates_every_x_days(next_day, 14, 7)
-        next_week_date = next_day + timedelta(days=7)
-        next_week_dates = get_dates_every_x_days(next_week_date, 14, 7)
-
-        for date in current_week_dates:
-            for bin in current_week_bins:
-                dict_data = {
-                    "type": bin["bin_type"],
-                    "collectionDate": date,
-                }
-                bindata["bins"].append(dict_data)
-
-        for date in next_week_dates:
-            for bin in next_week_bins:
-                dict_data = {
-                    "type": bin["bin_type"],
-                    "collectionDate": date,
-                }
-                bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
+        driver = None
+        try:
+            user_paon = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+            bindata = {"bins": []}
+
+            URI_1 = "https://www.hertsmere.gov.uk/Environment-Refuse-and-Recycling/Recycling--Waste/Bin-collections/Collections-and-calendar.aspx"
+            URI_2 = "https://hertsmere-services.onmats.com/w/webpage/round-search"
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(URI_1)
+
+            soup = BeautifulSoup(driver.page_source, "html.parser")
+
+            current_week = (soup.find("li", class_="current")).text.strip()
+
+            strong = soup.find_all("strong", text=re.compile(r"^Week"))
+
+            bin_weeks = []
+            for tag in strong:
+                parent = tag.parent
+                bin_type = (
+                    (parent.text).split("-")[1].strip().replace("\xa0", " ").split(" and ")
+                )
+                for bin in bin_type:
+                    dict_data = {
+                        "week": tag.text.replace("\xa0", " "),
+                        "bin_type": bin,
+                    }
+                    bin_weeks.append(dict_data)
+
+            driver.get(URI_2)
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (
+                        By.CLASS_NAME,
+                        "relation_path_type_ahead_search",
+                    )
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.XPATH,
+                        f"//ul[@class='result_list']/li[starts-with(@aria-label, '{user_paon}')]",
+                    )
+                )
+            ).click()
+
+            WebDriverWait(driver, timeout=10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.CSS_SELECTOR,
+                        "input.fragment_presenter_template_edit.btn.bg-primary.btn-medium[type='submit']",
+                    )
+                )
+            ).click()
+
+            WebDriverWait(driver, timeout=10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//h3[contains(text(), 'Collection days')]")
+                )
+            )
+
+            soup = BeautifulSoup(driver.page_source, "html.parser")
+
+            table = soup.find("table", class_="table listing table-striped")
+
+            # Check if the table was found
+            if table:
+                # Extract table rows and cells
+                table_data = []
+                for row in table.find("tbody").find_all("tr"):
+                    # Extract cell data from each <td> tag
+                    row_data = [cell.get_text(strip=True) for cell in row.find_all("td")]
+                    table_data.append(row_data)
+
+            else:
+                print("Table not found.")
+
+            collection_day = (table_data[0])[1]
+
+            current_week_bins = [bin for bin in bin_weeks if bin["week"] == current_week]
+            next_week_bins = [bin for bin in bin_weeks if bin["week"] != current_week]
+
+            days_of_week = [
+                "Monday",
+                "Tuesday",
+                "Wednesday",
+                "Thursday",
+                "Friday",
+                "Saturday",
+                "Sunday",
+            ]
+
+            today = datetime.now()
+            today_idx = today.weekday()  # Monday is 0 and Sunday is 6
+            target_idx = days_of_week.index(collection_day)
+
+            days_until_target = (target_idx - today_idx) % 7
+            if days_until_target == 0:
+                next_day = today
+            else:
+                next_day = today + timedelta(days=days_until_target)
+
+            current_week_dates = get_dates_every_x_days(next_day, 14, 7)
+            next_week_date = next_day + timedelta(days=7)
+            next_week_dates = get_dates_every_x_days(next_week_date, 14, 7)
+
+            for date in current_week_dates:
+                for bin in current_week_bins:
+                    dict_data = {
+                        "type": bin["bin_type"],
+                        "collectionDate": date,
+                    }
+                    bindata["bins"].append(dict_data)
+
+            for date in next_week_dates:
+                for bin in next_week_bins:
+                    dict_data = {
+                        "type": bin["bin_type"],
+                        "collectionDate": date,
+                    }
+                    bindata["bins"].append(dict_data)
+
+            bindata["bins"].sort(
+                key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+            )
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return bindata
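The Hertsmere logic derives concrete dates from a weekday name: (target_idx - today_idx) % 7 is the number of days until the next occurrence of the collection day (zero meaning today), and get_dates_every_x_days then expands that into a fortnightly series. A standalone sketch of the arithmetic; the helper is approximated inline since its implementation is not part of this diff, and its exact semantics (start date, 14-day step, 7 occurrences) are an assumption:

    from datetime import datetime, timedelta

    days_of_week = ["Monday", "Tuesday", "Wednesday", "Thursday",
                    "Friday", "Saturday", "Sunday"]

    collection_day = "Wednesday"  # e.g. scraped from the round-search table
    today = datetime.now()
    days_until_target = (days_of_week.index(collection_day) - today.weekday()) % 7
    next_day = today + timedelta(days=days_until_target)  # today if it matches

    # Assumed reading of get_dates_every_x_days(next_day, 14, 7):
    # seven dates, fourteen days apart, starting at next_day.
    dates = [(next_day + timedelta(days=14 * i)).strftime("%d/%m/%Y") for i in range(7)]
    print(dates)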
@@ -0,0 +1,167 @@
+import time
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            user_poan = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            if not user_postcode:
+                raise ValueError("No postcode provided.")
+            check_postcode(user_postcode)
+
+            headless = kwargs.get("headless")
+            web_driver = kwargs.get("web_driver")
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            page = "https://report.peterborough.gov.uk/waste"
+
+            driver.get(page)
+
+            wait = WebDriverWait(driver, 30)
+
+            try:
+                # Cookies confirmed working in selenium
+                accept_cookies_button = wait.until(
+                    EC.element_to_be_clickable(
+                        (
+                            By.XPATH,
+                            "//button/span[contains(text(), 'I Accept Cookies')]",
+                        )
+                    )
+                )
+                accept_cookies_button.click()
+            except:
+                print(
+                    "Accept cookies banner not found or clickable within the specified time."
+                )
+                pass
+
+            postcode_input = wait.until(
+                EC.presence_of_element_located((By.XPATH, '//input[@id="postcode"]'))
+            )
+
+            postcode_input.send_keys(user_postcode)
+
+            postcode_go_button = wait.until(
+                EC.element_to_be_clickable((By.XPATH, '//input[@id="go"]'))
+            )
+
+            postcode_go_button.click()
+
+            # Wait for the select address drop down to be present
+            select_address_input = wait.until(
+                EC.presence_of_element_located((By.XPATH, '//input[@id="address"]'))
+            )
+
+            select_address_input.click()
+            time.sleep(2)
+
+            select_address_input_item = wait.until(
+                EC.presence_of_element_located(
+                    (By.XPATH, f"//li[contains(text(), '{user_poan}')]")
+                )
+            )
+
+            select_address_input_item.click()
+
+            address_continue_button = wait.until(
+                EC.element_to_be_clickable((By.XPATH, '//input[@value="Continue"]'))
+            )
+
+            address_continue_button.click()
+
+            your_collections_heading = wait.until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//h2[contains(text(), 'Your collections')]")
+                )
+            )
+
+            results_page = wait.until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//div[@class='waste__collections']")
+                )
+            )
+
+            soup = BeautifulSoup(results_page.get_attribute("innerHTML"), "html.parser")
+
+            data = {"bins": []}
+            output_date_format = "%d/%m/%Y"
+            input_date_format = "%A, %d %B %Y"  # Expect: Thursday, 17 April 2025
+
+            # Each bin section is within a waste-service-wrapper div
+            collection_panels = soup.find_all("div", class_="waste-service-wrapper")
+
+            for panel in collection_panels:
+                try:
+                    # Bin type
+                    bin_type_tag = panel.find("h3", class_="waste-service-name")
+                    if not bin_type_tag:
+                        continue
+                    bin_type = bin_type_tag.get_text(strip=True)
+
+                    # Get 'Next collection' date
+                    rows = panel.find_all("div", class_="govuk-summary-list__row")
+                    next_collection = None
+                    for row in rows:
+                        key = row.find("dt", class_="govuk-summary-list__key")
+                        value = row.find("dd", class_="govuk-summary-list__value")
+                        if key and value and "Next collection" in key.get_text():
+                            raw_date = " ".join(value.get_text().split())
+
+                            # ✅ Remove st/nd/rd/th suffix from the day (e.g. 17th → 17)
+                            cleaned_date = re.sub(
+                                r"(\d{1,2})(st|nd|rd|th)", r"\1", raw_date
+                            )
+                            next_collection = cleaned_date
+                            break
+
+                    if not next_collection:
+                        continue
+
+                    print(f"Found next collection for {bin_type}: '{next_collection}'")
+
+                    parsed_date = datetime.strptime(next_collection, input_date_format)
+                    formatted_date = parsed_date.strftime(output_date_format)
+
+                    data["bins"].append(
+                        {
+                            "type": bin_type,
+                            "collectionDate": formatted_date,
+                        }
+                    )
+
+                except Exception as e:
+                    print(
+                        f"Error processing panel for bin '{bin_type if 'bin_type' in locals() else 'unknown'}': {e}"
+                    )
+
+            # Sort the data
+            data["bins"].sort(
+                key=lambda x: datetime.strptime(x["collectionDate"], output_date_format)
+            )
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return data
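The fiddly part of the new Peterborough module is date normalisation: the site renders days with ordinal suffixes ("Thursday, 17th April 2025"), which %d cannot parse, so the suffix is stripped with a regex before strptime. The step in isolation:

    import re
    from datetime import datetime

    raw_date = "Thursday, 17th April 2025"  # shape of the scraped value
    cleaned_date = re.sub(r"(\d{1,2})(st|nd|rd|th)", r"\1", raw_date)
    parsed = datetime.strptime(cleaned_date, "%A, %d %B %Y")
    print(parsed.strftime("%d/%m/%Y"))  # 17/04/2025

The digit group in the pattern keeps the "th" of "Thursday" intact, since the suffix only matches when it directly follows a one- or two-digit day.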
@@ -28,108 +28,119 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
     def parse_data(self, page: str, **kwargs) -> dict:
-        # Get postcode and UPRN from kwargs
-        user_postcode = kwargs.get("postcode")
-        user_paon = kwargs.get("paon")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_postcode(user_postcode)
-        check_paon(user_paon)
-
-        # Build URL to parse
-        council_url = "https://swale.gov.uk/bins-littering-and-the-environment/bins/check-your-bin-day"
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(council_url)
-
-        # Wait for the postcode field to appear then populate it
-        try:
-            inputElement_postcode = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.ID, "q485476_q1"))
-            )
-            inputElement_postcode.send_keys(user_postcode)
-        except Exception:
-            print("Page failed to load. Probably due to Cloudflare robot check!")
-
-        # Click search button
-        findAddress = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
-        )
-        driver.execute_script("arguments[0].click();", findAddress)
-
-        # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
-        WebDriverWait(driver, 10).until(
-            EC.element_to_be_clickable(
-                (
-                    By.XPATH,
-                    "//select[@name='q485480:q1']//option[contains(., '"
-                    + user_paon
-                    + "')]",
-                )
-            )
-        ).click()
-
-        # Click search button
-        getBins = WebDriverWait(driver, 10).until(
-            EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
-        )
-        driver.execute_script("arguments[0].click();", getBins)
-
-        BinTable = WebDriverWait(driver, 30).until(
-            EC.presence_of_element_located((By.ID, "SBCYBDSummary"))
-        )
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-        soup.prettify()
-
-        data = {"bins": []}
-
-        current_year = datetime.now().year
-        next_year = current_year + 1
-
-        next_collection_date = soup.find(
-            "strong", id="SBC-YBD-collectionDate"
-        ).text.strip()
-
-        # Extract bins for the next collection
-        next_bins = [li.text.strip() for li in soup.select("#SBCFirstBins ul li")]
-
-        # Extract future collection details
-        future_collection_date_tag = soup.find(
-            "p", text=lambda t: t and "starting from" in t
-        )
-        future_collection_date = (
-            future_collection_date_tag.text.split("starting from")[-1].strip()
-            if future_collection_date_tag
-            else "No future date found"
-        )
-
-        future_bins = [li.text.strip() for li in soup.select("#FirstFutureBins li")]
-
-        for bin in next_bins:
-            collection_date = datetime.strptime(next_collection_date, "%A, %d %B")
-            if (datetime.now().month == 12) and (collection_date.month == 1):
-                collection_date = collection_date.replace(year=next_year)
-            else:
-                collection_date = collection_date.replace(year=current_year)
-
-            dict_data = {
-                "type": bin,
-                "collectionDate": collection_date.strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        for bin in future_bins:
-            collection_date = datetime.strptime(future_collection_date, "%A, %d %B")
-            if (datetime.now().month == 12) and (collection_date.month == 1):
-                collection_date = collection_date.replace(year=next_year)
-            else:
-                collection_date = collection_date.replace(year=current_year)
-            dict_data = {
-                "type": bin,
-                "collectionDate": collection_date.strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
+        driver = None
+        try:
+            # Get postcode and UPRN from kwargs
+            user_postcode = kwargs.get("postcode")
+            user_paon = kwargs.get("paon")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_postcode(user_postcode)
+            check_paon(user_paon)
+
+            # Build URL to parse
+            council_url = "https://swale.gov.uk/bins-littering-and-the-environment/bins/check-your-bin-day"
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(council_url)
+
+            # Wait for the postcode field to appear then populate it
+            try:
+                inputElement_postcode = WebDriverWait(driver, 10).until(
+                    EC.presence_of_element_located((By.ID, "q485476_q1"))
+                )
+                inputElement_postcode.send_keys(user_postcode)
+            except Exception:
+                print("Page failed to load. Probably due to Cloudflare robot check!")
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
+            )
+            driver.execute_script("arguments[0].click();", findAddress)
+
+            # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
+            WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.XPATH,
+                        "//select[@name='q485480:q1']//option[contains(., '"
+                        + user_paon
+                        + "')]",
+                    )
+                )
+            ).click()
+
+            # Click search button
+            getBins = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
+            )
+            driver.execute_script("arguments[0].click();", getBins)
+
+            BinTable = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located((By.ID, "SBCYBDSummary"))
+            )
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            data = {"bins": []}
+
+            current_year = datetime.now().year
+            next_year = current_year + 1
+
+            next_collection_date = soup.find(
+                "strong", id="SBC-YBD-collectionDate"
+            ).text.strip()
+
+            # Extract bins for the next collection
+            next_bins = [li.text.strip() for li in soup.select("#SBCFirstBins ul li")]
+
+            # Extract future collection details
+            future_collection_date_tag = soup.find(
+                "p", text=lambda t: t and "starting from" in t
+            )
+            future_collection_date = (
+                future_collection_date_tag.text.split("starting from")[-1].strip()
+                if future_collection_date_tag
+                else "No future date found"
+            )
+
+            future_bins = [li.text.strip() for li in soup.select("#FirstFutureBins li")]
+
+            for bin in next_bins:
+                collection_date = datetime.strptime(next_collection_date, "%A, %d %B")
+                if (datetime.now().month == 12) and (collection_date.month == 1):
+                    collection_date = collection_date.replace(year=next_year)
+                else:
+                    collection_date = collection_date.replace(year=current_year)
+
+                dict_data = {
+                    "type": bin,
+                    "collectionDate": collection_date.strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            for bin in future_bins:
+                collection_date = datetime.strptime(future_collection_date, "%A, %d %B")
+                if (datetime.now().month == 12) and (collection_date.month == 1):
+                    collection_date = collection_date.replace(year=next_year)
+                else:
+                    collection_date = collection_date.replace(year=current_year)
+                dict_data = {
+                    "type": bin,
+                    "collectionDate": collection_date.strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
         return data
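Swale's dates arrive without a year ("Friday, 3 January" style), so the module infers one: a date parsed during December that lands in January belongs to the next calendar year, otherwise the current year applies. The rule in isolation:

    from datetime import datetime

    def infer_year(day_month: str, today: datetime) -> datetime:
        """Attach a year to a '%A, %d %B' string, rolling over at New Year."""
        parsed = datetime.strptime(day_month, "%A, %d %B")
        if today.month == 12 and parsed.month == 1:
            return parsed.replace(year=today.year + 1)
        return parsed.replace(year=today.year)

    # In late December a January collection falls into the following year.
    print(infer_year("Friday, 3 January", datetime(2025, 12, 20)).date())  # 2026-01-03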
@@ -16,35 +16,47 @@ class CouncilClass(AbstractGetBinDataClass):
     """
 
    def parse_data(self, page: str, **kwargs) -> dict:
-        data = {"bins": []}
-        user_uprn = kwargs.get("uprn")
-        web_driver = kwargs.get("web_driver")
-        headless = kwargs.get("headless")
-        check_uprn(user_uprn)
-
-        root_url = "https://forms.rbwm.gov.uk/bincollections?uprn="
-        api_url = root_url + user_uprn
-
-        # Create Selenium webdriver
-        driver = create_webdriver(web_driver, headless, None, __name__)
-        driver.get(api_url)
-
-        soup = BeautifulSoup(driver.page_source, features="html.parser")
-        soup.prettify()
-
-        # Get collections div
-        next_collection_div = soup.find("div", {"class": "widget-bin-collections"})
-
-        for tbody in next_collection_div.find_all("tbody"):
-            for tr in tbody.find_all("tr"):
-                td = tr.find_all("td")
-                next_collection_type = td[0].get_text()
-                next_collection_date = dateutil.parser.parse(td[1].get_text())
-                print(next_collection_date)
-                dict_data = {
-                    "type": next_collection_type,
-                    "collectionDate": next_collection_date.strftime("%d/%m/%Y"),
-                }
-                data["bins"].append(dict_data)
-
+        driver = None
+        try:
+            data = {"bins": []}
+            user_uprn = kwargs.get("uprn")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+
+            root_url = "https://forms.rbwm.gov.uk/bincollections?uprn="
+            api_url = root_url + user_uprn
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(api_url)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            # Get collections div
+            next_collection_div = soup.find("div", {"class": "widget-bin-collections"})
+
+            for tbody in next_collection_div.find_all("tbody"):
+                for tr in tbody.find_all("tr"):
+                    td = tr.find_all("td")
+                    next_collection_type = td[0].get_text()
+                    next_collection_date = dateutil.parser.parse(td[1].get_text())
+                    print(next_collection_date)
+                    dict_data = {
+                        "type": next_collection_type,
+                        "collectionDate": next_collection_date.strftime("%d/%m/%Y"),
+                    }
+                    data["bins"].append(dict_data)
+
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
         return data
+
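Unlike the strptime-based parsers, Windsor and Maidenhead hands its table cells to dateutil, which accepts loosely formatted date text without an explicit format string. For example (the cell text here is invented to illustrate the call):

    import dateutil.parser

    cell_text = "Friday 25 April 2025"  # plausible shape of a td[1] value
    next_collection_date = dateutil.parser.parse(cell_text)
    print(next_collection_date.strftime("%d/%m/%Y"))  # 25/04/2025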
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.140.0
+Version: 0.141.1
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
@@ -3,7 +3,7 @@ uk_bin_collection/tests/check_selenium_url_in_input.json.py,sha256=Iecdja0I3XIiY
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=b6MsNMMQbs1-lW6g1_dYz5WTiwt8Puuu3q_BEopiulk,120060
+uk_bin_collection/tests/input.json,sha256=uvi5_CrjVy26H4gkWdoRXCJ1wsJPgntzJB26hXwC5jI,120556
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -23,7 +23,7 @@ uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py,sha256=fJ0U
 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
 uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=fhX7S_A3jqoND7NE6qITPMPvdk3FJSKZ3Eoa5RtSg3I,4247
-uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=yC-8UMQHSbvze43PJ2_F4Z3cu7M7cynKTojipBJU7Ug,4307
+uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=4ebRyX2qZRbZdomuN1aJOSXij-r7YagRBQvV1vyPkqY,5059
 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
 uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py,sha256=1eXdST58xFRMdYl8AGNG_EwyQeLa31WSWUe882hQ2ec,6329
@@ -100,7 +100,7 @@ uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py,sha256=UhNXGi-_6NY
 uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py,sha256=i3AcWkeAnk7rD59nOm0QCSH7AggqjUAdwsXuSIC8ZJE,1614
 uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py,sha256=aYUVE5QqTxdj8FHhCB4EiFVDJahWJD9Pq0d1upBEvXg,1501
 uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py,sha256=U0VwSNIldMv5nUoiXtFgjbE0m6Kb-8W2WZQGVCNF_WI,3261
-uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=hjIrZXM0qe8xvHfrBqMDyXnq0_h_ySODqTfmOI5ahTc,4071
+uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=gujFsqn3j4YGudXxhJcLiO_pVYHnY_rd6GeZHzSqqs4,4823
 uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=RSOTD1MIXSW27eGf3TixCiJK4HtSJnpfME2CjalDeXs,4326
 uk_bin_collection/uk_bin_collection/councils/EastLothianCouncil.py,sha256=zTp-GDWYeUIlFaqfkqGvo7XMtxJd0VbxdGgqaAwRACk,2792
 uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py,sha256=5giegMCKQ2JhVDR5M4mevVxIdhZtSW7kbuuoSkj3EGk,4361
@@ -141,7 +141,7 @@ uk_bin_collection/uk_bin_collection/councils/HartDistrictCouncil.py,sha256=_llxT
 uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py,sha256=MUT1A24iZShT2p55rXEvgYwGUuw3W05Z4ZQAveehv-s,2842
 uk_bin_collection/uk_bin_collection/councils/HastingsBoroughCouncil.py,sha256=9MCuit4awXSZTbZCXWBsQGX2tp2mHZ1eP1wENZdMvgA,1806
 uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py,sha256=JpQhkWM6Jeuzf1W7r0HqvtVnEqNi18nhwJX70YucdsI,1848
-uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=-ThSG6NIJP_wf2GmGL7SAvxbOujdhanZ8ECP4VSQCBs,5415
+uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=k6MKZ3Xa-_a0oIpH5Rv5haYVq1eRHKSCiI6UAqAVTUk,6274
 uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt2iGl8qJxHb-uBh4u0knmi9MJ6irOJw9WYA,4805
 uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
 uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py,sha256=51vXTKrstfJhb7cLCcrsvA9qKCsptyNMZvy7ML9DasM,2344
@@ -216,6 +216,7 @@ uk_bin_collection/uk_bin_collection/councils/OadbyAndWigstonBoroughCouncil.py,sh
 uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py,sha256=9dlesCxNoVXlmQaqZj7QFh00smnJbm1Gnjkr_Uvzurs,1771
 uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py,sha256=d_bY0cXRDH4kSoWGGCTNN61MNErapSOf2WSTYDJr2r8,2318
 uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py,sha256=Kos5GzN2co3Ij3tSHOXB9S71Yt78RROCfVRtnh7M1VU,3657
+uk_bin_collection/uk_bin_collection/councils/PeterboroughCityCouncil.py,sha256=lOrDD4jfJ-_C5UwCGqRcQ1G-U1F5X6rf255ypzYEBcg,6300
 uk_bin_collection/uk_bin_collection/councils/PlymouthCouncil.py,sha256=FJqpJ0GJhpjYeyZ9ioZPkKGl-zrqMD3y5iKa07e_i30,3202
 uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py,sha256=xogNgVvwM5FljCziiNLgZ_wzkOnrQkifi1dkPMDRMtg,5588
 uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=db3Y5FJz-LFDqmVZqPdzcBxh0Q26OFPrbUxlQ7r4vsQ,5896
@@ -271,7 +272,7 @@ uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py,sha256=
 uk_bin_collection/uk_bin_collection/councils/StroudDistrictCouncil.py,sha256=Akx80Ve7D8RVdIW1vkWLYp80VrhL6Qc3dMMKnbFWUhY,3653
 uk_bin_collection/uk_bin_collection/councils/SunderlandCityCouncil.py,sha256=Eyaer--n2JQmGNaEdDbOhgIIGA6mDu72N8ph2wPIZy4,4027
 uk_bin_collection/uk_bin_collection/councils/SurreyHeathBoroughCouncil.py,sha256=MROVvf7RSRYYjM2ZDD83rAEwf8BSnqXVrasgBiJC92A,5220
-uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py,sha256=pAGzj9CUDaU1w0wMEbJIl1fxcW0kWIzvzsU7oYMdgWE,5001
+uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py,sha256=FuQGlPaiFxfQS3KzkyrvcuTaVb8VkwdWYgjm42FfUt8,5762
 uk_bin_collection/uk_bin_collection/councils/SwanseaCouncil.py,sha256=nmVPoPhnFgVi--vczX2i4Sf3bqM5RWJuwfhioRUr5XE,2303
 uk_bin_collection/uk_bin_collection/councils/SwindonBoroughCouncil.py,sha256=lSIykpkBjVwQSf3rrnrNuh7YRepgnkKQLbf1iErMuJs,1932
 uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py,sha256=k2TAAZG7n2S1BWVyxbE_-4-lZuzhOimCNz4yimUCOGk,1995
@@ -312,7 +313,7 @@ uk_bin_collection/uk_bin_collection/councils/WestSuffolkCouncil.py,sha256=9i8AQH
 uk_bin_collection/uk_bin_collection/councils/WiganBoroughCouncil.py,sha256=3gqFA4-BVx_In6QOu3KUNqPN4Fkn9iMlZTeopMK9p6A,3746
 uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py,sha256=Q0ooHTQb9ynMXpSNBPk7XXEjI7zcHst3id4wxGdmVx4,5698
 uk_bin_collection/uk_bin_collection/councils/WinchesterCityCouncil.py,sha256=W2k00N5n9-1MzjMEqsNjldsQdOJPEPMjK7OGSinZm5Y,4335
-uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha256=7Qhznj95ktAQjpWm5C8pbD5UcvfXm7Mwb7_DQxwjGSM,1777
+uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha256=d294otB0MRTLNx0Axf8Giy2ggzRRqn8ABN35wyu0l5w,2295
 uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py,sha256=X_e9zXEZAl_Mp6nPORHc9CTmf3QHdoMY3BCnKrXEr1I,2131
 uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py,sha256=37igH9g0xe4XIhRhcJ-ZJBU8MxTp5yzgpadWbdE33Yg,5205
 uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=H8aFHlacwV07X-6T9RQua4irqDA0cIQrF4O1FfPR7yI,4114
@@ -325,8 +326,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.140.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
-uk_bin_collection-0.140.0.dist-info/METADATA,sha256=3Dw3sim4P-LwGOmRbcX7jSK15SFpT16G2lTDl_wcuj8,19851
-uk_bin_collection-0.140.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-uk_bin_collection-0.140.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
-uk_bin_collection-0.140.0.dist-info/RECORD,,
+uk_bin_collection-0.141.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.141.1.dist-info/METADATA,sha256=OnpbVrmjYIjaD38jCRatXYehUPJHXHczfJ5yYrqZfpI,19851
+uk_bin_collection-0.141.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+uk_bin_collection-0.141.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.141.1.dist-info/RECORD,,