uk_bin_collection 0.128.6__py3-none-any.whl → 0.130.0__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
@@ -31,13 +31,6 @@
31
31
  "wiki_name": "Ards and North Down Council",
32
32
  "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
33
33
  },
34
- "ArdsAndNorthDownCouncil": {
35
- "url": "https://www.ardsandnorthdown.gov.uk",
36
- "wiki_command_url_override": "https://www.ardsandnorthdown.gov.uk",
37
- "uprn": "187136177",
38
- "wiki_name": "Ards and North Down Council",
39
- "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
40
- },
41
34
  "ArgyllandButeCouncil": {
42
35
  "uprn": "125061759",
43
36
  "skip_get_url": true,
@@ -212,6 +205,15 @@
212
205
  "wiki_name": "Bolton Council",
213
206
  "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search). Previously required a single field that was UPRN and full address; now requires UPRN and postcode as separate fields."
214
207
  },
208
+ "BostonBoroughCouncil": {
209
+ "postcode": "PE20 1AY",
210
+ "skip_get_url": true,
211
+ "house_number": "CEDAR",
212
+ "web_driver": "http://selenium:4444",
213
+ "url": "https://www.boston.gov.uk/findwastecollections",
214
+ "wiki_name": "Boston Borough Council",
215
+ "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter."
216
+ },
215
217
  "BracknellForestCouncil": {
216
218
  "house_number": "57",
217
219
  "paon": "57",
@@ -348,7 +350,7 @@
348
350
  "CardiffCouncil": {
349
351
  "skip_get_url": true,
350
352
  "uprn": "100100112419",
351
- "url": "https://www.cardiff.gov.uk/ENG/resident/Rubbish-and-recycling/When-are-my-bins-collected/Pages/default.aspx",
353
+ "url": "https://www.gov.uk",
352
354
  "wiki_name": "Cardiff Council",
353
355
  "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
354
356
  },
@@ -611,13 +613,6 @@
611
613
  "wiki_name": "East Herts Council",
612
614
  "wiki_note": "Pass the house number and postcode in their respective parameters."
613
615
  },
614
- "EastHertsCouncil": {
615
- "house_number": "1",
616
- "postcode": "CM20 2FZ",
617
- "skip_get_url": true,
618
- "url": "https://www.eastherts.gov.uk",
619
- "wiki_name": "East Herts Council"
620
- },
621
616
  "EastLindseyDistrictCouncil": {
622
617
  "house_number": "1",
623
618
  "postcode": "PE22 0YD",
@@ -627,6 +622,14 @@
627
622
  "wiki_name": "East Lindsey District Council",
628
623
  "wiki_note": "Pass the house name/number and postcode in their respective parameters. This parser requires a Selenium webdriver."
629
624
  },
625
+ "EastLothianCouncil": {
626
+ "house_number": "Flat 1",
627
+ "postcode": "EH21 6QA",
628
+ "skip_get_url": true,
629
+ "url": "https://eastlothian.gov.uk",
630
+ "wiki_name": "East Lothian Council",
631
+ "wiki_note": "Pass the house number and postcode in their respective parameters."
632
+ },
630
633
  "EastRenfrewshireCouncil": {
631
634
  "house_number": "23",
632
635
  "postcode": "G46 6RG",
@@ -645,6 +648,12 @@
645
648
  "wiki_name": "East Riding Council",
646
649
  "wiki_note": "Put the full address as it displays on the council website dropdown when you do the check manually."
647
650
  },
651
+ "EastStaffordshireBoroughCouncil": {
652
+ "url": "https://www.eaststaffsbc.gov.uk/bins-rubbish-recycling/collection-dates/68382",
653
+ "wiki_command_url_override": "https://www.eaststaffsbc.gov.uk/bins-rubbish-recycling/collection-dates/XXXXX",
654
+ "wiki_name": "East Staffordshire Borough Council",
655
+ "wiki_note": "Replace `XXXXX` with your property's ID when selecting from https://www.eaststaffsbc.gov.uk/bins-rubbish-recycling/collection-dates."
656
+ },
648
657
  "EastSuffolkCouncil": {
649
658
  "postcode": "IP11 9FJ",
650
659
  "skip_get_url": true,
@@ -745,20 +754,6 @@
745
754
  "wiki_name": "Flintshire County Council",
746
755
  "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
747
756
  },
748
- "FifeCouncil": {
749
- "url": "https://www.fife.gov.uk",
750
- "wiki_command_url_override": "https://www.fife.gov.uk",
751
- "uprn": "320203521",
752
- "wiki_name": "Fife Council",
753
- "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
754
- },
755
- "FlintshireCountyCouncil": {
756
- "url": "https://digital.flintshire.gov.uk",
757
- "wiki_command_url_override": "https://digital.flintshire.gov.uk",
758
- "uprn": "100100213710",
759
- "wiki_name": "Flintshire County Council",
760
- "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
761
- },
762
757
  "FolkstoneandHytheDistrictCouncil": {
763
758
  "skip_get_url": true,
764
759
  "uprn": "50032097",
@@ -887,6 +882,12 @@
887
882
  "wiki_name": "Hartlepool Borough Council",
888
883
  "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
889
884
  },
885
+ "HerefordshireCouncil": {
886
+ "url": "https://www.herefordshire.gov.uk/rubbish-recycling/check-bin-collection-day?blpu_uprn=10096232662",
887
+ "wiki_command_url_override": "https://www.herefordshire.gov.uk/rubbish-recycling/check-bin-collection-day?blpu_uprn=XXXXXXXXXXXX",
888
+ "wiki_name": "Herefordshire Council",
889
+ "wiki_note": "Replace 'XXXXXXXXXXXX' with your property's UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)."
890
+ },
890
891
  "HertsmereBoroughCouncil": {
891
892
  "house_number": "1",
892
893
  "postcode": "WD7 9HZ",
@@ -994,6 +995,12 @@
994
995
  "wiki_name": "Leeds City Council",
995
996
  "wiki_note": "Pass the house number, postcode, and UPRN. This parser requires a Selenium webdriver."
996
997
  },
998
+ "LeicesterCityCouncil": {
999
+ "url": "https://biffaleicester.co.uk",
1000
+ "uprn": "2465027976",
1001
+ "wiki_name": "Leicester City Council",
1002
+ "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
1003
+ },
997
1004
  "LichfieldDistrictCouncil": {
998
1005
  "url": "https://www.lichfielddc.gov.uk",
999
1006
  "wiki_command_url_override": "https://www.lichfielddc.gov.uk",
@@ -1231,7 +1238,7 @@
1231
1238
  "url": "https://community.newcastle.gov.uk/my-neighbourhood/ajax/getBinsNew.php?uprn=004510730634",
1232
1239
  "wiki_command_url_override": "https://community.newcastle.gov.uk/my-neighbourhood/ajax/getBinsNew.php?uprn=XXXXXXXX",
1233
1240
  "wiki_name": "Newcastle City Council",
1234
- "wiki_note": "Replace XXXXXXXX with your UPRN."
1241
+ "wiki_note": "Replace XXXXXXXX with your UPRN. UPRNs must be 12 digits long, so pad the left-hand side with 0s if your UPRN is shorter."
1235
1242
  },
1236
1243
  "NewcastleUnderLymeCouncil": {
1237
1244
  "url": "https://www.newcastle-staffs.gov.uk",
@@ -0,0 +1,149 @@
1
+ from bs4 import BeautifulSoup
2
+ from selenium.webdriver.common.by import By
3
+ from selenium.webdriver.support import expected_conditions as EC
4
+ from selenium.webdriver.support.ui import Select
5
+ from selenium.webdriver.support.wait import WebDriverWait
6
+
7
+ from uk_bin_collection.uk_bin_collection.common import *
8
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
9
+
10
+
11
+ # import the wonderful Beautiful Soup and the URL grabber
12
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.

    Drives Boston Borough Council's "find waste collections" wizard with
    Selenium: accept cookies, search by postcode, select the address whose
    text contains the supplied house name/number, then scrape the results.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [{"type", "collectionDate"}, ...]} for the address.

        kwargs:
            paon: house name/number used to pick the address (required)
            postcode: postcode to search for (required)
            web_driver: optional remote Selenium endpoint URL
            headless: whether to run the browser headless
        """
        # Pre-declare so the finally block can test it even when webdriver
        # creation itself raises.
        driver = None
        try:
            data = {"bins": []}
            user_paon = kwargs.get("paon")
            user_postcode = kwargs.get("postcode")
            web_driver = kwargs.get("web_driver")
            headless = kwargs.get("headless")
            check_paon(user_paon)
            check_postcode(user_postcode)

            # Create Selenium webdriver
            driver = create_webdriver(web_driver, headless, None, __name__)
            driver.get("https://www.boston.gov.uk/findwastecollections")

            # Dismiss the cookie banner before interacting with the form.
            accept_button = WebDriverWait(driver, timeout=30).until(
                EC.element_to_be_clickable((By.NAME, "acceptall"))
            )
            accept_button.click()

            # Wait for the postcode field to appear then populate it
            inputElement_postcode = WebDriverWait(driver, 30).until(
                EC.presence_of_element_located(
                    (By.ID, "BBCWASTECOLLECTIONS_START_SEARCHPOSTCODE")
                )
            )
            inputElement_postcode.send_keys(user_postcode)

            # Click search button
            findAddress = WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.ID, "BBCWASTECOLLECTIONS_START_START10_NEXT")
                )
            )
            findAddress.click()

            # Wait for the custom (Chosen-style) dropdown container to be
            # clickable before trying to open it.
            WebDriverWait(driver, 10).until(
                EC.element_to_be_clickable(
                    (By.ID, "BBCWASTECOLLECTIONS_COLLECTIONADDRESS_INCIDENTUPRN_chosen")
                )
            )

            # Click on the dropdown to open it
            dropdown = driver.find_element(
                By.ID, "BBCWASTECOLLECTIONS_COLLECTIONADDRESS_INCIDENTUPRN_chosen"
            )
            dropdown.click()

            # Wait for the dropdown options to be visible
            WebDriverWait(driver, 10).until(
                EC.visibility_of_element_located((By.CLASS_NAME, "chosen-results"))
            )

            # Locate the desired option using its text.
            # NOTE(review): user_paon is interpolated into the XPath, so a
            # value containing a single quote would break the expression.
            desired_option = driver.find_element(
                By.XPATH,
                "//li[@class='active-result' and contains(text(), '"
                + user_paon
                + "')]",
            )

            # Click on the desired option
            desired_option.click()

            # dropdown.select_by_visible_text(user_paon)

            # Click search button
            findAddress = WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.ID, "BBCWASTECOLLECTIONS_COLLECTIONADDRESS_NEXT3_NEXT")
                )
            )
            findAddress.click()

            # Wait for the collections table to appear
            WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.ID, "BBCWASTECOLLECTIONS_SERVICE_FIELD859_OUTER")
                )
            )

            # Hand the fully-rendered page over to BeautifulSoup.
            soup = BeautifulSoup(driver.page_source, features="html.parser")

            # Each collection service is rendered as one grid cell.
            bins = soup.find_all(
                "div", class_="grid__cell grid__cell--listitem grid__cell--cols1"
            )

            # The site shows dates without a year; a January date seen in
            # December is assumed to belong to next year.
            current_year = datetime.now().year
            next_year = current_year + 1

            # Loop through each bin container to extract the details
            for bin_div in bins:
                # Find the bin type (title text)
                bin_type = bin_div.find("h2", class_="item__title").text.strip()

                # Find the next collection date, e.g. "Next: Friday 3rd January"
                next_collection = (
                    bin_div.find("div", class_="item__content")
                    .find("div")
                    .text.strip()
                    .replace("Next: ", "")
                )

                next_collection = datetime.strptime(
                    remove_ordinal_indicator_from_date_string(next_collection),
                    "%A %d %B",
                )

                if (datetime.now().month == 12) and (next_collection.month == 1):
                    next_collection = next_collection.replace(year=next_year)
                else:
                    next_collection = next_collection.replace(year=current_year)

                dict_data = {
                    "type": bin_type,
                    "collectionDate": next_collection.strftime(date_format),
                }
                data["bins"].append(dict_data)

        except Exception as e:
            # Here you can log the exception if needed
            print(f"An error occurred: {e}")
            # Optionally, re-raise the exception if you want it to propagate
            raise
        finally:
            # This block ensures that the driver is closed regardless of an exception
            if driver:
                driver.quit()
        return data
@@ -0,0 +1,83 @@
1
+ import requests
2
+ from bs4 import BeautifulSoup
3
+
4
+ from uk_bin_collection.uk_bin_collection.common import *
5
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
6
+
7
+
8
+ # import the wonderful Beautiful Soup and the URL grabber
9
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.

    Queries East Lothian's two-step AJAX calendar: first resolve the
    street id for a postcode, then fetch the collection dates for that
    street.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [{"type", "collectionDate"}, ...]} sorted by date.

        kwargs:
            postcode: property's postcode (required)
            paon: house name/number, matched against the street options (required)

        Raises:
            ValueError: if no street list or no matching address is returned
                for the supplied postcode/paon.
            requests.HTTPError: if either endpoint responds with an error status.
        """
        user_postcode = kwargs.get("postcode")
        user_paon = kwargs.get("paon")
        check_postcode(user_postcode)
        check_paon(user_paon)
        bindata = {"bins": []}

        headers = {
            "Referer": "http://collectiondates.eastlothian.gov.uk/your-calendar",
            "User-Agent": "Mozilla/5.0",
        }

        # Step 1: resolve the street id for this postcode.
        # NOTE(review): the endpoint names are seasonal ("spring-2024",
        # "summer-2024") and may need updating when the council rotates them.
        URI = "http://collectiondates.eastlothian.gov.uk/ajax/your-calendar/load-streets-spring-2024.asp"
        payload = {
            "postcode": user_postcode,
        }

        response = requests.get(URI, headers=headers, params=payload)
        # Fail fast with a clear HTTP error instead of parsing an error page.
        response.raise_for_status()

        soup = BeautifulSoup(response.text, "html.parser")

        # Find the street dropdown returned for the postcode.
        select = soup.find("select", id="SelectStreet")
        if select is None:
            raise ValueError(f"No streets found for postcode '{user_postcode}'")

        # Find the option whose text contains the house name/number.
        address = select.find("option", string=lambda text: text and user_paon in text)
        if address is None:
            raise ValueError(
                f"No address matching '{user_paon}' found for postcode '{user_postcode}'"
            )

        # Step 2: fetch the calendar for the matched street.
        URI = "http://collectiondates.eastlothian.gov.uk/ajax/your-calendar/load-recycling-summer-2024.asp"
        payload = {
            "id": address["value"],
        }

        response = requests.get(URI, headers=headers, params=payload)
        response.raise_for_status()

        soup = BeautifulSoup(response.text, "html.parser")

        # Each calendar item pairs a waste-stream label with its next date.
        calendar_items = soup.find_all("div", class_="calendar-item")
        for item in calendar_items:
            waste_label = item.find("div", class_="waste-label").text.strip()
            waste_value = item.find("div", class_="waste-value").find("h4").text.strip()

            try:
                collection_date = datetime.strptime(
                    remove_ordinal_indicator_from_date_string(waste_value),
                    "%A %d %B %Y",
                )
            except ValueError:
                # Entries that are not parseable dates (e.g. "to be
                # confirmed") are skipped.
                continue

            dict_data = {
                "type": waste_label.replace(" is:", ""),
                "collectionDate": collection_date.strftime(date_format),
            }
            bindata["bins"].append(dict_data)

        bindata["bins"].sort(
            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
        )

        return bindata
@@ -0,0 +1,86 @@
1
+ import requests
2
+ from bs4 import BeautifulSoup
3
+ from selenium.webdriver.common.by import By
4
+ from selenium.webdriver.support import expected_conditions as EC
5
+ from selenium.webdriver.support.ui import Select
6
+ from selenium.webdriver.support.wait import WebDriverWait
7
+
8
+ from uk_bin_collection.uk_bin_collection.common import *
9
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
10
+
11
+
12
+ # import the wonderful Beautiful Soup and the URL grabber
13
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.

    Parses East Staffordshire's pre-fetched collection-dates page: the
    highlighted "next collection" banner plus every other listed
    collection, pairing each date heading with the bin types beneath it.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [{"type", "collectionDate"}, ...]} sorted by date."""
        bindata = {"bins": []}
        soup = BeautifulSoup(page.text, features="html.parser")

        # Matches e.g. "Monday, 1st January" in the page's date headings.
        day_pattern = r"(Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d+)(?:st|nd|rd|th)? (\w+)"

        def dated(match):
            # Rebuild "Weekday DD Month" from the regex groups; the page
            # omits the year, so a January date seen in December is
            # assigned to next year, everything else to the current year.
            rebuilt = f"{match.group(1)} {remove_ordinal_indicator_from_date_string(match.group(2))} {match.group(3)}"
            parsed = datetime.strptime(rebuilt, "%A %d %B")
            if (datetime.now().month == 12) and (parsed.month == 1):
                return parsed.replace(year=datetime.now().year + 1)
            return parsed.replace(year=datetime.now().year)

        def collect(container, when):
            # Every bin type under the heading shares the same date.
            for entry in container.find_all("div", class_="field__item"):
                bindata["bins"].append(
                    {
                        "type": entry.text.strip(),
                        "collectionDate": when.strftime(date_format),
                    }
                )

        # The highlighted "next collection" banner.
        banner = soup.find("div", class_="collection-next")
        if banner:
            hit = re.search(day_pattern, banner.find("h2").text.strip())
            if hit:
                collect(banner, dated(hit))

        # All other collections are list items whose first node is the
        # date text.
        for item in soup.find_all("li"):
            hit = re.search(day_pattern, item.contents[0].strip())
            if hit:
                collect(item, dated(hit))

        bindata["bins"].sort(
            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
        )

        return bindata
@@ -0,0 +1,53 @@
1
+ import logging
2
+
3
+ from bs4 import BeautifulSoup
4
+
5
+ from uk_bin_collection.uk_bin_collection.common import *
6
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
7
+
8
+
9
+ # import the wonderful Beautiful Soup and the URL grabber
10
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.

    Parses Herefordshire's bin-collection-day page (already fetched with
    the blpu_uprn query parameter) into the standard bins dict.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [{"type", "collectionDate"}, ...]} for the page.

        Raises:
            ValueError: if the page shows no resolved address, which means
                the UPRN in the request URL was not recognised.
        """
        soup = BeautifulSoup(page.text, features="html.parser")
        soup.prettify()

        data = {"bins": []}

        # This paragraph is only rendered once a valid address/UPRN has
        # been resolved by the council site.
        if soup.find("p", id="selectedAddressResult") is None:
            raise ValueError("Address/UPRN not found")

        schedule = soup.find("div", id="wasteCollectionDates")

        for island in schedule.select('div[class*="hc-island"]'):
            service = island.h4.get_text(strip=True)

            # The last hc-island block is just a link to the calendar.
            if service == "Calendar":
                continue

            # The date lives in a span, e.g. "Friday 3 January 2025 (in 4 days)";
            # keep only the part before the parenthesis.
            raw_text = island.select("div > p > span")[0].get_text(strip=True)
            date_text = re.search(r"(.*) \(.*\)", raw_text).group(1)

            if date_text:
                logging.info(
                    f"Bin type: {service} - Collection date: {date_text}"
                )
                data["bins"].append(
                    {
                        "type": service,
                        "collectionDate": datetime.strptime(
                            date_text, "%A %d %B %Y"
                        ).strftime(date_format),
                    }
                )

        return data
@@ -0,0 +1,62 @@
1
+ import time
2
+
3
+ import requests
4
+
5
+ from uk_bin_collection.uk_bin_collection.common import *
6
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
7
+
8
+
9
+ # import the wonderful Beautiful Soup and the URL grabber
10
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.

    Queries the Biffa Leicester admin-ajax API for a property's waste
    collection days.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [{"type", "collectionDate"}, ...]} sorted by date.

        kwargs:
            uprn: property UPRN (required); zero-padded to 12 digits as the
                API expects.

        Raises:
            requests.HTTPError: if the API responds with an error status.
            KeyError: if the JSON payload lacks the expected fields.
        """
        user_uprn = kwargs.get("uprn")
        check_uprn(user_uprn)
        bindata = {"bins": []}

        # The API requires a zero-padded, 12-digit UPRN.
        user_uprn = str(user_uprn).zfill(12)

        URI = "https://biffaleicester.co.uk/wp-admin/admin-ajax.php"

        payload = {
            "action": "get_details_api",
            "uprn": user_uprn,
        }

        headers = {
            "Origin": "https://biffaleicester.co.uk",
            "Referer": "https://biffaleicester.co.uk/services/waste-collection-days/",
            "User-Agent": "Mozilla/5.0",
        }

        # POST the form-encoded lookup; fail fast on HTTP errors instead of
        # surfacing a confusing JSON decode/KeyError later.
        response = requests.post(URI, headers=headers, data=payload)
        response.raise_for_status()

        # Parse the JSON response
        bin_collection = response.json()

        # Each entry pairs a service description with a dd/mm/yy due date.
        for collection in bin_collection["anyType"]:
            bin_type = collection["ServiceModeDesc"]
            date = collection["ServiceDueDate"]

            dict_data = {
                "type": bin_type,
                "collectionDate": datetime.strptime(
                    date,
                    "%d/%m/%y",
                ).strftime(date_format),
            }
            bindata["bins"].append(dict_data)

        bindata["bins"].sort(
            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
        )

        return bindata
@@ -82,6 +82,30 @@ class CouncilClass(AbstractGetBinDataClass):
82
82
  if pattern.match(link_text):
83
83
  return address_link
84
84
 
85
+ try:
86
+ print("Finding next page link not found.")
87
+ # Find the 'Next page' link
88
+ next_page_link = soup.find("a", class_="button float-right")
89
+
90
+ # Ensure the link exists
91
+ if next_page_link:
92
+ # Extract the href attribute
93
+ next_page_url = next_page_link["href"]
94
+
95
+ # Send a GET request to the next page
96
+ next_response = requests.get(next_page_url)
97
+ next_response.raise_for_status() # Raise an exception for HTTP errors
98
+
99
+ # Parse the HTML content of the next page
100
+ soup = BeautifulSoup(next_response.text, "html.parser")
101
+ address_link = self._get_result_by_identifier(soup, identifier)
102
+ return address_link
103
+ else:
104
+ print("Next page link not found.")
105
+ except AttributeError as e:
106
+ print(f"Warning: Could not find the search results. Error: {e}")
107
+ return None # Return None if no result found
108
+
85
109
  print(f"Warning: No results found for identifier '{identifier}'.")
86
110
  return None # Return None if no match is found
87
111
 
@@ -19,6 +19,7 @@ def parse_collection_date(date_string) -> datetime:
19
19
 
20
20
  return parsed_date
21
21
 
22
+
22
23
  class CouncilClass(AbstractGetBinDataClass):
23
24
  """
24
25
  Concrete classes have to implement all abstract operations of the
@@ -84,6 +85,9 @@ class CouncilClass(AbstractGetBinDataClass):
84
85
 
85
86
  data = {"bins": []}
86
87
 
88
+ current_year = datetime.now().year
89
+ next_year = current_year + 1
90
+
87
91
  next_collection_date = soup.find(
88
92
  "strong", id="SBC-YBD-collectionDate"
89
93
  ).text.strip()
@@ -104,20 +108,27 @@ class CouncilClass(AbstractGetBinDataClass):
104
108
  future_bins = [li.text.strip() for li in soup.select("#FirstFutureBins li")]
105
109
 
106
110
  for bin in next_bins:
111
+ collection_date = datetime.strptime(next_collection_date, "%A, %d %B")
112
+ if (datetime.now().month == 12) and (collection_date.month == 1):
113
+ collection_date = collection_date.replace(year=next_year)
114
+ else:
115
+ collection_date = collection_date.replace(year=current_year)
116
+
107
117
  dict_data = {
108
118
  "type": bin,
109
- "collectionDate": datetime.strptime(
110
- next_collection_date, "%A, %d %B"
111
- ).strftime(date_format),
119
+ "collectionDate": collection_date.strftime(date_format),
112
120
  }
113
121
  data["bins"].append(dict_data)
114
122
 
115
123
  for bin in future_bins:
124
+ collection_date = datetime.strptime(future_collection_date, "%A, %d %B")
125
+ if (datetime.now().month == 12) and (collection_date.month == 1):
126
+ collection_date = collection_date.replace(year=next_year)
127
+ else:
128
+ collection_date = collection_date.replace(year=current_year)
116
129
  dict_data = {
117
130
  "type": bin,
118
- "collectionDate": datetime.strptime(
119
- future_collection_date, "%A, %d %B"
120
- ).strftime(date_format),
131
+ "collectionDate": collection_date.strftime(date_format),
121
132
  }
122
133
  data["bins"].append(dict_data)
123
134
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: uk_bin_collection
3
- Version: 0.128.6
3
+ Version: 0.130.0
4
4
  Summary: Python Lib to collect UK Bin Data
5
5
  Author: Robert Bradley
6
6
  Author-email: robbrad182@gmail.com
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
2
2
  uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
3
3
  uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
4
4
  uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
5
- uk_bin_collection/tests/input.json,sha256=mQ6Mu07OBuIodT-uD74zTi-AuXDMQQ09GyzDcPvaxqo,115118
5
+ uk_bin_collection/tests/input.json,sha256=EUx3biB0uK4bYrQEM5YTMa3xXTxKVfb0-mcw8bupyqM,115835
6
6
  uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
7
7
  uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
8
8
  uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -39,6 +39,7 @@ uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py,sha256=xqWk
39
39
  uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py,sha256=ZLA2V3qPsJTom7SeQdGDhF4tJSfgIV5Qi202QvGKJZ0,4477
40
40
  uk_bin_collection/uk_bin_collection/councils/BlaenauGwentCountyBoroughCouncil.py,sha256=C5Fi19t5HJcrOHhWnt_6pttgRL2IWIYnwYlF9MdSPR0,4330
41
41
  uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=WI68r8jB0IHPUT4CgmZMtng899AAMFTxkyTdPg9yLF8,4117
42
+ uk_bin_collection/uk_bin_collection/councils/BostonBoroughCouncil.py,sha256=8xv6FMNj8Qgwn5K0nMdB5X8hkcNFzhcJ48DMordflJY,5631
42
43
  uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py,sha256=Llo1rULaAZ8rChVYZqXFFLo7CN6vbT0ULUJD6ActouY,9015
43
44
  uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py,sha256=BEWS2c62cOsf26jqn1AkNUvVmc5AlUADYLaQuPn9RY4,5456
44
45
  uk_bin_collection/uk_bin_collection/councils/BraintreeDistrictCouncil.py,sha256=2vYHilpI8mSwC2Ykdr1gxYAN3excDWqF6AwtGbkwbTw,2441
@@ -94,8 +95,10 @@ uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py,sha256
94
95
  uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py,sha256=U0VwSNIldMv5nUoiXtFgjbE0m6Kb-8W2WZQGVCNF_WI,3261
95
96
  uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=hjIrZXM0qe8xvHfrBqMDyXnq0_h_ySODqTfmOI5ahTc,4071
96
97
  uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=RSOTD1MIXSW27eGf3TixCiJK4HtSJnpfME2CjalDeXs,4326
98
+ uk_bin_collection/uk_bin_collection/councils/EastLothianCouncil.py,sha256=zTp-GDWYeUIlFaqfkqGvo7XMtxJd0VbxdGgqaAwRACk,2792
97
99
  uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py,sha256=5giegMCKQ2JhVDR5M4mevVxIdhZtSW7kbuuoSkj3EGk,4361
98
100
  uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py,sha256=oL-NqriLVy_NChGASNh8qTqeakLn4iP_XzoMC6VlPGM,5216
101
+ uk_bin_collection/uk_bin_collection/councils/EastStaffordshireBoroughCouncil.py,sha256=s13zlAN9Rac-RVHNFLIjIY0X8C6sPTNS37EL2t6vXw8,3692
99
102
  uk_bin_collection/uk_bin_collection/councils/EastSuffolkCouncil.py,sha256=qQ0oOfGd0sWcczse_B22YoeL9uj3og8v3UJLt_Sx29c,4353
100
103
  uk_bin_collection/uk_bin_collection/councils/EastleighBoroughCouncil.py,sha256=V4Vso4DvawFiezKlmXbTlJEK9Sjhz9nA8WeYjwtO2e4,2310
101
104
  uk_bin_collection/uk_bin_collection/councils/EdinburghCityCouncil.py,sha256=YRjNgevnCxfaAIU8BV9dkqG17NiT6S-hp7l-1rdLVgQ,3150
@@ -126,6 +129,7 @@ uk_bin_collection/uk_bin_collection/councils/HaringeyCouncil.py,sha256=t_6AkAu4w
126
129
  uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py,sha256=_g3fP5Nq-OUjgNrfRf4UEyFKzq0x8QK-4enh5RP1efA,2050
127
130
  uk_bin_collection/uk_bin_collection/councils/HartDistrictCouncil.py,sha256=_llxT4JYYlwm20ZtS3fXwtDs6mwJyLTZBP2wBhvEpWk,2342
128
131
  uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py,sha256=MUT1A24iZShT2p55rXEvgYwGUuw3W05Z4ZQAveehv-s,2842
132
+ uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py,sha256=JpQhkWM6Jeuzf1W7r0HqvtVnEqNi18nhwJX70YucdsI,1848
129
133
  uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=-ThSG6NIJP_wf2GmGL7SAvxbOujdhanZ8ECP4VSQCBs,5415
130
134
  uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt2iGl8qJxHb-uBh4u0knmi9MJ6irOJw9WYA,4805
131
135
  uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
@@ -140,6 +144,7 @@ uk_bin_collection/uk_bin_collection/councils/KirkleesCouncil.py,sha256=WPM7koIqK
140
144
  uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py,sha256=VdlWDESoHfr_X0r8-UMaLMUQhKZOa2BnpVPkX-1u3EQ,5605
141
145
  uk_bin_collection/uk_bin_collection/councils/LancasterCityCouncil.py,sha256=FmHT6oyD4BwWuhxA80PHnGA7HPrLuyjP_54Cg8hT6k4,2537
142
146
  uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py,sha256=iSZApZ9oSfSatQ6dAxmykSfti91jGuY6n2BwEkVMOiU,5144
147
+ uk_bin_collection/uk_bin_collection/councils/LeicesterCityCouncil.py,sha256=o3kE8sjThQa4_AvSK5NH8VH7jWFO9MMPgoqLOTjyh0w,1851
143
148
  uk_bin_collection/uk_bin_collection/councils/LichfieldDistrictCouncil.py,sha256=l3zgTWuKOW8fgb8PmXv0OTI6HaiGBPndefNQk8MM4oY,1810
144
149
  uk_bin_collection/uk_bin_collection/councils/LincolnCouncil.py,sha256=aUCqjHuk0sLtx83a-2agcLIMgEbfqjltXRCBRXT9J-8,3733
145
150
  uk_bin_collection/uk_bin_collection/councils/LisburnCastlereaghCityCouncil.py,sha256=vSOzdEwp9ZeUhed7E3eVv9ReD-2XgbSkpyAbVnfc-Gk,3309
@@ -164,7 +169,7 @@ uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py,s
164
169
  uk_bin_collection/uk_bin_collection/councils/MidDevonCouncil.py,sha256=RjBZ7R3_Pax9p1d2DCygqryjV1RP4BYvqb-rT_KyOEg,3322
165
170
  uk_bin_collection/uk_bin_collection/councils/MidSuffolkDistrictCouncil.py,sha256=h6M-v5jVYe7OlQ47Vf-0pEgECZLOOacK3_XE6zbpsM4,6329
166
171
  uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py,sha256=AZgC9wmDLEjUOtIFvf0ehF5LHturXTH4DkE3ioPSVBA,6254
167
- uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=mM5-itJDNhjsT5UEjSFfWppmfmPFSns4u_1QblewuFU,5605
172
+ uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=-VKvdIhrs859-YqxsNMzRWm2alP1avBR1_J8O9gJnYw,6725
168
173
  uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py,sha256=7e2pGBLCw24pNItHeI9jkxQ3rEOZ4WC4zVlbvKYGdXE,2600
169
174
  uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py,sha256=xWR5S0gwQu9gXxjl788Wux1KaC0CT7ZFw0iXuRLZCEM,5599
170
175
  uk_bin_collection/uk_bin_collection/councils/MonmouthshireCountyCouncil.py,sha256=V3R98D3DtCPZK3RNCg8yR2ddoUz_tOzl2YTgWfjdPX0,2468
@@ -247,7 +252,7 @@ uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py,sha256=
247
252
  uk_bin_collection/uk_bin_collection/councils/StroudDistrictCouncil.py,sha256=Akx80Ve7D8RVdIW1vkWLYp80VrhL6Qc3dMMKnbFWUhY,3653
248
253
  uk_bin_collection/uk_bin_collection/councils/SunderlandCityCouncil.py,sha256=4DnKyyu56_AwuchD6_oL1dvpDStMvkkxQtYN79rUKOs,3825
249
254
  uk_bin_collection/uk_bin_collection/councils/SurreyHeathBoroughCouncil.py,sha256=MROVvf7RSRYYjM2ZDD83rAEwf8BSnqXVrasgBiJC92A,5220
250
- uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py,sha256=LNewGYrRHPforrsyZO7EaAy7Wx7VXi3I_tZ0SO9VcAA,4408
255
+ uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py,sha256=pAGzj9CUDaU1w0wMEbJIl1fxcW0kWIzvzsU7oYMdgWE,5001
251
256
  uk_bin_collection/uk_bin_collection/councils/SwanseaCouncil.py,sha256=nmVPoPhnFgVi--vczX2i4Sf3bqM5RWJuwfhioRUr5XE,2303
252
257
  uk_bin_collection/uk_bin_collection/councils/SwindonBoroughCouncil.py,sha256=lSIykpkBjVwQSf3rrnrNuh7YRepgnkKQLbf1iErMuJs,1932
253
258
  uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py,sha256=k2TAAZG7n2S1BWVyxbE_-4-lZuzhOimCNz4yimUCOGk,1995
@@ -301,8 +306,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
301
306
  uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
302
307
  uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
303
308
  uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
304
- uk_bin_collection-0.128.6.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
305
- uk_bin_collection-0.128.6.dist-info/METADATA,sha256=4iZ2Zs9pA4B7EuezyBwaj0rDq8cX5KE98OVl2aQxk3Y,19549
306
- uk_bin_collection-0.128.6.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
307
- uk_bin_collection-0.128.6.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
308
- uk_bin_collection-0.128.6.dist-info/RECORD,,
309
+ uk_bin_collection-0.130.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
310
+ uk_bin_collection-0.130.0.dist-info/METADATA,sha256=BnCUzurUgw3yydA2WY7gFgwqUBQ2R3h6HGT5RC4wXEo,19549
311
+ uk_bin_collection-0.130.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
312
+ uk_bin_collection-0.130.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
313
+ uk_bin_collection-0.130.0.dist-info/RECORD,,