uk_bin_collection 0.119.0__py3-none-any.whl → 0.123.1__py3-none-any.whl

Files changed (38)
  1. uk_bin_collection/tests/input.json +129 -10
  2. uk_bin_collection/tests/test_common_functions.py +26 -0
  3. uk_bin_collection/uk_bin_collection/common.py +30 -6
  4. uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py +0 -1
  5. uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py +3 -1
  6. uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py +8 -5
  7. uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py +10 -2
  8. uk_bin_collection/uk_bin_collection/councils/CarmarthenshireCountyCouncil.py +3 -3
  9. uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +102 -0
  10. uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py +3 -3
  11. uk_bin_collection/uk_bin_collection/councils/CumberlandAllerdaleCouncil.py +93 -0
  12. uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py +11 -8
  13. uk_bin_collection/uk_bin_collection/councils/EnvironmentFirst.py +14 -0
  14. uk_bin_collection/uk_bin_collection/councils/FolkstoneandHytheDistrictCouncil.py +81 -0
  15. uk_bin_collection/uk_bin_collection/councils/GlasgowCityCouncil.py +17 -0
  16. uk_bin_collection/uk_bin_collection/councils/HackneyCouncil.py +85 -0
  17. uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py +83 -0
  18. uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py +10 -5
  19. uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py +71 -0
  20. uk_bin_collection/uk_bin_collection/councils/KingsLynnandWestNorfolkBC.py +59 -0
  21. uk_bin_collection/uk_bin_collection/councils/LondonBoroughHavering.py +75 -0
  22. uk_bin_collection/uk_bin_collection/councils/LondonBoroughLewisham.py +140 -0
  23. uk_bin_collection/uk_bin_collection/councils/MidSuffolkDistrictCouncil.py +3 -1
  24. uk_bin_collection/uk_bin_collection/councils/MonmouthshireCountyCouncil.py +70 -0
  25. uk_bin_collection/uk_bin_collection/councils/MorayCouncil.py +65 -0
  26. uk_bin_collection/uk_bin_collection/councils/NewcastleUnderLymeCouncil.py +66 -0
  27. uk_bin_collection/uk_bin_collection/councils/NorthHertfordshireDistrictCouncil.py +93 -0
  28. uk_bin_collection/uk_bin_collection/councils/RoyalBoroughofGreenwich.py +113 -0
  29. uk_bin_collection/uk_bin_collection/councils/SandwellBoroughCouncil.py +87 -0
  30. uk_bin_collection/uk_bin_collection/councils/ThurrockCouncil.py +93 -0
  31. uk_bin_collection/uk_bin_collection/councils/WarwickDistrictCouncil.py +29 -10
  32. uk_bin_collection/uk_bin_collection/councils/WestNorthamptonshireCouncil.py +12 -10
  33. uk_bin_collection/uk_bin_collection/councils/WyreForestDistrictCouncil.py +65 -0
  34. {uk_bin_collection-0.119.0.dist-info → uk_bin_collection-0.123.1.dist-info}/METADATA +1 -1
  35. {uk_bin_collection-0.119.0.dist-info → uk_bin_collection-0.123.1.dist-info}/RECORD +38 -21
  36. {uk_bin_collection-0.119.0.dist-info → uk_bin_collection-0.123.1.dist-info}/LICENSE +0 -0
  37. {uk_bin_collection-0.119.0.dist-info → uk_bin_collection-0.123.1.dist-info}/WHEEL +0 -0
  38. {uk_bin_collection-0.119.0.dist-info → uk_bin_collection-0.123.1.dist-info}/entry_points.txt +0 -0
uk_bin_collection/uk_bin_collection/councils/LondonBoroughLewisham.py
@@ -0,0 +1,140 @@
+import re
+import time
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        user_postcode = kwargs.get("postcode")
+        web_driver = kwargs.get("web_driver")
+        headless = kwargs.get("headless")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        # Initialize the WebDriver (Chrome in this case)
+        with create_webdriver(
+            web_driver,
+            headless,
+            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
+            __name__,
+        ) as driver:
+
+            # Step 1: Navigate to the form page
+            driver.get(
+                "https://lewisham.gov.uk/myservices/recycling-and-rubbish/your-bins/collection"
+            )
+
+            try:
+                cookie_accept_button = WebDriverWait(driver, 5).until(
+                    EC.element_to_be_clickable(
+                        (By.ID, "CybotCookiebotDialogBodyLevelButtonLevelOptinAllowAll")
+                    )
+                )
+                cookie_accept_button.click()
+            except Exception:
+                print("No cookie consent banner found or already dismissed.")
+
+            # Wait for the form to load
+            WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.CLASS_NAME, "address-finder"))
+            )
+
+            # Step 2: Locate the input field for the postcode
+            postcode_input = driver.find_element(
+                By.CLASS_NAME, "js-address-finder-input"
+            )
+
+            # Enter the postcode
+            postcode_input.send_keys(
+                user_postcode
+            )  # Replace with your desired postcode
+            time.sleep(1)  # Optional: Wait for the UI to react
+
+            # Step 4: Click the "Find address" button with retry logic
+            find_button = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (By.CLASS_NAME, "js-address-finder-step-address")
+                )
+            )
+            find_button.click()
+
+            # Wait for the address selector to appear and options to load
+            WebDriverWait(driver, 10).until(
+                lambda d: len(
+                    d.find_element(By.ID, "address-selector").find_elements(
+                        By.TAG_NAME, "option"
+                    )
+                )
+                > 1
+            )
+
+            # Select the dropdown and print available options
+            address_selector = driver.find_element(By.ID, "address-selector")
+
+            # Use Select class to interact with the dropdown
+            select = Select(address_selector)
+            if len(select.options) > 1:
+                select.select_by_value(user_uprn)
+            else:
+                print("No additional addresses available to select")
+
+            # Wait until the URL contains the expected substring
+            WebDriverWait(driver, 10).until(
+                EC.url_contains("/find-your-collection-day-result")
+            )
+
+            # Parse the HTML
+            soup = BeautifulSoup(driver.page_source, "html.parser")
+
+            # Extract the main container
+            collection_result = soup.find("div", class_="js-find-collection-result")
+
+            # Extract each collection type and its frequency/day
+            for strong_tag in collection_result.find_all("strong"):
+                bin_type = strong_tag.text.strip()  # e.g., "Food waste"
+                # Extract the sibling text
+                schedule_text = (
+                    strong_tag.next_sibling.next_sibling.next_sibling.text.strip()
+                    .replace("\n", " ")
+                    .replace("\t", " ")
+                )
+
+                # Extract the day using regex
+                print(schedule_text)
+                day_match = re.search(r"on\s*(\w+day)", schedule_text)
+                print(day_match)
+                day = day_match.group(1) if day_match else None
+
+                # Extract the next collection date using regex
+                date_match = re.search(
+                    r"Your next collection date is\s*(\d{2}/\d{2}/\d{4})(.?)",
+                    schedule_text,
+                )
+                if date_match:
+                    next_collection_date = date_match.group(1)
+                else:
+                    next_collection_date = get_next_day_of_week(day, date_format)
+
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": next_collection_date,
+                }
+                bindata["bins"].append(dict_data)
+
+        return bindata
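
One note on this new Lewisham parser: the date logic lives in the two regexes, with get_next_day_of_week as the fallback when the page names a weekday but no explicit date. A minimal standalone sketch of the same extraction, run against a made-up schedule string (the real page wording may differ):

import re

schedule_text = "Food waste is collected every week on Tuesday. Your next collection date is 24/12/2024."

day_match = re.search(r"on\s*(\w+day)", schedule_text)
date_match = re.search(r"Your next collection date is\s*(\d{2}/\d{2}/\d{4})", schedule_text)

print(day_match.group(1) if day_match else None)    # Tuesday
print(date_match.group(1) if date_match else None)  # 24/12/2024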
uk_bin_collection/uk_bin_collection/councils/WestNorthamptonshireCouncil.py
@@ -24,6 +24,7 @@ class CouncilClass(AbstractGetBinDataClass):
 
         collection_day = kwargs.get("paon")
         garden_collection_week = kwargs.get("postcode")
+        garden_collection_day = kwargs.get("uprn")
         bindata = {"bins": []}
 
         days_of_week = [
@@ -42,6 +43,7 @@ class CouncilClass(AbstractGetBinDataClass):
         recyclingstartDate = datetime(2024, 11, 4)
 
         offset_days = days_of_week.index(collection_day)
+        offset_days_garden = days_of_week.index(garden_collection_day)
         if garden_collection_week:
             garden_collection = garden_week.index(garden_collection_week)
 
@@ -155,7 +157,7 @@ class CouncilClass(AbstractGetBinDataClass):
 
             collection_date = (
                 datetime.strptime(gardenDate, "%d/%m/%Y")
-                + timedelta(days=offset_days)
+                + timedelta(days=offset_days_garden)
             ).strftime("%d/%m/%Y")
 
             garden_holiday = next(
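
The fix above matters whenever refuse and garden waste are collected on different weekdays: garden dates were previously shifted by the refuse day's offset. A worked sketch of the offset arithmetic, using hypothetical collection days:

from datetime import datetime, timedelta

days_of_week = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]

# Hypothetical property: refuse collected on Tuesday, garden waste on Thursday.
offset_days = days_of_week.index("Tuesday")          # 1
offset_days_garden = days_of_week.index("Thursday")  # 3

week_start = datetime(2024, 11, 4)  # a Monday, matching the scraper's anchor dates
print((week_start + timedelta(days=offset_days)).strftime("%d/%m/%Y"))         # 05/11/2024 (refuse)
print((week_start + timedelta(days=offset_days_garden)).strftime("%d/%m/%Y"))  # 07/11/2024 (garden)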
uk_bin_collection/uk_bin_collection/councils/MonmouthshireCountyCouncil.py
@@ -0,0 +1,70 @@
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = (
+            f"https://maps.monmouthshire.gov.uk/?action=SetAddress&UniqueId={user_uprn}"
+        )
+
+        # Make the GET request
+        response = requests.get(URI)
+
+        # Parse the HTML
+        soup = BeautifulSoup(response.content, "html.parser")
+
+        waste_collections_div = soup.find("div", {"aria-label": "Waste Collections"})
+
+        # Find all bin collection panels
+        bin_panels = waste_collections_div.find_all("div", class_="atPanelContent")
+
+        current_year = datetime.now().year
+        current_month = datetime.now().month
+
+        for panel in bin_panels:
+            # Extract bin name (e.g., "Household rubbish bag")
+            bin_name = panel.find("h4").text.strip().replace("\r", "").replace("\n", "")
+
+            # Extract collection date (e.g., "Monday 9th December")
+            date_tag = panel.find("p")
+            if date_tag and "Your next collection date is" in date_tag.text:
+                collection_date = date_tag.find("strong").text.strip()
+            else:
+                continue
+
+            collection_date = datetime.strptime(
+                remove_ordinal_indicator_from_date_string(collection_date), "%A %d %B"
+            )
+
+            if (current_month > 9) and (collection_date.month < 4):
+                collection_date = collection_date.replace(year=(current_year + 1))
+            else:
+                collection_date = collection_date.replace(year=current_year)
+
+            dict_data = {
+                "type": bin_name,
+                "collectionDate": collection_date.strftime("%d/%m/%Y"),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
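
Since the Monmouthshire page prints dates without a year, the scraper infers one, rolling Jan-Mar dates into next year once the current date is past September. A small sketch of that rule, with a hypothetical helper name:

from datetime import datetime

def infer_year(day_month: str, today: datetime) -> datetime:
    # Hypothetical helper mirroring the rollover rule in the diff above.
    parsed = datetime.strptime(day_month, "%A %d %B")
    if today.month > 9 and parsed.month < 4:
        return parsed.replace(year=today.year + 1)
    return parsed.replace(year=today.year)

print(infer_year("Monday 6 January", datetime(2024, 12, 9)).strftime("%d/%m/%Y"))
# 06/01/2025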
uk_bin_collection/uk_bin_collection/councils/MorayCouncil.py
@@ -0,0 +1,65 @@
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        bindata = {"bins": []}
+
+        user_uprn = user_uprn.zfill(8)
+
+        year = datetime.today().year
+        response = requests.get(
+            f"https://bindayfinder.moray.gov.uk/cal_{year}_view.php",
+            params={"id": user_uprn},
+        )
+        if response.status_code != 200:
+            # fall back to known good calendar URL
+            response = requests.get(
+                "https://bindayfinder.moray.gov.uk/cal_2024_view.php",
+                params={"id": user_uprn},
+            )
+        soup = BeautifulSoup(response.text, "html.parser")
+
+        bin_types = {
+            "G": "Green",
+            "B": "Brown",
+            "P": "Purple",
+            "C": "Blue",
+            "O": "Orange",
+        }
+
+        for month_container in soup.findAll("div", class_="month-container"):
+            for div in month_container.findAll("div"):
+                if "month-header" in div["class"]:
+                    month = div.text
+                elif div["class"] and div["class"][0] in ["B", "GPOC", "GBPOC"]:
+                    bins = div["class"][0]
+                    dom = int(div.text)
+                    for i in bins:
+                        dict_data = {
+                            "type": bin_types.get(i),
+                            "collectionDate": datetime.strptime(
+                                f"{dom} {month} {year}",
+                                "%d %B %Y",
+                            ).strftime("%d/%m/%Y"),
+                        }
+                        bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
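
The Moray calendar encodes each collection day's bins in the cell's CSS class, one letter per bin colour, which is why the inner loop walks the class string character by character:

bin_types = {"G": "Green", "B": "Brown", "P": "Purple", "C": "Blue", "O": "Orange"}

# One calendar cell whose class is "GPOC" marks four collections on that date.
css_class = "GPOC"
print([bin_types[c] for c in css_class])
# ['Green', 'Purple', 'Orange', 'Blue']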
uk_bin_collection/uk_bin_collection/councils/NewcastleUnderLymeCouncil.py
@@ -0,0 +1,66 @@
+import requests
+from bs4 import BeautifulSoup
+from dateutil.relativedelta import relativedelta
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = f"https://www.newcastle-staffs.gov.uk/homepage/97/check-your-bin-day?uprn={user_uprn}"
+
+        # Make the GET request
+        response = requests.get(URI)
+        response.raise_for_status()
+        soup = BeautifulSoup(response.text, features="html.parser")
+        soup.prettify()
+
+        # Find the table
+        table = soup.find("table", {"class": "data-table"})
+
+        if table:
+            rows = table.find("tbody").find_all("tr")
+            for row in rows:
+                date = datetime.strptime(
+                    (
+                        row.find_all("td")[0]
+                        .get_text(strip=True)
+                        .replace("Date:", "")
+                        .strip()
+                    ),
+                    "%A %d %B",
+                ).replace(year=datetime.now().year)
+                if datetime.now().month > 10 and date.month < 3:
+                    date = date + relativedelta(years=1)
+                bin_types = (
+                    row.find_all("td")[1]
+                    .text.replace("Collection Type:", "")
+                    .splitlines()
+                )
+                for bin_type in bin_types:
+                    bin_type = bin_type.strip()
+                    if bin_type:
+                        dict_data = {
+                            "type": bin_type.strip(),
+                            "collectionDate": date.strftime("%d/%m/%Y"),
+                        }
+                        bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
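
The second cell of each row can list several services separated by line breaks, so the scraper splits on those and keeps each non-empty line as its own bin type. With hypothetical cell text:

cell_text = "Collection Type:\nRecycling\nGarden Waste\n"  # made-up cell contents
bin_types = [
    t.strip()
    for t in cell_text.replace("Collection Type:", "").splitlines()
    if t.strip()
]
print(bin_types)  # ['Recycling', 'Garden Waste']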
uk_bin_collection/uk_bin_collection/councils/NorthHertfordshireDistrictCouncil.py
@@ -0,0 +1,93 @@
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_postcode = kwargs.get("postcode")
+        user_paon = kwargs.get("paon")
+        check_postcode(user_postcode)
+        check_paon(user_paon)
+        bindata = {"bins": []}
+
+        URI = "https://uhtn-wrp.whitespacews.com/"
+
+        session = requests.Session()
+
+        # get the link from the first page, as it carries some kind of unique hash
+        r = session.get(
+            URI,
+        )
+        r.raise_for_status()
+        soup = BeautifulSoup(r.text, features="html.parser")
+
+        alink = soup.find("a", text="Find my bin collection day")
+
+        if alink is None:
+            raise Exception("Initial page did not load correctly")
+
+        # rewrite the 'seq' query string to skip the next step
+        nextpageurl = alink["href"].replace("seq=1", "seq=2")
+
+        data = {
+            "address_name_number": user_paon,
+            "address_postcode": user_postcode,
+        }
+
+        # get list of addresses
+        r = session.post(nextpageurl, data)
+        r.raise_for_status()
+
+        soup = BeautifulSoup(r.text, features="html.parser")
+
+        # take the first address (with too little detail in the arguments this may not be the right one)
+        alink = soup.find("div", id="property_list").find("a")
+
+        if alink is None:
+            raise Exception("Address not found")
+
+        nextpageurl = URI + alink["href"]
+
+        # get collection page
+        r = session.get(
+            nextpageurl,
+        )
+        r.raise_for_status()
+        soup = BeautifulSoup(r.text, features="html.parser")
+
+        if soup.find("span", id="waste-hint"):
+            raise Exception("No scheduled services at this address")
+
+        u1s = soup.find("section", id="scheduled-collections").find_all("u1")
+
+        for u1 in u1s:
+            lis = u1.find_all("li", recursive=False)
+
+            date = lis[1].text.replace("\n", "")
+            bin_type = lis[2].text.replace("\n", "")
+
+            dict_data = {
+                "type": bin_type,
+                "collectionDate": datetime.strptime(
+                    date,
+                    "%d/%m/%Y",
+                ).strftime(date_format),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
+        )
+
+        return bindata
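
The seq rewrite is the one non-obvious step here: bumping the Whitespace portal's wizard counter from seq=1 to seq=2 jumps straight to the address-search form. With a hypothetical href of the kind the landing page might serve (only the seq parameter matters for the trick):

href = "/mop.php?serviceid=1&seq=1"  # made-up link shape, for illustration only
print(href.replace("seq=1", "seq=2"))
# /mop.php?serviceid=1&seq=2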
uk_bin_collection/uk_bin_collection/councils/RoyalBoroughofGreenwich.py
@@ -0,0 +1,113 @@
+import time
+
+import requests
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_postcode = kwargs.get("postcode")
+        user_paon = kwargs.get("paon")
+        check_postcode(user_postcode)
+        check_paon(user_paon)
+        bindata = {"bins": []}
+
+        user_postcode = user_postcode.replace(" ", "+")
+
+        URI = f"https://www.royalgreenwich.gov.uk/site/custom_scripts/apps/waste-collection/new2023/source.php?term={user_postcode}"
+
+        # Make the GET request
+        response = requests.get(URI)
+
+        for address in response.json():
+            if user_paon in address:
+                collection_address = address
+                break
+
+        URI = "https://www.royalgreenwich.gov.uk/site/custom_scripts/repo/apps/waste-collection/new2023/ajax-response-uprn.php"
+
+        data = {"address": collection_address}
+
+        response = requests.post(URI, data=data)
+
+        response = response.json()
+
+        collection_day = response["Day"]
+        week = response["Frequency"]
+
+        days_of_week = [
+            "Monday",
+            "Tuesday",
+            "Wednesday",
+            "Thursday",
+            "Friday",
+            "Saturday",
+            "Sunday",
+        ]
+        collectionweek = ["Week A", "Week B"]
+
+        offset_days = days_of_week.index(collection_day)
+        week = collectionweek.index(week)
+
+        greenstartDate = datetime(2024, 11, 25)
+        bluestartDate = datetime(2024, 11, 25)
+        if week == 0:
+            blackstartDate = datetime(2024, 11, 18)
+        elif week == 1:
+            blackstartDate = datetime(2024, 11, 25)
+
+        green_dates = get_dates_every_x_days(greenstartDate, 7, 100)
+        blue_dates = get_dates_every_x_days(bluestartDate, 7, 100)
+        black_dates = get_dates_every_x_days(blackstartDate, 14, 50)
+
+        for greenDate in green_dates:
+
+            collection_date = (
+                datetime.strptime(greenDate, "%d/%m/%Y") + timedelta(days=offset_days)
+            ).strftime("%d/%m/%Y")
+
+            dict_data = {
+                "type": "Green Bin",
+                "collectionDate": collection_date,
+            }
+            bindata["bins"].append(dict_data)
+
+        for blueDate in blue_dates:
+
+            collection_date = (
+                datetime.strptime(blueDate, "%d/%m/%Y") + timedelta(days=offset_days)
+            ).strftime("%d/%m/%Y")
+
+            dict_data = {
+                "type": "Blue Bin",
+                "collectionDate": collection_date,
+            }
+            bindata["bins"].append(dict_data)
+
+        for blackDate in black_dates:
+
+            collection_date = (
+                datetime.strptime(blackDate, "%d/%m/%Y") + timedelta(days=offset_days)
+            ).strftime("%d/%m/%Y")
+
+            dict_data = {
+                "type": "Black Bin",
+                "collectionDate": collection_date,
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
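
Greenwich's schedule is computed rather than scraped: weekly anchors for the green and blue bins, plus a fortnightly black-bin anchor picked by Week A/B. A standalone stand-in for the get_dates_every_x_days helper (whose output the code above parses as %d/%m/%Y strings) makes the cadence visible:

from datetime import datetime, timedelta

def dates_every_x_days(start: datetime, step: int, count: int) -> list[str]:
    # Stand-in for common.get_dates_every_x_days, assuming it yields
    # %d/%m/%Y strings as the scraper consumes them.
    return [(start + timedelta(days=step * i)).strftime("%d/%m/%Y") for i in range(count)]

# Week A gets the earlier fortnightly black-bin anchor, Week B the later one.
print(dates_every_x_days(datetime(2024, 11, 18), 14, 3))  # ['18/11/2024', '02/12/2024', '16/12/2024']
print(dates_every_x_days(datetime(2024, 11, 25), 14, 3))  # ['25/11/2024', '09/12/2024', '23/12/2024']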
uk_bin_collection/uk_bin_collection/councils/SandwellBoroughCouncil.py
@@ -0,0 +1,87 @@
+import time
+
+import requests
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        SESSION_URL = "https://my.sandwell.gov.uk/authapi/isauthenticated?uri=https%253A%252F%252Fmy.sandwell.gov.uk%252Fen%252FAchieveForms%252F%253Fform_uri%253Dsandbox-publish%253A%252F%252FAF-Process-ebaa26a2-393c-4a3c-84f5-e61564192a8a%252FAF-Stage-e4c2cb32-db55-4ff5-845c-8b27f87346c4%252Fdefinition.json%2526redirectlink%253D%25252Fen%2526cancelRedirectLink%253D%25252Fen%2526consentMessage%253Dyes&hostname=my.sandwell.gov.uk&withCredentials=true"
+
+        API_URL = "https://my.sandwell.gov.uk/apibroker/runLookup"
+
+        headers = {
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+            "User-Agent": "Mozilla/5.0",
+            "X-Requested-With": "XMLHttpRequest",
+            "Referer": "https://my.sandwell.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
+        }
+        s = requests.session()
+        r = s.get(SESSION_URL)
+        r.raise_for_status()
+        session_data = r.json()
+        sid = session_data["auth-session"]
+
+        data = {
+            "formValues": {
+                "Property details": {
+                    "Uprn": {
+                        "value": user_uprn,
+                    },
+                    "NextCollectionFromDate": {
+                        "value": datetime.now().strftime("%Y-%m-%d"),
+                    },
+                },
+            },
+        }
+
+        params = {
+            "id": "58a1a71694992",
+            "repeat_against": "",
+            "noRetry": "false",
+            "getOnlyTokens": "undefined",
+            "log_id": "",
+            "app_name": "AF-Renderer::Self",
+            # unix_timestamp
+            "_": str(int(time.time() * 1000)),
+            "sid": sid,
+        }
+
+        r = s.post(API_URL, json=data, headers=headers, params=params)
+        r.raise_for_status()
+
+        data = r.json()
+        rows_data = data["integration"]["transformed"]["rows_data"]
+        if not isinstance(rows_data, dict):
+            raise ValueError("Invalid data returned from API")
+        bin_types = {
+            "Recycling (Blue)",
+            "Household Waste (Grey)",
+            "Food Waste (Brown)",
+            "Garden Waste (Green)",
+        }
+        for row in rows_data.items():
+            date = row[1]["DWDate"]
+            for bin_type in bin_types:
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": date,
+                }
+                bindata["bins"].append(dict_data)
+
+        return bindata
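
runLookup returns its result rows keyed by index, which is why the loop iterates rows_data.items() and pulls DWDate out of each value. A hypothetical payload of that shape (the real response nests it under data["integration"]["transformed"]["rows_data"]):

rows_data = {
    "0": {"DWDate": "02/12/2024"},  # made-up rows, for illustration only
    "1": {"DWDate": "16/12/2024"},
}
for _, row in rows_data.items():
    print(row["DWDate"])
# 02/12/2024
# 16/12/2024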