uk_bin_collection 0.134.0__py3-none-any.whl → 0.135.2__py3-none-any.whl

This diff shows the content changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (25)
  1. uk_bin_collection/tests/check_selenium_url_in_input.json.py +209 -0
  2. uk_bin_collection/tests/input.json +58 -8
  3. uk_bin_collection/uk_bin_collection/councils/AmberValleyBoroughCouncil.py +60 -0
  4. uk_bin_collection/uk_bin_collection/councils/BolsoverCouncil.py +298 -0
  5. uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +254 -48
  6. uk_bin_collection/uk_bin_collection/councils/CherwellDistrictCouncil.py +75 -0
  7. uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py +11 -3
  8. uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py +3 -5
  9. uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py +54 -50
  10. uk_bin_collection/uk_bin_collection/councils/EpsomandEwellBoroughCouncil.py +86 -0
  11. uk_bin_collection/uk_bin_collection/councils/GloucesterCityCouncil.py +1 -1
  12. uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py +2 -1
  13. uk_bin_collection/uk_bin_collection/councils/MiddlesbroughCouncil.py +100 -0
  14. uk_bin_collection/uk_bin_collection/councils/NeathPortTalbotCouncil.py +2 -0
  15. uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py +17 -15
  16. uk_bin_collection/uk_bin_collection/councils/RedcarandClevelandCouncil.py +108 -0
  17. uk_bin_collection/uk_bin_collection/councils/RunnymedeBoroughCouncil.py +54 -0
  18. uk_bin_collection/uk_bin_collection/councils/SunderlandCityCouncil.py +21 -15
  19. uk_bin_collection/uk_bin_collection/councils/TendringDistrictCouncil.py +1 -1
  20. uk_bin_collection/uk_bin_collection/councils/TorridgeDistrictCouncil.py +1 -35
  21. {uk_bin_collection-0.134.0.dist-info → uk_bin_collection-0.135.2.dist-info}/METADATA +1 -1
  22. {uk_bin_collection-0.134.0.dist-info → uk_bin_collection-0.135.2.dist-info}/RECORD +25 -17
  23. {uk_bin_collection-0.134.0.dist-info → uk_bin_collection-0.135.2.dist-info}/LICENSE +0 -0
  24. {uk_bin_collection-0.134.0.dist-info → uk_bin_collection-0.135.2.dist-info}/WHEEL +0 -0
  25. {uk_bin_collection-0.134.0.dist-info → uk_bin_collection-0.135.2.dist-info}/entry_points.txt +0 -0
uk_bin_collection/uk_bin_collection/councils/BolsoverCouncil.py
@@ -0,0 +1,298 @@
+ import time
+
+ import requests
+
+ from uk_bin_collection.uk_bin_collection.common import *
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+ # import the wonderful Beautiful Soup and the URL grabber
+ class CouncilClass(AbstractGetBinDataClass):
+     """
+     Concrete classes have to implement all abstract operations of the
+     base class. They can also override some operations with a default
+     implementation.
+     """
+
+     def parse_data(self, page: str, **kwargs) -> dict:
+
+         user_uprn = kwargs.get("uprn")
+         check_uprn(user_uprn)
+         bindata = {"bins": []}
+
+         SESSION_URL = "https://selfservice.bolsover.gov.uk/authapi/isauthenticated?uri=https%253A%252F%252Fselfservice.bolsover.gov.uk%252Fservice%252FCheck_your_Bin_Day&hostname=selfservice.bolsover.gov.uk&withCredentials=true"
+
+         API_URL = "https://selfservice.bolsover.gov.uk/apibroker/runLookup"
+
+         data = {
+             "formValues": {"Bin Collection": {"uprnLoggedIn": {"value": user_uprn}}},
+         }
+         headers = {
+             "Content-Type": "application/json",
+             "Accept": "application/json",
+             "User-Agent": "Mozilla/5.0",
+             "X-Requested-With": "XMLHttpRequest",
+             "Referer": "https://selfservice.bolsover.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
+         }
+         s = requests.session()
+         r = s.get(SESSION_URL)
+         r.raise_for_status()
+         session_data = r.json()
+         sid = session_data["auth-session"]
+         params = {
+             "id": "6023d37e037c3",
+             "repeat_against": "",
+             "noRetry": "true",
+             "getOnlyTokens": "undefined",
+             "log_id": "",
+             "app_name": "AF-Renderer::Self",
+             # unix_timestamp
+             "_": str(int(time.time() * 1000)),
+             "sid": sid,
+         }
+
+         r = s.post(API_URL, json=data, headers=headers, params=params)
+         r.raise_for_status()
+
+         data = r.json()
+         rows_data = data["integration"]["transformed"]["rows_data"]["0"]
+         if not isinstance(rows_data, dict):
+             raise ValueError("Invalid data returned from API")
+
+         # print(rows_data)
+
+         route = rows_data["Route"]
+
+         # print(route)
+
+         def get_route_number(route):
+             if route[:2] == "Mo":
+                 return 0
+             elif route[:2] == "Tu":
+                 return 1
+             elif route[:2] == "We":
+                 return 2
+             elif route[:2] == "Th":
+                 return 3
+             elif route[:2] == "Fr":
+                 return 4
+             else:
+                 return None  # Default case if none of the conditions match
+
+         dayOfCollectionAsNumber = get_route_number(route)
+         # print(dayOfCollectionAsNumber)
+
+         def calculate_collection_date(
+             dayOfCollectionAsNumber,
+             currentDayAsNumber,
+             today,
+             dayDiffPlus,
+             dayDiffMinus,
+         ):
+             if dayOfCollectionAsNumber == currentDayAsNumber:
+                 return today
+             elif dayOfCollectionAsNumber > currentDayAsNumber:
+                 return today + timedelta(days=dayDiffPlus)
+             else:
+                 return today + timedelta(days=dayDiffMinus)
+
+         # Example usage
+         today = datetime.today()  # Current date
+         currentDayAsNumber = today.weekday()
+         dayDiffPlus = dayOfCollectionAsNumber - currentDayAsNumber
+         dayDiffMinus = dayOfCollectionAsNumber - currentDayAsNumber + 7
+
+         week1 = calculate_collection_date(
+             dayOfCollectionAsNumber,
+             currentDayAsNumber,
+             today,
+             dayDiffPlus,
+             dayDiffMinus,
+         )
+         week2 = week1 + timedelta(days=7)
+         week3 = week2 + timedelta(days=7)
+         week4 = week3 + timedelta(days=7)
+
+         # print(week1.strftime(date_format))
+         # print(week2.strftime(date_format))
+         # print(week3.strftime(date_format))
+         # print(week4.strftime(date_format))
+
+         greenSusStart = datetime.strptime("2024-11-08", "%Y-%m-%d")
+         greenSusEnd = datetime.strptime("2025-03-18", "%Y-%m-%d")
+
+         def is_within_green_sus(dtDay0, greenSusStart, greenSusEnd):
+             return "Yes" if greenSusStart <= dtDay0 < greenSusEnd else "No"
+
+         week1InSus = is_within_green_sus(week1, greenSusStart, greenSusEnd)
+         week2InSus = is_within_green_sus(week2, greenSusStart, greenSusEnd)
+         week3InSus = is_within_green_sus(week3, greenSusStart, greenSusEnd)
+         week4InSus = is_within_green_sus(week4, greenSusStart, greenSusEnd)
+
+         # print(week1InSus)
+         # print(week2InSus)
+         # print(week3InSus)
+         # print(week4InSus)
+
+         WeekBlack = rows_data["WeekBlack"]
+         WeekBandG = rows_data["WeekBandG"]
+
+         if WeekBlack == "1":
+             WeekBandG = ""
+         if WeekBandG == "1":
+             WeekBlack = ""
+
+         def determine_bin_collection_week1(
+             txtBlack, txtBurgGreen, dtDay0, today, week1InSus
+         ):
+             # Check for empty values
+             if txtBlack == "" and txtBurgGreen == "":
+                 return ""
+
+             # Black Bin Collection
+             if txtBlack == "1" and dtDay0 >= today:
+                 return "Black Bin"
+
+             # Burgundy Bin Collection
+             if txtBurgGreen == "1" and dtDay0 > today:
+                 if week1InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week1InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             # Default cases based on week1InSus
+             if txtBlack == "" and dtDay0 >= today:
+                 if week1InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week1InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             return ""  # Default empty case
+
+         def determine_bin_collection_week2(
+             txtBlack, txtBurgGreen, dtDay7, today, week2InSus
+         ):
+             # Check for empty values
+             if txtBlack == "" and txtBurgGreen == "":
+                 return ""
+
+             # Black Bin Collection
+             if txtBlack == "" and dtDay7 >= today:
+                 return "Black Bin"
+
+             # Burgundy Bin Collection (week2InSus check)
+             if txtBurgGreen == "1" and dtDay7 > today:
+                 if week2InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week2InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             # Burgundy Bin Collection for txtBlack = '1'
+             if txtBlack == "1" and dtDay7 >= today:
+                 if week2InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week2InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             return ""  # Default empty case
+
+         def determine_bin_collection_week3(
+             txtBlack, txtBurgGreen, dtDay14, today, week3InSus
+         ):
+             # Check for empty values
+             if txtBlack == "" and txtBurgGreen == "":
+                 return ""
+
+             # Black Bin Collection
+             if txtBlack == "1" and dtDay14 >= today:
+                 return "Black Bin"
+
+             # Burgundy Bin Collection (week3InSus check)
+             if txtBurgGreen == "1" and dtDay14 > today:
+                 if week3InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week3InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             # Burgundy Bin Collection for txtBlack = ''
+             if txtBlack == "" and dtDay14 >= today:
+                 if week3InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week3InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             return ""  # Default empty case
+
+         def determine_bin_collection_week4(
+             txtBlack, txtBurgGreen, dtDay21, today, week4InSus
+         ):
+             # Check for empty values
+             if txtBlack == "" and txtBurgGreen == "":
+                 return ""
+
+             # Black Bin Collection
+             if txtBlack == "" and dtDay21 >= today:
+                 return "Black Bin"
+
+             # Burgundy Bin Collection (week4InSus check)
+             if txtBurgGreen == "1" and dtDay21 > today:
+                 if week4InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week4InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             # Burgundy Bin Collection for txtBlack = '1'
+             if txtBlack == "1" and dtDay21 >= today:
+                 if week4InSus == "Yes":
+                     return "Burgundy Bin"
+                 elif week4InSus == "No":
+                     return "Burgundy Bin & Green Bin"
+
+             return ""  # Default empty case
+
+         week1Text = determine_bin_collection_week1(
+             WeekBlack, WeekBandG, week1, today, week1InSus
+         )
+         week2Text = determine_bin_collection_week2(
+             WeekBlack, WeekBandG, week2, today, week2InSus
+         )
+         week3Text = determine_bin_collection_week3(
+             WeekBlack, WeekBandG, week3, today, week3InSus
+         )
+         week4Text = determine_bin_collection_week4(
+             WeekBlack, WeekBandG, week4, today, week4InSus
+         )
+
+         # print(week1Text)
+         # print(week2Text)
+         # print(week3Text)
+         # print(week4Text)
+
+         week_data = [
+             (week1Text, week1),
+             (week2Text, week2),
+             (week3Text, week3),
+             (week4Text, week4),
+         ]
+
+         # print(week_data)
+
+         # Iterate through the array
+         for week_text, week_date in week_data:
+             # Check if '&' exists and split
+             if "&" in week_text:
+                 split_texts = [text.strip() for text in week_text.split("&")]
+                 for text in split_texts:
+                     dict_data = {
+                         "type": text,
+                         "collectionDate": week_date.strftime(date_format),
+                     }
+                     bindata["bins"].append(dict_data)
+             else:
+                 dict_data = {
+                     "type": week_text,
+                     "collectionDate": week_date.strftime(date_format),
+                 }
+                 bindata["bins"].append(dict_data)
+
+         return bindata
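
Note on the new BolsoverCouncil.py scraper: get_route_number and calculate_collection_date together just resolve "the next occurrence of the route's weekday". The dayDiffPlus/dayDiffMinus branching is equivalent to a single modulo-7 offset; a minimal standalone sketch of that calculation (the helper name and sample route string are illustrative, not part of the package):

from datetime import datetime, timedelta

# Weekday number by route-name prefix, mirroring get_route_number (Mon=0 .. Fri=4).
ROUTE_DAYS = {"Mo": 0, "Tu": 1, "We": 2, "Th": 3, "Fr": 4}

def next_collection(route: str, today: datetime) -> datetime:
    day = ROUTE_DAYS.get(route[:2])
    if day is None:
        raise ValueError(f"Unrecognised route: {route!r}")
    # (day - weekday) % 7 collapses the dayDiffPlus / dayDiffMinus branches:
    # it is 0 when the collection day is today, otherwise the days until the next one.
    return today + timedelta(days=(day - today.weekday()) % 7)

# 2025-01-13 is a Monday, so a Wednesday route resolves two days ahead.
assert next_collection("Wednesday 2", datetime(2025, 1, 13)) == datetime(2025, 1, 15)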
uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py
@@ -1,18 +1,18 @@
- import re
- import time
+ # import re

  import requests
- from bs4 import BeautifulSoup
- from selenium.webdriver.common.by import By
- from selenium.webdriver.support import expected_conditions as EC
- from selenium.webdriver.support.ui import Select
- from selenium.webdriver.support.wait import WebDriverWait

- from uk_bin_collection.uk_bin_collection.common import *
+ from uk_bin_collection.uk_bin_collection.common import (
+     check_postcode,
+     check_uprn,
+     datetime,
+     get_dates_every_x_days,
+     json,
+     timedelta,
+ )
  from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass


- # import the wonderful Beautiful Soup and the URL grabber
  class CouncilClass(AbstractGetBinDataClass):
      """
      Concrete classes have to implement all abstract operations of the
@@ -20,42 +20,193 @@ class CouncilClass(AbstractGetBinDataClass):
      implementation.
      """

-     def parse_data(self, page: str, **kwargs) -> dict:
+     def parse_data(self, page: str, **kwargs: str) -> dict[str, list[dict[str, str]]]:
+         if (postcode := kwargs.get("postcode")) is None:
+             raise KeyError("Missing: postcode")
+         if (uprn := kwargs.get("uprn")) is None:
+             raise KeyError("Missing: uprn")
+         check_postcode(postcode)
+         check_uprn(uprn)
+         bindata: dict[str, list[dict[str, str]]] = {"bins": []}
+         location_x: int = 0
+         location_y: int = 0
+         location_usrn: str = ""

-         collection_day = kwargs.get("paon")
-         collection_week = kwargs.get("postcode")
-         bindata = {"bins": []}
+         # Ensure any cookies set are maintained in a requests session
+         s = requests.session()
+
+         # Ask for a new SessionId from the server
+         session_id_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/"\
+             "RequestSession?userName=guest+CBC&password=&"\
+             "script=%5CAurora%5CCBC+Waste+Streets.AuroraScript%24"
+         session_id_response = s.get(session_id_url)
+         session_id_response.raise_for_status()
+         session_id = session_id_response.json().get("Session").get("SessionId")
+
+         # Ask what tasks we can do within the session
+         tasks_url = f"https://maps.cheltenham.gov.uk/map/Aurora.svc/"\
+             f"GetWorkflow?sessionId={session_id}&workflowId=wastestreet"
+         tasks_response = s.get(tasks_url)
+         tasks_response.raise_for_status()
+         # JSON response contained a BOM marker
+         tasks = json.loads(tasks_response.text[1:])
+         retrieve_results_task_id, initialise_map_task_id, drilldown_task_id = None, None, None
+         # Pull out the ID's of the tasks we will need
+         for task in tasks.get("Tasks"):
+             if task.get("$type") == "StatMap.Aurora.FetchResultSetTask, StatMapService":
+                 retrieve_results_task_id = task.get("Id")
+             elif task.get("$type") == "StatMap.Aurora.SaveStateTask, StatMapService":
+                 initialise_map_task_id = task.get("Id")
+             elif task.get("$type") == "StatMap.Aurora.DrillDownTask, StatMapService":
+                 drilldown_task_id = task.get("Id")
+         if not all([retrieve_results_task_id, initialise_map_task_id, drilldown_task_id]):
+             raise ValueError("Not all task ID's found")
+
+         # Find the X / Y coordinates for the requested postcode
+         postcode_search_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/FindLocation?"\
+             f"sessionId={session_id}&address={postcode}&limit=1000"
+         postcode_search_response = s.get(postcode_search_url)
+         postcode_search_response.raise_for_status()
+         if len(locations_list := postcode_search_response.json().get("Locations")) == 0:
+             raise ValueError("Address locations empty")
+         for location in locations_list:
+             location_search_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/FindLocation?"\
+                 f"sessionId={session_id}&locationId={location.get('Id')}"
+             location_search_response = s.get(location_search_url)
+             location_search_response.raise_for_status()
+             if not (location_list := location_search_response.json().get("Locations")):
+                 raise KeyError("Locations wasn't present in results")
+             if not (location_detail := location_list[0].get("Details")):
+                 raise KeyError("Details wasn't present in location")
+             location_uprn = [detail.get(
+                 "Value") for detail in location_detail if detail.get("Name") == "UPRN"][0]
+             if str(location_uprn) == uprn:
+                 location_usrn = str([detail.get(
+                     "Value") for detail in location_detail if detail.get("Name") == "USRN"][0])
+                 location_x = location_list[0].get("X")
+                 location_y = location_list[0].get("Y")
+                 break
+
+         # Needed to initialise the server to allow follow on call
+         open_map_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/OpenScriptMap?"\
+             f"sessionId={session_id}"
+         if res := s.get(open_map_url):
+             res.raise_for_status()
+
+         # Needed to initialise the server to allow follow on call
+         save_state_map_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"\
+             f"sessionId={session_id}&taskId={initialise_map_task_id}&job="\
+             "%7BTask%3A+%7B+%24type%3A+%27StatMap.Aurora.SaveStateTask%2C"\
+             "+StatMapService%27+%7D%7D"
+         if res := s.get(save_state_map_url):
+             res.raise_for_status()
+
+         # Start search for address given by x / y coord
+         drilldown_map_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"\
+             f"sessionId={session_id}&taskId={drilldown_task_id}&job=%7B%22"\
+             f"QueryX%22%3A{location_x}%2C%22QueryY%22%3A{location_y}%2C%22"\
+             "Task%22%3A%7B%22Type%22%3A%22StatMap.Aurora.DrillDownTask%2C"\
+             "+StatMapService%22%7D%7D"
+         if res := s.get(drilldown_map_url):
+             res.raise_for_status()
+
+         # Get results from search for address given by x / y coord
+         address_details_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"\
+             f"sessionId={session_id}&taskId={retrieve_results_task_id}"\
+             f"&job=%7B%22QueryX%22%3A{location_x}%2C%22QueryY%22%3A"\
+             f"{location_y}%2C%22Task%22%3A%7B%22Type%22%3A%22"\
+             "StatMap.Aurora.FetchResultSetTask%2C+StatMapService"\
+             "%22%2C%22ResultSetName%22%3A%22inspection%22%7D%7D"
+         address_details_response = s.get(address_details_url)
+         address_details_response.raise_for_status()
+         # JSON response contained a BOM marker, skip first character
+         address_details = json.loads(address_details_response.text[1:])
+         if not (task_results := address_details.get("TaskResult")):
+             raise KeyError("TaskResult wasn't present in results")
+         if not (distance_export_set := task_results.get("DistanceOrderedSet")):
+             raise KeyError("DistanceOrderedSet wasn't present in TaskResult")
+         if not (result_set := distance_export_set.get("ResultSet")):
+             raise KeyError("ResultSet wasn't present in DistanceOrderedSet")
+         if not (result_tables := result_set.get("Tables")):
+             raise KeyError("Tables wasn't present in ResultSet")
+         result = result_tables[0]
+         column_names: dict[int, str] = {}
+         result_dict: dict[str, str | int] = {}
+         for column in result.get("ColumnDefinitions"):
+             column_names[column.get("ColumnIndex")] = column.get("ColumnName")
+         for r in result.get("Records"):
+             result_dict: dict[str, str | int] = {}
+             for idx, column_value in enumerate(r):
+                 if not (column_name := column_names.get(idx)):
+                     raise IndexError("Column index out of range")
+                 result_dict[column_name.upper()] = column_value
+             # Validate the street against the USRN. Some locations can return multiple results.
+             # Break on first match of USRN
+             # TODO: Need to select the correct option out of all available options
+             if location_usrn == str(result_dict.get("USRN")):
+                 break
+
+         refuse_week, recycling_week, garden_week = 0, 0, 0
+         # After we've got the correct result, pull out the week number each bin type is taken on
+         if (refuse_week_raw := result_dict.get("New_Refuse_Week".upper())) is not None:
+             refuse_week = int(refuse_week_raw)
+         if (recycling_week_raw := result_dict.get("New_Recycling_Week".upper())) is not None:
+             recycling_week = int(recycling_week_raw)
+         if (garden_week_raw := result_dict.get("Garden_Bin_Week".upper())) is not None:
+             garden_week = int(garden_week_raw)
+
+         if not all([refuse_week, recycling_week, garden_week]):
+             raise KeyError("Not all week numbers found")

          days_of_week = [
-             "Monday",
-             "Tuesday",
-             "Wednesday",
-             "Thursday",
-             "Friday",
-             "Saturday",
-             "Sunday",
+             "MON",
+             "TUE",
+             "WED",
+             "THU",
+             "FRI",
+             "SAT",
+             "SUN",
          ]

-         week = ["Week 1", "Week 2"]
+         refuse_day_offset = days_of_week.index(
+             str(result_dict.get("New_Refuse_Day_internal".upper())).upper())
+         recycling_day_offset = days_of_week.index(
+             str(result_dict.get("New_Recycling_Day".upper())).upper())
+         garden_day_offset = days_of_week.index(
+             str(result_dict.get("New_Garden_Day".upper())).upper())
+         food_day_offset = days_of_week.index(
+             str(result_dict.get("New_Food_Day".upper())).upper())

-         offset_days = days_of_week.index(collection_day)
-         collection_week = week.index(collection_week)
+         # Initialise WEEK-1/WEEK-2 based on known details
+         week_1_epoch = datetime(2025, 1, 13)

-         if collection_week == 0:
-             refusestartDate = datetime(2024, 11, 25)
-             recyclingstartDate = datetime(2024, 11, 18)
-         else:
-             refusestartDate = datetime(2024, 11, 18)
-             recyclingstartDate = datetime(2024, 11, 25)
+         # Start of this week
+         this_week = datetime.now() - timedelta(days=datetime.now().weekday())

-         refuse_dates = get_dates_every_x_days(refusestartDate, 14, 28)
-         recycling_dates = get_dates_every_x_days(recyclingstartDate, 14, 28)
-         food_dates = get_dates_every_x_days(recyclingstartDate, 7, 56)
+         # If there's an even number of weeks between the week-1
+         # epoch and this week, then this week is of type week-1
+         if (((this_week - week_1_epoch).days // 7)) % 2 == 0:
+             week = {
+                 1: this_week,
+                 2: this_week + timedelta(days=7)
+             }
+         else:
+             week = {
+                 1: this_week - timedelta(days=7),
+                 2: this_week
+             }

-         for refuseDate in refuse_dates:
+         refuse_dates: list[str] = get_dates_every_x_days(
+             week[refuse_week], 14, 28)
+         recycling_dates: list[str] = get_dates_every_x_days(
+             week[recycling_week], 14, 28)
+         garden_dates: list[str] = get_dates_every_x_days(
+             week[garden_week], 14, 28)

+         for refuse_date in refuse_dates:
              collection_date = (
-                 datetime.strptime(refuseDate, "%d/%m/%Y") + timedelta(days=offset_days)
+                 datetime.strptime(refuse_date, "%d/%m/%Y") +
+                 timedelta(days=refuse_day_offset)
              ).strftime("%d/%m/%Y")

              dict_data = {
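
Note on the WEEK-1/WEEK-2 bookkeeping introduced above: whether the current week is a week-1 or a week-2 is derived from week parity against a hard-coded epoch (13 January 2025, a known week-1 Monday). A standalone sketch of just that calculation, under the same assumptions as the diff (the helper name is illustrative):

from datetime import datetime, timedelta

WEEK_1_EPOCH = datetime(2025, 1, 13)  # known week-1 Monday, per the diff

def fortnight_weeks(now: datetime) -> dict[int, datetime]:
    # Monday of the current week.
    this_week = now - timedelta(days=now.weekday())
    # An even number of whole weeks since the epoch means this week is week 1.
    if ((this_week - WEEK_1_EPOCH).days // 7) % 2 == 0:
        return {1: this_week, 2: this_week + timedelta(days=7)}
    return {1: this_week - timedelta(days=7), 2: this_week}

# 2025-01-22 sits one week after the epoch, so its own week is week 2.
assert fortnight_weeks(datetime(2025, 1, 22)) == {
    1: datetime(2025, 1, 13),
    2: datetime(2025, 1, 20),
}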
@@ -64,11 +215,11 @@ class CouncilClass(AbstractGetBinDataClass):
              }
              bindata["bins"].append(dict_data)

-         for recyclingDate in recycling_dates:
+         for recycling_date in recycling_dates:

              collection_date = (
-                 datetime.strptime(recyclingDate, "%d/%m/%Y")
-                 + timedelta(days=offset_days)
+                 datetime.strptime(recycling_date, "%d/%m/%Y") +
+                 timedelta(days=recycling_day_offset)
              ).strftime("%d/%m/%Y")

              dict_data = {
@@ -77,26 +228,81 @@ class CouncilClass(AbstractGetBinDataClass):
              }
              bindata["bins"].append(dict_data)

-         dict_data = {
-             "type": "Garden Waste Bin",
-             "collectionDate": collection_date,
-         }
-         bindata["bins"].append(dict_data)
-
-         for food_date in food_dates:
+         for garden_date in garden_dates:

              collection_date = (
-                 datetime.strptime(food_date, "%d/%m/%Y") + timedelta(days=offset_days)
+                 datetime.strptime(garden_date, "%d/%m/%Y") +
+                 timedelta(days=garden_day_offset)
              ).strftime("%d/%m/%Y")

              dict_data = {
-                 "type": "Food Waste Bin",
+                 "type": "Garden Waste Bin",
                  "collectionDate": collection_date,
              }
              bindata["bins"].append(dict_data)

+         if ((food_waste_week := str(result_dict.get("FOOD_WASTE_WEEK_EXTERNAL", "")).upper())
+                 == "weekly".upper()):
+             food_dates: list[str] = get_dates_every_x_days(week[1], 7, 56)
+
+             for food_date in food_dates:
+
+                 collection_date = (
+                     datetime.strptime(food_date, "%d/%m/%Y") +
+                     timedelta(days=food_day_offset)
+                 ).strftime("%d/%m/%Y")
+
+                 dict_data = {
+                     "type": "Food Waste Bin",
+                     "collectionDate": collection_date,
+                 }
+                 bindata["bins"].append(dict_data)
+         # Sometimes the food bin is collected on different days between
+         # week-1 and week-2
+         else:
+             first_week: str | int
+             second_week_detail: str
+             first_week, _, second_week_detail = food_waste_week.partition("&")
+             first_week = int(first_week.strip())
+
+             second_week_day, _, second_week_number = second_week_detail.partition(
+                 "WEEK")
+             second_week_number = int(second_week_number.strip())
+             second_week_day: str = second_week_day.strip()[:3]
+
+             food_dates_first: list[str] = get_dates_every_x_days(
+                 week[first_week], 14, 28)
+             food_dates_second: list[str] = get_dates_every_x_days(
+                 week[second_week_number], 14, 28)
+             second_week_offset = days_of_week.index(second_week_day)
+
+             for food_date in food_dates_first:
+
+                 collection_date = (
+                     datetime.strptime(food_date, "%d/%m/%Y") +
+                     timedelta(days=food_day_offset)
+                 ).strftime("%d/%m/%Y")
+
+                 dict_data = {
+                     "type": "Food Waste Bin",
+                     "collectionDate": collection_date,
+                 }
+                 bindata["bins"].append(dict_data)
+             for food_date in food_dates_second:
+
+                 collection_date = (
+                     datetime.strptime(food_date, "%d/%m/%Y") +
+                     timedelta(days=second_week_offset)
+                 ).strftime("%d/%m/%Y")
+
+                 dict_data = {
+                     "type": "Food Waste Bin",
+                     "collectionDate": collection_date,
+                 }
+                 bindata["bins"].append(dict_data)
+
          bindata["bins"].sort(
-             key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+             key=lambda x: datetime.strptime(
+                 x.get("collectionDate", ""), "%d/%m/%Y")
          )
-
          return bindata
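
Note on the food-waste branch: when FOOD_WASTE_WEEK_EXTERNAL is not "WEEKLY", the code above assumes a value shaped like "1 & Thu week 2" and splits it with str.partition. A sketch of that parse in isolation (the sample string is hypothetical; the real value comes from the Aurora result set):

def parse_food_waste_week(value: str) -> tuple[int, str, int]:
    # "1 & THU WEEK 2" -> (1, "THU", 2): first collection week,
    # second collection day (3-letter), second collection week.
    first_week, _, second_detail = value.upper().partition("&")
    second_day, _, second_number = second_detail.partition("WEEK")
    return int(first_week.strip()), second_day.strip()[:3], int(second_number.strip())

assert parse_food_waste_week("1 & Thu week 2") == (1, "THU", 2)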