uk_bin_collection 0.116.0__py3-none-any.whl → 0.118.0__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
@@ -81,9 +81,10 @@
81
81
  "BaberghDistrictCouncil": {
82
82
  "skip_get_url": true,
83
83
  "house_number": "Monday",
84
+ "postcode": "Week 1",
84
85
  "url": "https://www.babergh.gov.uk",
85
86
  "wiki_name": "Babergh District Council",
86
- "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. Monday/Tuesday/Wednesday/Thursday/Friday"
87
+ "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. Monday/Tuesday/Wednesday/Thursday/Friday. [OPTIONAL] Use the 'postcode' field to pass the WEEK for your garden collection. [Week 1/Week 2]"
87
88
  },
88
89
  "BCPCouncil": {
89
90
  "skip_get_url": true,
@@ -397,6 +398,12 @@
397
398
  "wiki_name": "Conwy County Borough Council",
398
399
  "wiki_note": "Conwy County Borough Council uses a straight UPRN in the URL, e.g., `&uprn=XXXXXXXXXXXXX`."
399
400
  },
401
+ "CopelandBoroughCouncil": {
402
+ "uprn": "100110734613",
403
+ "url": "https://www.copeland.gov.uk",
404
+ "wiki_name": "Copeland Borough Council",
405
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
406
+ },
400
407
  "CornwallCouncil": {
401
408
  "skip_get_url": true,
402
409
  "uprn": "100040128734",
@@ -1005,9 +1012,10 @@
1005
1012
  "MidSuffolkDistrictCouncil": {
1006
1013
  "skip_get_url": true,
1007
1014
  "house_number": "Monday",
1015
+ "postcode": "Week 2",
1008
1016
  "url": "https://www.midsuffolk.gov.uk",
1009
1017
  "wiki_name": "Mid Suffolk District Council",
1010
- "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. Monday/Tuesday/Wednesday/Thursday/Friday"
1018
+ "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. Monday/Tuesday/Wednesday/Thursday/Friday. [OPTIONAL] Use the 'postcode' field to pass the WEEK for your garden collection. [Week 1/Week 2]"
1011
1019
  },
1012
1020
  "MidSussexDistrictCouncil": {
1013
1021
  "house_number": "OAKLANDS, OAKLANDS ROAD RH16 1SS",
@@ -1303,6 +1311,12 @@
1303
1311
  "wiki_name": "Rochford Council",
1304
1312
  "wiki_note": "No extra parameters are required. Dates presented should be read as 'week commencing'."
1305
1313
  },
1314
+ "RotherDistrictCouncil": {
1315
+ "uprn": "100061937338",
1316
+ "url": "https://www.rother.gov.uk",
1317
+ "wiki_name": "Rother District Council",
1318
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1319
+ },
1306
1320
  "RotherhamCouncil": {
1307
1321
  "url": "https://www.rotherham.gov.uk/bin-collections?address=100050866000&submit=Submit",
1308
1322
  "uprn": "100050866000",
@@ -1412,6 +1426,12 @@
1412
1426
  "wiki_name": "South Gloucestershire Council",
1413
1427
  "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)."
1414
1428
  },
1429
+ "SouthHamsDistrictCouncil": {
1430
+ "uprn": "10004742851",
1431
+ "url": "https://www.southhams.gov.uk",
1432
+ "wiki_name": "South Hams District Council",
1433
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1434
+ },
1415
1435
  "SouthKestevenDistrictCouncil": {
1416
1436
  "house_number": "2 Althorpe Close, Market Deeping, PE6 8BL",
1417
1437
  "postcode": "PE68BL",
@@ -1448,6 +1468,12 @@
1448
1468
  "wiki_name": "South Ribble Council",
1449
1469
  "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1450
1470
  },
1471
+ "SouthStaffordshireDistrictCouncil": {
1472
+ "uprn": "200004523954",
1473
+ "url": "https://www.sstaffs.gov.uk/where-i-live?uprn=200004523954",
1474
+ "wiki_name": "South Staffordshire District Council",
1475
+ "wiki_note": "The URL needs to be `https://www.sstaffs.gov.uk/where-i-live?uprn=<Your_UPRN>`. Replace `<Your_UPRN>` with your UPRN."
1476
+ },
1451
1477
  "SouthTynesideCouncil": {
1452
1478
  "house_number": "1",
1453
1479
  "postcode": "NE33 3JW",
@@ -1470,6 +1496,12 @@
1470
1496
  "wiki_name": "St Albans City and District Council",
1471
1497
  "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)."
1472
1498
  },
1499
+ "StevenageBoroughCouncil": {
1500
+ "uprn": "100080878852",
1501
+ "url": "https://www.stevenage.gov.uk",
1502
+ "wiki_name": "Stevenage Borough Council",
1503
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1504
+ },
1473
1505
  "StHelensBC": {
1474
1506
  "house_number": "15",
1475
1507
  "postcode": "L34 2GA",
@@ -1624,6 +1656,12 @@
1624
1656
  "wiki_name": "Test Valley Borough Council",
1625
1657
  "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1626
1658
  },
1659
+ "ThanetDistrictCouncil": {
1660
+ "uprn": "100061111858",
1661
+ "url": "https://www.thanet.gov.uk",
1662
+ "wiki_name": "Thanet District Council",
1663
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1664
+ },
1627
1665
  "ThreeRiversDistrictCouncil": {
1628
1666
  "postcode": "WD3 7AZ",
1629
1667
  "skip_get_url": true,
@@ -1891,6 +1929,13 @@
1891
1929
  "wiki_name": "Worcester City Council",
1892
1930
  "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)."
1893
1931
  },
1932
+ "WolverhamptonCityCouncil": {
1933
+ "uprn": "100071205205",
1934
+ "postcode": "WV3 9NZ",
1935
+ "url": "https://www.wolverhampton.gov.uk",
1936
+ "wiki_name": "Wolverhampton City Council",
1937
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN."
1938
+ },
1894
1939
  "WorcesterCityCouncil": {
1895
1940
  "url": "https://www.Worcester.gov.uk",
1896
1941
  "wiki_command_url_override": "https://www.Worcester.gov.uk",
@@ -23,6 +23,7 @@ class CouncilClass(AbstractGetBinDataClass):
23
23
  def parse_data(self, page: str, **kwargs) -> dict:
24
24
 
25
25
  collection_day = kwargs.get("paon")
26
+ garden_collection_week = kwargs.get("postcode")
26
27
  bindata = {"bins": []}
27
28
 
28
29
  days_of_week = [
@@ -35,10 +36,14 @@ class CouncilClass(AbstractGetBinDataClass):
35
36
  "Sunday",
36
37
  ]
37
38
 
39
+ garden_week = ["Week 1", "Week 2"]
40
+
38
41
  refusestartDate = datetime(2024, 11, 4)
39
42
  recyclingstartDate = datetime(2024, 11, 11)
40
43
 
41
44
  offset_days = days_of_week.index(collection_day)
45
+ if garden_collection_week:
46
+ garden_collection = garden_week.index(garden_collection_week)
42
47
 
43
48
  refuse_dates = get_dates_every_x_days(refusestartDate, 14, 28)
44
49
  recycling_dates = get_dates_every_x_days(recyclingstartDate, 14, 28)
@@ -125,6 +130,63 @@ class CouncilClass(AbstractGetBinDataClass):
125
130
  }
126
131
  bindata["bins"].append(dict_data)
127
132
 
133
+ if garden_collection_week:
134
+ if garden_collection == 0:
135
+ gardenstartDate = datetime(2024, 11, 11)
136
+ elif garden_collection == 1:
137
+ gardenstartDate = datetime(2024, 11, 4)
138
+
139
+ garden_dates = get_dates_every_x_days(gardenstartDate, 14, 28)
140
+
141
+ garden_bank_holidays = [
142
+ ("23/12/2024", 1),
143
+ ("24/12/2024", 1),
144
+ ("25/12/2024", 1),
145
+ ("26/12/2024", 1),
146
+ ("27/12/2024", 1),
147
+ ("30/12/2024", 1),
148
+ ("31/12/2024", 1),
149
+ ("01/01/2025", 1),
150
+ ("02/01/2025", 1),
151
+ ("03/01/2025", 1),
152
+ ]
153
+
154
+ for gardenDate in garden_dates:
155
+
156
+ collection_date = (
157
+ datetime.strptime(gardenDate, "%d/%m/%Y")
158
+ + timedelta(days=offset_days)
159
+ ).strftime("%d/%m/%Y")
160
+
161
+ garden_holiday = next(
162
+ (
163
+ value
164
+ for date, value in garden_bank_holidays
165
+ if date == collection_date
166
+ ),
167
+ 0,
168
+ )
169
+
170
+ if garden_holiday > 0:
171
+ continue
172
+
173
+ holiday_offset = next(
174
+ (value for date, value in bank_holidays if date == collection_date),
175
+ 0,
176
+ )
177
+
178
+ if holiday_offset > 0:
179
+ collection_date = (
180
+ datetime.strptime(collection_date, "%d/%m/%Y")
181
+ + timedelta(days=holiday_offset)
182
+ ).strftime("%d/%m/%Y")
183
+
184
+ dict_data = {
185
+ "type": "Garden Bin",
186
+ "collectionDate": collection_date,
187
+ }
188
+ bindata["bins"].append(dict_data)
189
+
128
190
  bindata["bins"].sort(
129
191
  key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
130
192
  )
@@ -0,0 +1,93 @@
1
+ from xml.etree import ElementTree
2
+
3
+ from bs4 import BeautifulSoup
4
+
5
+ from uk_bin_collection.uk_bin_collection.common import *
6
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
7
+
8
+
9
+ class CouncilClass(AbstractGetBinDataClass):
10
+ """
11
+ Concrete classes have to implement all abstract operations of the
12
+ baseclass. They can also override some
13
+ operations with a default implementation.
14
+ """
15
+
16
+ def parse_data(self, page: str, **kwargs) -> dict:
17
+ uprn = kwargs.get("uprn")
18
+ check_uprn(uprn)
19
+ council = "CPL"
20
+
21
+ # Make SOAP request
22
+ headers = {
23
+ "Content-Type": "text/xml; charset=UTF-8",
24
+ "Referer": "https://collections-copeland.azurewebsites.net/calendar.html",
25
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36",
26
+ }
27
+ requests.packages.urllib3.disable_warnings()
28
+ post_data = (
29
+ '<?xml version="1.0" encoding="utf-8"?>'
30
+ '<soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">'
31
+ '<soap:Body><getRoundCalendarForUPRN xmlns="http://webaspx-collections.azurewebsites.net/">'
32
+ "<council>" + council + "</council><UPRN>" + uprn + "</UPRN>"
33
+ "<from>Chtml</from></getRoundCalendarForUPRN></soap:Body></soap:Envelope>"
34
+ )
35
+ response = requests.post(
36
+ "https://collections-copeland.azurewebsites.net/WSCollExternal.asmx",
37
+ headers=headers,
38
+ data=post_data,
39
+ )
40
+
41
+ if response.status_code != 200:
42
+ raise ValueError("No bin data found for provided UPRN.")
43
+
44
+ # Get HTML from SOAP response
45
+ xmltree = ElementTree.fromstring(response.text)
46
+ html = xmltree.find(
47
+ ".//{http://webaspx-collections.azurewebsites.net/}getRoundCalendarForUPRNResult"
48
+ ).text
49
+ # Parse with BS4
50
+ soup = BeautifulSoup(html, features="html.parser")
51
+ soup.prettify()
52
+
53
+ data = {"bins": []}
54
+ for bin_type in ["Refuse", "Recycling", "Garden"]:
55
+ bin_el = soup.find("b", string=bin_type)
56
+ if bin_el:
57
+ bin_info = bin_el.next_sibling.split(": ")[1]
58
+ collection_date = ""
59
+ results = re.search("([A-Za-z]+ \\d\\d? [A-Za-z]+) then", bin_info)
60
+ if results:
61
+ if results[1] == "Today":
62
+ date = datetime.now()
63
+ elif results[1] == "Tomorrow":
64
+ date = datetime.now() + timedelta(days=1)
65
+ else:
66
+ date = get_next_occurrence_from_day_month(
67
+ datetime.strptime(
68
+ results[1] + " " + datetime.now().strftime("%Y"),
69
+ "%a %d %b %Y",
70
+ )
71
+ )
72
+ if date:
73
+ collection_date = date.strftime(date_format)
74
+ else:
75
+ results2 = re.search("([A-Za-z]+) then", bin_info)
76
+ if results2:
77
+ if results2[1] == "Today":
78
+ collection_date = datetime.now().strftime(date_format)
79
+ elif results2[1] == "Tomorrow":
80
+ collection_date = (
81
+ datetime.now() + timedelta(days=1)
82
+ ).strftime(date_format)
83
+ else:
84
+ collection_date = results2[1]
85
+
86
+ if collection_date != "":
87
+ dict_data = {
88
+ "type": bin_type,
89
+ "collectionDate": collection_date,
90
+ }
91
+ data["bins"].append(dict_data)
92
+
93
+ return data
@@ -1,4 +1,6 @@
1
- from bs4 import BeautifulSoup
1
+ import time
2
+
3
+ import requests
2
4
  from dateutil.relativedelta import relativedelta
3
5
 
4
6
  from uk_bin_collection.uk_bin_collection.common import *
@@ -19,43 +21,92 @@ class CouncilClass(AbstractGetBinDataClass):
19
21
  usrn = kwargs.get("paon")
20
22
  check_uprn(uprn)
21
23
  check_usrn(usrn)
24
+ bindata = {"bins": []}
25
+
26
+ SESSION_URL = "https://crawleybc-self.achieveservice.com/authapi/isauthenticated?uri=https%253A%252F%252Fcrawleybc-self.achieveservice.com%252Fen%252FAchieveForms%252F%253Fform_uri%253Dsandbox-publish%253A%252F%252FAF-Process-fb73f73e-e8f5-4441-9f83-8b5d04d889d6%252FAF-Stage-ec9ada91-d2d9-43bc-9730-597d15fc8108%252Fdefinition.json%2526redirectlink%253D%252Fen%2526cancelRedirectLink%253D%252Fen%2526noLoginPrompt%253D1%2526accept%253Dyes&hostname=crawleybc-self.achieveservice.com&withCredentials=true"
27
+
28
+ API_URL = "https://crawleybc-self.achieveservice.com/apibroker/"
29
+
30
+ currentdate = datetime.now().strftime("%d/%m/%Y")
31
+
32
+ data = {
33
+ "formValues": {
34
+ "Address": {
35
+ "address": {
36
+ "value": {
37
+ "Address": {
38
+ "usrn": {
39
+ "value": usrn,
40
+ },
41
+ "uprn": {
42
+ "value": uprn,
43
+ },
44
+ }
45
+ },
46
+ },
47
+ "dayConverted": {
48
+ "value": currentdate,
49
+ },
50
+ "getCollection": {
51
+ "value": "true",
52
+ },
53
+ "getWorksheets": {
54
+ "value": "false",
55
+ },
56
+ },
57
+ },
58
+ }
59
+
60
+ headers = {
61
+ "Content-Type": "application/json",
62
+ "Accept": "application/json",
63
+ "User-Agent": "Mozilla/5.0",
64
+ "X-Requested-With": "XMLHttpRequest",
65
+ "Referer": "https://crawleybc-self.achieveservice.com/fillform/?iframe_id=fillform-frame-1&db_id=",
66
+ }
67
+ s = requests.session()
68
+ r = s.get(SESSION_URL)
69
+ r.raise_for_status()
70
+ session_data = r.json()
71
+ sid = session_data["auth-session"]
72
+ params = {
73
+ "api": "RunLookup",
74
+ "id": "5b4f0ec5f13f4",
75
+ "repeat_against": "",
76
+ "noRetry": "true",
77
+ "getOnlyTokens": "undefined",
78
+ "log_id": "",
79
+ "app_name": "AF-Renderer::Self",
80
+ # unix_timestamp
81
+ "_": str(int(time.time() * 1000)),
82
+ "sid": sid,
83
+ }
84
+
85
+ r = s.post(API_URL, json=data, headers=headers, params=params)
86
+ r.raise_for_status()
87
+
88
+ data = r.json()
89
+ rows_data = data["integration"]["transformed"]["rows_data"]["0"]
90
+ if not isinstance(rows_data, dict):
91
+ raise ValueError("Invalid data returned from API")
92
+
93
+ # Extract each service's relevant details for the bin schedule
94
+ for key, value in rows_data.items():
95
+ if key.endswith("DateNext"):
96
+ BinType = key.replace("DateNext", "Service")
97
+ for key2, value2 in rows_data.items():
98
+ if key2 == BinType:
99
+ BinType = value2
100
+ next_collection = datetime.strptime(value, "%A %d %B").replace(
101
+ year=datetime.now().year
102
+ )
103
+ if datetime.now().month == 12 and next_collection.month == 1:
104
+ next_collection = next_collection + relativedelta(years=1)
105
+
106
+ dict_data = {
107
+ "type": BinType,
108
+ "collectionDate": next_collection.strftime(date_format),
109
+ }
110
+ bindata["bins"].append(dict_data)
22
111
 
23
- day = datetime.now().date().strftime("%d")
24
- month = datetime.now().date().strftime("%m")
25
- year = datetime.now().date().strftime("%Y")
26
-
27
- api_url = (
28
- f"https://my.crawley.gov.uk/appshost/firmstep/self/apps/custompage/waste?language=en&uprn={uprn}"
29
- f"&usrn={usrn}&day={day}&month={month}&year={year}"
30
- )
31
- response = requests.get(api_url)
32
-
33
- soup = BeautifulSoup(response.text, features="html.parser")
34
- soup.prettify()
35
-
36
- data = {"bins": []}
37
-
38
- titles = [title.text for title in soup.select(".block-title")]
39
- collection_tag = soup.body.find_all(
40
- "div", {"class": "col-md-6 col-sm-6 col-xs-6"}, string="Next collection"
41
- )
42
- bin_index = 0
43
- for tag in collection_tag:
44
- for item in tag.next_elements:
45
- if (
46
- str(item).startswith('<div class="date text-right text-grey">')
47
- and str(item) != ""
48
- ):
49
- collection_date = datetime.strptime(item.text, "%A %d %B")
50
- next_collection = collection_date.replace(year=datetime.now().year)
51
- if datetime.now().month == 12 and next_collection.month == 1:
52
- next_collection = next_collection + relativedelta(years=1)
53
-
54
- dict_data = {
55
- "type": titles[bin_index].strip(),
56
- "collectionDate": next_collection.strftime(date_format),
57
- }
58
- data["bins"].append(dict_data)
59
- bin_index += 1
60
- break
61
- return data
112
+ return bindata
@@ -23,6 +23,7 @@ class CouncilClass(AbstractGetBinDataClass):
23
23
  def parse_data(self, page: str, **kwargs) -> dict:
24
24
 
25
25
  collection_day = kwargs.get("paon")
26
+ garden_collection_week = kwargs.get("postcode")
26
27
  bindata = {"bins": []}
27
28
 
28
29
  days_of_week = [
@@ -35,10 +36,14 @@ class CouncilClass(AbstractGetBinDataClass):
35
36
  "Sunday",
36
37
  ]
37
38
 
39
+ garden_week = ["Week 1", "Week 2"]
40
+
38
41
  refusestartDate = datetime(2024, 11, 11)
39
42
  recyclingstartDate = datetime(2024, 11, 4)
40
43
 
41
44
  offset_days = days_of_week.index(collection_day)
45
+ if garden_collection_week:
46
+ garden_collection = garden_week.index(garden_collection_week)
42
47
 
43
48
  refuse_dates = get_dates_every_x_days(refusestartDate, 14, 28)
44
49
  recycling_dates = get_dates_every_x_days(recyclingstartDate, 14, 28)
@@ -125,6 +130,63 @@ class CouncilClass(AbstractGetBinDataClass):
125
130
  }
126
131
  bindata["bins"].append(dict_data)
127
132
 
133
+ if garden_collection_week:
134
+ if garden_collection == 0:
135
+ gardenstartDate = datetime(2024, 11, 11)
136
+ elif garden_collection == 1:
137
+ gardenstartDate = datetime(2024, 11, 4)
138
+
139
+ garden_dates = get_dates_every_x_days(gardenstartDate, 14, 28)
140
+
141
+ garden_bank_holidays = [
142
+ ("23/12/2024", 1),
143
+ ("24/12/2024", 1),
144
+ ("25/12/2024", 1),
145
+ ("26/12/2024", 1),
146
+ ("27/12/2024", 1),
147
+ ("30/12/2024", 1),
148
+ ("31/12/2024", 1),
149
+ ("01/01/2025", 1),
150
+ ("02/01/2025", 1),
151
+ ("03/01/2025", 1),
152
+ ]
153
+
154
+ for gardenDate in garden_dates:
155
+
156
+ collection_date = (
157
+ datetime.strptime(gardenDate, "%d/%m/%Y")
158
+ + timedelta(days=offset_days)
159
+ ).strftime("%d/%m/%Y")
160
+
161
+ garden_holiday = next(
162
+ (
163
+ value
164
+ for date, value in garden_bank_holidays
165
+ if date == collection_date
166
+ ),
167
+ 0,
168
+ )
169
+
170
+ if garden_holiday > 0:
171
+ continue
172
+
173
+ holiday_offset = next(
174
+ (value for date, value in bank_holidays if date == collection_date),
175
+ 0,
176
+ )
177
+
178
+ if holiday_offset > 0:
179
+ collection_date = (
180
+ datetime.strptime(collection_date, "%d/%m/%Y")
181
+ + timedelta(days=holiday_offset)
182
+ ).strftime("%d/%m/%Y")
183
+
184
+ dict_data = {
185
+ "type": "Garden Bin",
186
+ "collectionDate": collection_date,
187
+ }
188
+ bindata["bins"].append(dict_data)
189
+
128
190
  bindata["bins"].sort(
129
191
  key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
130
192
  )
@@ -0,0 +1,84 @@
1
+ from datetime import datetime
2
+
3
+ import requests
4
+ from bs4 import BeautifulSoup
5
+ from dateutil.relativedelta import relativedelta
6
+
7
+ from uk_bin_collection.uk_bin_collection.common import *
8
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
9
+
10
+
11
+ class CouncilClass(AbstractGetBinDataClass):
12
+ """
13
+ Concrete classes have to implement all abstract operations of the
14
+ base class. They can also override some operations with a default
15
+ implementation.
16
+ """
17
+
18
+ def parse_data(self, page: str, **kwargs) -> dict:
19
+ # Get and check UPRN
20
+ user_uprn = kwargs.get("uprn")
21
+ check_uprn(user_uprn)
22
+ bindata = {"bins": []}
23
+
24
+ uri = "https://www.rother.gov.uk/wp-admin/admin-ajax.php"
25
+ params = {
26
+ "action": "get_address_data",
27
+ "uprn": user_uprn,
28
+ "context": "full-page",
29
+ }
30
+
31
+ headers = {
32
+ "Content-Type": "application/x-www-form-urlencoded",
33
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
34
+ }
35
+
36
+ # Send a POST request with form data and headers
37
+ r = requests.post(uri, data=params, headers=headers, verify=False)
38
+
39
+ result = r.json()
40
+
41
+ if result["success"]:
42
+ # Parse the HTML with BeautifulSoup
43
+ soup = BeautifulSoup(result["data"], "html.parser")
44
+ soup.prettify()
45
+
46
+ # print(soup)
47
+
48
+ # Find the div elements with class "bindays-item"
49
+ bin_days = soup.find_all("div", class_="bindays-item")
50
+
51
+ # Loop through each bin item and extract type and date
52
+ for bin_day in bin_days:
53
+ # Extract bin type from the <h3> tag
54
+ bin_type = bin_day.find("h3").get_text(strip=True).replace(":", "")
55
+
56
+ # Extract date (or check if it's a subscription link for Garden Waste)
57
+ date_span = bin_day.find("span", class_="find-my-nearest-bindays-date")
58
+ if date_span:
59
+ if date_span.find("a"):
60
+ # If there is a link, this is the Garden bin signup link
61
+ continue
62
+ else:
63
+ # Otherwise, get the date text directly
64
+ date = date_span.get_text(strip=True)
65
+ else:
66
+ date = None
67
+
68
+ date = datetime.strptime(
69
+ remove_ordinal_indicator_from_date_string(date),
70
+ "%A %d %B",
71
+ ).replace(year=datetime.now().year)
72
+ if datetime.now().month == 12 and date.month == 1:
73
+ date = date + relativedelta(years=1)
74
+
75
+ dict_data = {
76
+ "type": bin_type,
77
+ "collectionDate": date.strftime(date_format),
78
+ }
79
+ bindata["bins"].append(dict_data)
80
+
81
+ bindata["bins"].sort(
82
+ key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
83
+ )
84
+ return bindata
@@ -0,0 +1,90 @@
1
+ from datetime import datetime
2
+
3
+ import requests
4
+ from bs4 import BeautifulSoup
5
+ from dateutil.relativedelta import relativedelta
6
+
7
+ from uk_bin_collection.uk_bin_collection.common import *
8
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
9
+
10
+
11
+ class CouncilClass(AbstractGetBinDataClass):
12
+ """
13
+ Concrete classes have to implement all abstract operations of the
14
+ base class. They can also override some operations with a default
15
+ implementation.
16
+ """
17
+
18
+ def parse_data(self, page: str, **kwargs) -> dict:
19
+ # Get and check UPRN
20
+ user_uprn = kwargs.get("uprn")
21
+ check_uprn(user_uprn)
22
+ bindata = {"bins": []}
23
+
24
+ uri = "https://waste.southhams.gov.uk/mycollections"
25
+
26
+ s = requests.session()
27
+ r = s.get(uri)
28
+ for cookie in r.cookies:
29
+ if cookie.name == "fcc_session_cookie":
30
+ fcc_session_token = cookie.value
31
+
32
+ uri = "https://waste.southhams.gov.uk/mycollections/getcollectiondetails"
33
+
34
+ params = {
35
+ "fcc_session_token": fcc_session_token,
36
+ "uprn": user_uprn,
37
+ }
38
+
39
+ headers = {
40
+ "Content-Type": "application/x-www-form-urlencoded",
41
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
42
+ "Referer": "https://waste.southhams.gov.uk/mycollections",
43
+ "X-Requested-With": "XMLHttpRequest",
44
+ }
45
+
46
+ # Send a POST request with form data and headers
47
+ r = s.post(uri, data=params, headers=headers)
48
+
49
+ result = r.json()
50
+
51
+ for collection in result["binCollections"]["tile"]:
52
+
53
+ # Parse the HTML with BeautifulSoup
54
+ soup = BeautifulSoup(collection[0], "html.parser")
55
+ soup.prettify()
56
+
57
+ # Find all collectionDiv elements
58
+ collections = soup.find_all("div", class_="collectionDiv")
59
+
60
+ # Process each collectionDiv
61
+ for collection in collections:
62
+ # Extract the service name
63
+ service_name = collection.find("h3").text.strip()
64
+
65
+ # Extract collection frequency and day
66
+ details = collection.find("div", class_="detWrap").text.strip()
67
+
68
+ # Extract the next collection date
69
+ next_collection = details.split("Your next scheduled collection is ")[
70
+ 1
71
+ ].split(".")[0]
72
+
73
+ if next_collection.startswith("today"):
74
+ next_collection = next_collection.split("today, ")[1]
75
+ elif next_collection.startswith("tomorrow"):
76
+ next_collection = next_collection.split("tomorrow, ")[1]
77
+
78
+ dict_data = {
79
+ "type": service_name,
80
+ "collectionDate": datetime.strptime(
81
+ next_collection, "%A, %d %B %Y"
82
+ ).strftime(date_format),
83
+ }
84
+ bindata["bins"].append(dict_data)
85
+
86
+ bindata["bins"].sort(
87
+ key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
88
+ )
89
+
90
+ return bindata
@@ -0,0 +1,99 @@
1
+ from bs4 import BeautifulSoup
2
+
3
+ from uk_bin_collection.uk_bin_collection.common import *
4
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
5
+
6
+
7
+ # import the wonderful Beautiful Soup and the URL grabber
8
+ class CouncilClass(AbstractGetBinDataClass):
9
+ """
10
+ Concrete classes have to implement all abstract operations of the
11
+ base class. They can also override some operations with a default
12
+ implementation.
13
+ """
14
+
15
+ def parse_date(self, date_str):
16
+ months = {
17
+ "January": "01",
18
+ "February": "02",
19
+ "March": "03",
20
+ "April": "04",
21
+ "May": "05",
22
+ "June": "06",
23
+ "July": "07",
24
+ "August": "08",
25
+ "September": "09",
26
+ "October": "10",
27
+ "November": "11",
28
+ "December": "12",
29
+ }
30
+ day, date, month_abbr, year = date_str.split()
31
+ month = months[month_abbr]
32
+ return f"{date}/{month}/{year}"
33
+
34
+ def add_bin_types_to_collection(
35
+ self, bin_data: {"bins": []}, collection_date: str, collectionType: str
36
+ ):
37
+ if "Grey Bin" in collectionType:
38
+ bin_data["bins"].append(
39
+ {
40
+ "type": "Grey Bin",
41
+ "collectionDate": self.parse_date(collection_date),
42
+ }
43
+ )
44
+ if "Green Bin" in collectionType:
45
+ bin_data["bins"].append(
46
+ {
47
+ "type": "Green Bin",
48
+ "collectionDate": self.parse_date(collection_date),
49
+ }
50
+ )
51
+
52
+ if "Blue Bin" in collectionType:
53
+ bin_data["bins"].append(
54
+ {
55
+ "type": "Blue Bin",
56
+ "collectionDate": self.parse_date(collection_date),
57
+ }
58
+ )
59
+
60
+ def parse_data(self, page: str, **kwargs) -> dict:
61
+ # Make a BS4 object
62
+ soup = BeautifulSoup(page.text, features="html.parser")
63
+ soup.prettify()
64
+
65
+ # Initialize the bin data structure
66
+ bin_data = {"bins": []}
67
+
68
+ collectionDatesSection = soup.find("div", id="showCollectionDates")
69
+
70
+ # Find next date
71
+ collection_date = collectionDatesSection.find(
72
+ "p", class_="collection-date"
73
+ ).getText()
74
+
75
+ # convert to date
76
+ collection_type = collectionDatesSection.find(
77
+ "p", class_="collection-type"
78
+ ).getText()
79
+
80
+ self.add_bin_types_to_collection(bin_data, collection_date, collection_type)
81
+
82
+ # Find the table with collection dates
83
+ table = collectionDatesSection.find("table", class_="leisure-table")
84
+
85
+ # Extract the rows containing the bin collection information
86
+ rows = table.find_all("tr")
87
+
88
+ # Loop through the rows and extract bin data
89
+ for row in rows:
90
+ cells = row.find_all("td")
91
+ if len(cells) == 2:
92
+ collection_date = cells[1].get_text(strip=True)
93
+ collection_type = cells[0].get_text(strip=True)
94
+
95
+ self.add_bin_types_to_collection(
96
+ bin_data, collection_date, collection_type
97
+ )
98
+
99
+ return bin_data
@@ -0,0 +1,101 @@
1
+ import time
2
+
3
+ import requests
4
+ from dateutil.relativedelta import relativedelta
5
+
6
+ from uk_bin_collection.uk_bin_collection.common import *
7
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
8
+
9
+
10
+ # import the wonderful Beautiful Soup and the URL grabber
11
+ class CouncilClass(AbstractGetBinDataClass):
12
+ """
13
+ Concrete classes have to implement all abstract operations of the
14
+ base class. They can also override some operations with a default
15
+ implementation.
16
+ """
17
+
18
+ def parse_data(self, page: str, **kwargs) -> dict:
19
+ # Make a BS4 object
20
+ uprn = kwargs.get("uprn")
21
+ check_uprn(uprn)
22
+ bindata = {"bins": []}
23
+
24
+ SESSION_URL = "https://stevenage-self.achieveservice.com/authapi/isauthenticated?uri=https%253A%252F%252Fstevenage-self.achieveservice.com%252Fservice%252Fmy_bin_collection_schedule&hostname=stevenage-self.achieveservice.com&withCredentials=true"
25
+ TOKEN_URL = "https://stevenage-self.achieveservice.com/apibroker/runLookup?id=5e55337a540d4"
26
+ API_URL = "https://stevenage-self.achieveservice.com/apibroker/runLookup"
27
+
28
+ data = {
29
+ "formValues": {
30
+ "Section 1": {
31
+ "token": {"value": ""},
32
+ "LLPGUPRN": {
33
+ "value": uprn,
34
+ },
35
+ "MinimumDateLookAhead": {
36
+ "value": time.strftime("%Y-%m-%d"),
37
+ },
38
+ "MaximumDateLookAhead": {
39
+ "value": str(int(time.strftime("%Y")) + 1)
40
+ + time.strftime("-%m-%d"),
41
+ },
42
+ },
43
+ },
44
+ }
45
+
46
+ headers = {
47
+ "Content-Type": "application/json",
48
+ "Accept": "application/json",
49
+ "User-Agent": "Mozilla/5.0",
50
+ "X-Requested-With": "XMLHttpRequest",
51
+ "Referer": "https://stevenage-self.achieveservice.com/fillform/?iframe_id=fillform-frame-1&db_id=",
52
+ }
53
+ s = requests.session()
54
+ r = s.get(SESSION_URL)
55
+ r.raise_for_status()
56
+ session_data = r.json()
57
+ sid = session_data["auth-session"]
58
+
59
+ t = s.get(TOKEN_URL)
60
+ t.raise_for_status()
61
+ token_data = t.json()
62
+ data["formValues"]["Section 1"]["token"]["value"] = token_data["integration"][
63
+ "transformed"
64
+ ]["rows_data"]["0"]["token"]
65
+
66
+ params = {
67
+ "id": "64ba8cee353e6",
68
+ "repeat_against": "",
69
+ "noRetry": "false",
70
+ "getOnlyTokens": "undefined",
71
+ "log_id": "",
72
+ "app_name": "AF-Renderer::Self",
73
+ # unix_timestamp
74
+ "_": str(int(time.time() * 1000)),
75
+ "sid": sid,
76
+ }
77
+
78
+ r = s.post(API_URL, json=data, headers=headers, params=params)
79
+ r.raise_for_status()
80
+
81
+ data = r.json()
82
+ rows_data = data["integration"]["transformed"]["rows_data"]
83
+ if not isinstance(rows_data, dict):
84
+ raise ValueError("Invalid data returned from API")
85
+
86
+ for key in rows_data:
87
+ value = rows_data[key]
88
+ bin_type = value["bintype"].strip()
89
+
90
+ try:
91
+ date = datetime.strptime(value["collectiondate"], "%A %d %B %Y").date()
92
+ except ValueError:
93
+ continue
94
+
95
+ dict_data = {
96
+ "type": bin_type,
97
+ "collectionDate": date.strftime(date_format),
98
+ }
99
+ bindata["bins"].append(dict_data)
100
+
101
+ return bindata
@@ -0,0 +1,51 @@
1
+ import time
2
+
3
+ import requests
4
+
5
+ from uk_bin_collection.uk_bin_collection.common import *
6
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
7
+
8
+
9
+ # import the wonderful Beautiful Soup and the URL grabber
10
+ class CouncilClass(AbstractGetBinDataClass):
11
+ """
12
+ Concrete classes have to implement all abstract operations of the
13
+ base class. They can also override some operations with a default
14
+ implementation.
15
+ """
16
+
17
+ def parse_data(self, page: str, **kwargs) -> dict:
18
+
19
+ user_uprn = kwargs.get("uprn")
20
+ check_uprn(user_uprn)
21
+ bindata = {"bins": []}
22
+
23
+ URI = f"https://www.thanet.gov.uk/wp-content/mu-plugins/collection-day/incl/mu-collection-day-calls.php?pAddress={user_uprn}"
24
+
25
+ headers = {
26
+ "x-requested-with": "XMLHttpRequest",
27
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
28
+ }
29
+
30
+ # Make the GET request
31
+ response = requests.get(URI, headers=headers)
32
+
33
+ # Parse the JSON response
34
+ bin_collection = response.json()
35
+
36
+ # Loop through each collection in bin_collection
37
+ for collection in bin_collection:
38
+ bin_type = collection["type"]
39
+ collection_date = collection["nextDate"].split(" ")[0]
40
+
41
+ dict_data = {
42
+ "type": bin_type,
43
+ "collectionDate": collection_date,
44
+ }
45
+ bindata["bins"].append(dict_data)
46
+
47
+ bindata["bins"].sort(
48
+ key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
49
+ )
50
+
51
+ return bindata
@@ -0,0 +1,57 @@
1
+ import time
2
+
3
+ import requests
4
+ from bs4 import BeautifulSoup
5
+
6
+ from uk_bin_collection.uk_bin_collection.common import *
7
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
8
+
9
+
10
+ # import the wonderful Beautiful Soup and the URL grabber
11
class CouncilClass(AbstractGetBinDataClass):
    """
    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Scrape next bin collection dates for Wolverhampton City Council.

        Builds the find-my-nearest URL from postcode and UPRN, scrapes the
        jumbotron section, and returns the standard ``{"bins": [...]}``
        structure sorted by collection date.

        :param page: unused; the page is fetched directly
        :keyword uprn: property UPRN (validated via ``check_uprn``)
        :keyword postcode: property postcode (validated via ``check_postcode``)
        :raises requests.HTTPError: if the council site returns an error status
        :raises ValueError: if the expected page section cannot be found
        """
        user_uprn = kwargs.get("uprn")
        user_postcode = kwargs.get("postcode")
        check_uprn(user_uprn)
        check_postcode(user_postcode)
        bindata = {"bins": []}

        # The postcode is a URL path segment, so its space must be
        # percent-encoded.
        user_postcode = user_postcode.replace(" ", "%20")

        URI = f"https://www.wolverhampton.gov.uk/find-my-nearest/{user_postcode}/{user_uprn}"

        # Timeout prevents an indefinite hang; raise_for_status avoids
        # silently scraping an HTTP error page.
        response = requests.get(URI, timeout=30)
        response.raise_for_status()

        soup = BeautifulSoup(response.content, "html.parser")

        jumbotron = soup.find("div", {"class": "jumbotron jumbotron-fluid"})
        if jumbotron is None:
            # Layout changed or the address was not found — fail with a
            # clear message instead of an AttributeError below.
            raise ValueError("Could not find bin collection section on page")

        # Find all bin entries in the row
        for bin_div in jumbotron.select("div.col-md-4"):
            service_name = bin_div.h3.text.strip()
            # `string=` replaces the `text=` keyword deprecated in bs4 4.4.
            next_date = bin_div.find(
                "h4", string=lambda x: x and "Next date" in x
            ).text.split(": ")[1]

            dict_data = {
                "type": service_name,
                # Site renders dates like "January 1, 2024" per the
                # strptime format below.
                "collectionDate": datetime.strptime(
                    next_date,
                    "%B %d, %Y",
                ).strftime(date_format),
            }
            bindata["bins"].append(dict_data)

        # NOTE(review): assumes date_format is %d/%m/%Y — matches the sort key.
        bindata["bins"].sort(
            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
        )

        return bindata
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: uk_bin_collection
3
- Version: 0.116.0
3
+ Version: 0.118.0
4
4
  Summary: Python Lib to collect UK Bin Data
5
5
  Author: Robert Bradley
6
6
  Author-email: robbrad182@gmail.com
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
2
2
  uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
3
3
  uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
4
4
  uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
5
- uk_bin_collection/tests/input.json,sha256=YbcJwMyKLhZPNACJ8VL3QKjV47A3SQD5suyo4N9yfDc,101581
5
+ uk_bin_collection/tests/input.json,sha256=d54M0Eun5WcoFXATl2jFSaNkMv3QYAZFl20ujKzW9Co,103837
6
6
  uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
7
7
  uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
8
8
  uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -23,7 +23,7 @@ uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=2
23
23
  uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=yC-8UMQHSbvze43PJ2_F4Z3cu7M7cynKTojipBJU7Ug,4307
24
24
  uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
25
25
  uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
26
- uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py,sha256=T0Awn7afXhpyp3-R3IjZcowhcBgfXNlimRlODDAMMdQ,4094
26
+ uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py,sha256=aPONTzKXtPy7aBHFTRvhKTJ13qXqNicSquNRVAYV2d8,6200
27
27
  uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py,sha256=Sd4-pbv0QZsR7soxvXYqsfdOUIqZqS6notyoZthG77s,9182
28
28
  uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py,sha256=RKuH8HzGc3Q0WtLg-g_xVMn9hUYqdENgfcvvR4Bx5PI,4763
29
29
  uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py,sha256=NymPmq5pud0PJ8ePcc2r1SKED4EHQ0EY2l71O-Metxc,3313
@@ -63,10 +63,11 @@ uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py,sha256
63
63
  uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py,sha256=M7HjuUaFq8aSnOf_9m1QS4MmPPMmPhF3mLHSrfDPtV0,5194
64
64
  uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py,sha256=Mny-q2rQkWe2Tj1gINwEM1L4AkqQl1EDMAaKY0-deD4,3968
65
65
  uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py,sha256=el75qv2QyfWZBU09tJLvD8vLQZ9pCg73u1NBFs6ybo8,1034
66
+ uk_bin_collection/uk_bin_collection/councils/CopelandBoroughCouncil.py,sha256=JDtcJFUY47a3Ws4_7pnCYlngw_3p_A-m5MitPLdrv3w,3983
66
67
  uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py,sha256=WZiz50svwyZgO8QKUCLy7hfFuy2HmAx5h-TG3yAweRA,2836
67
68
  uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py,sha256=K_HVkAJHEs-i3PRdoBqWVtWUl3BNed6mRLGaqvtSskg,4896
68
69
  uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py,sha256=kfAvA2e4MlO0W9YT70U_mW9gxVPrmr0BOGzV99Tw2Bg,2012
69
- uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py,sha256=_BEKZAjlS5Ad5DjyxqAEFSLn8F-KYox0zmn4BXaAD6A,2367
70
+ uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py,sha256=Oaj5INA3zNjtzBRsfLvRTIxZzcd4E4bJfVF1ULWlrL4,4322
70
71
  uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py,sha256=Vxh5ICoaXTAvx0nDOq_95XQ4He9sQKcLdI5keV2uxM4,11384
71
72
  uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py,sha256=Tm_6pvBPj-6qStbe6-02LXaoCOlnnDvVXAAocGVvf_E,3970
72
73
  uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py,sha256=SPirUUoweMwX5Txtsr0ocdcFtKxCQ9LhzTTJN20tM4w,1550
@@ -138,7 +139,7 @@ uk_bin_collection/uk_bin_collection/councils/MansfieldDistrictCouncil.py,sha256=
138
139
  uk_bin_collection/uk_bin_collection/councils/MertonCouncil.py,sha256=3Y2Un4xXo1sCcMsudynODSzocV_mMofWkX2JqONDb5o,1997
139
140
  uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py,sha256=oOWwU5FSgGej2Mv7FQ66N-EzS5nZgmGsd0WnfLWUc1I,5238
140
141
  uk_bin_collection/uk_bin_collection/councils/MidDevonCouncil.py,sha256=RjBZ7R3_Pax9p1d2DCygqryjV1RP4BYvqb-rT_KyOEg,3322
141
- uk_bin_collection/uk_bin_collection/councils/MidSuffolkDistrictCouncil.py,sha256=nnN7S1mK7HU3NPW7KsmEVkcC7Gb5rE1mmW9FCUfukWk,4094
142
+ uk_bin_collection/uk_bin_collection/councils/MidSuffolkDistrictCouncil.py,sha256=b1E47oGUmBfLHzoIT74t556kO-SuopelRprf_ywUsvM,6200
142
143
  uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py,sha256=AZgC9wmDLEjUOtIFvf0ehF5LHturXTH4DkE3ioPSVBA,6254
143
144
  uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=mM5-itJDNhjsT5UEjSFfWppmfmPFSns4u_1QblewuFU,5605
144
145
  uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py,sha256=7e2pGBLCw24pNItHeI9jkxQ3rEOZ4WC4zVlbvKYGdXE,2600
@@ -178,6 +179,7 @@ uk_bin_collection/uk_bin_collection/councils/RenfrewshireCouncil.py,sha256=VlWm-
178
179
  uk_bin_collection/uk_bin_collection/councils/RhonddaCynonTaffCouncil.py,sha256=wInyVG_0wRrX_dRO9qbAzPhlXDseXapj2zQhsISw8gg,3233
179
180
  uk_bin_collection/uk_bin_collection/councils/RochdaleCouncil.py,sha256=UTSwSw515VehGn4xkjjRhUlzS4lDj4hgna6y-4VW3uM,2379
180
181
  uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py,sha256=rfhD66A9HfHL46ldF9sbxvV7fPaaoNxzIJbHjVT6A90,2621
182
+ uk_bin_collection/uk_bin_collection/councils/RotherDistrictCouncil.py,sha256=-fdLvtik9ytfwXrrhwWdBxqQOMq2N1pvrIuvShhf8PU,3090
181
183
  uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py,sha256=LtMPM8lj5bfReDR4buHEo-aRC_HTBIeo1nf8GE5-R80,1790
182
184
  uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py,sha256=a5ySLmFvvY56QMA7-bk6MVBxRp5tPBIBg4navH0eYas,4306
183
185
  uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py,sha256=wMtiYRirT585vtsEOIyXHugk7aEj3pvyVWBaAePdqtE,4005
@@ -193,17 +195,20 @@ uk_bin_collection/uk_bin_collection/councils/SouthAyrshireCouncil.py,sha256=03ea
193
195
  uk_bin_collection/uk_bin_collection/councils/SouthCambridgeshireCouncil.py,sha256=xGSMcikxjS4UzqKs0X50LJKmn09C-XAAs98SPhNZgkQ,2308
194
196
  uk_bin_collection/uk_bin_collection/councils/SouthDerbyshireDistrictCouncil.py,sha256=irqelQSENPsZLlNtYtpt-Z7GwKUyvhp94kKKVIIDjQg,2087
195
197
  uk_bin_collection/uk_bin_collection/councils/SouthGloucestershireCouncil.py,sha256=ytQot0J7i6DTJo6hb9koTB1UpXLATKVeRU4FBF9kHRo,2412
198
+ uk_bin_collection/uk_bin_collection/councils/SouthHamsDistrictCouncil.py,sha256=DiijlLisR8qwPSo-4XSAiNk5G31-fYr7bDHKSFjDT7o,3180
196
199
  uk_bin_collection/uk_bin_collection/councils/SouthKestevenDistrictCouncil.py,sha256=_26ouWln5VrKiIFcp2b6ZzuwCKpp3aNcS2n5d4-8NsA,6210
197
200
  uk_bin_collection/uk_bin_collection/councils/SouthLanarkshireCouncil.py,sha256=fj-eZI0yrvQVCv8GvhcovZ3b9bV6Xv_ws3IunWjnv4U,3126
198
201
  uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py,sha256=C2qIZjjbl9JnuukX9OH2RbfP0hSdp3uX76APGY33qKs,4622
199
202
  uk_bin_collection/uk_bin_collection/councils/SouthOxfordshireCouncil.py,sha256=zW4bN3hcqNoK_Y0-vPpuZs3K0LTPvApu6_v9K-D7WjE,3879
200
203
  uk_bin_collection/uk_bin_collection/councils/SouthRibbleCouncil.py,sha256=OdexbeiI5WsCfjlsnHjAce8oGF5fW-n7q2XOuxcpHzw,3604
204
+ uk_bin_collection/uk_bin_collection/councils/SouthStaffordshireDistrictCouncil.py,sha256=ACQMHWyamnj1ag3gNF-8Jhp-DKUok1GhFdnzH4nCzwU,3201
201
205
  uk_bin_collection/uk_bin_collection/councils/SouthTynesideCouncil.py,sha256=dxXGrJfg_fn2IPTBgq6Duwy0WY8GYLafMuisaCjOnbs,3426
202
206
  uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py,sha256=Z6JIbUt3yr4oG60n1At4AjPIGrs7Qzn_sDNY-TsS62E,4882
203
207
  uk_bin_collection/uk_bin_collection/councils/StAlbansCityAndDistrictCouncil.py,sha256=mPZz6Za6kTSkrfHnj0OfwtnpRYR1dKvxbuFEKnWsiL8,1451
204
208
  uk_bin_collection/uk_bin_collection/councils/StHelensBC.py,sha256=X9dvnQTNn7QUO8gv1A587e1aDI92TWN4iNLATTn3H3w,4777
205
209
  uk_bin_collection/uk_bin_collection/councils/StaffordBoroughCouncil.py,sha256=9Qj4HJI7Dbiqb2mVSG2UtkBe27Y7wvQ5SYFTwGzJ5g0,2292
206
210
  uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py,sha256=_N8Cg26EbTaKp0RsWvQuELVcZDHbT2BlD2LW8qhkS_Q,4361
211
+ uk_bin_collection/uk_bin_collection/councils/StevenageBoroughCouncil.py,sha256=EiDIyOlHhdiJ-YYjo7T5uA5sN2jzNoysu6FctjuAjBI,3549
207
212
  uk_bin_collection/uk_bin_collection/councils/StockportBoroughCouncil.py,sha256=v0HmioNVRoU1-9OnLJl2V3M5pVR1aVu1BgOLHFR1Sf4,1429
208
213
  uk_bin_collection/uk_bin_collection/councils/StocktonOnTeesCouncil.py,sha256=obaBgsmIJ95Ah7KaTNWdU107tZDPVuuJox0mGUoGjNk,6070
209
214
  uk_bin_collection/uk_bin_collection/councils/StokeOnTrentCityCouncil.py,sha256=KM0EgWeO7mk8lkozX0RCTfMchXdjrfBqIjCiOtB09aM,2884
@@ -219,6 +224,7 @@ uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py,sha256=vSnQ7U
219
224
  uk_bin_collection/uk_bin_collection/councils/TelfordAndWrekinCouncil.py,sha256=p1ZS5R4EGxbEWlRBrkGXgKwE_lkyBT-R60yKFFhVObc,1844
220
225
  uk_bin_collection/uk_bin_collection/councils/TendringDistrictCouncil.py,sha256=DJbYI8m6lIISDrK5h8V5Jo-9kGG7kr9dz7GD8St4nc8,4274
221
226
  uk_bin_collection/uk_bin_collection/councils/TestValleyBoroughCouncil.py,sha256=Dtfkyrwt795W7gqFJxVGRR8t3R5WMNQZwTWJckLpZWE,8480
227
+ uk_bin_collection/uk_bin_collection/councils/ThanetDistrictCouncil.py,sha256=-opmZG9GzjB_NvmWpN6nFZ7rlkSoaRrQICU5E8T0DEQ,1659
222
228
  uk_bin_collection/uk_bin_collection/councils/ThreeRiversDistrictCouncil.py,sha256=RHt3e9oeKzwxjjY-M8aC0nk-ZXhHIoyC81JzxkPVxsE,5531
223
229
  uk_bin_collection/uk_bin_collection/councils/TonbridgeAndMallingBC.py,sha256=UlgnHDoi8ecav2H5-HqKNDpqW1J3RN-c___5c08_Q7I,4859
224
230
  uk_bin_collection/uk_bin_collection/councils/TorbayCouncil.py,sha256=JW_BS7wkfxFsmx6taQtPAQWdBp1AfLrxs0XRQ2XZcSw,2029
@@ -251,6 +257,7 @@ uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha2
251
257
  uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py,sha256=X_e9zXEZAl_Mp6nPORHc9CTmf3QHdoMY3BCnKrXEr1I,2131
252
258
  uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py,sha256=37igH9g0xe4XIhRhcJ-ZJBU8MxTp5yzgpadWbdE33Yg,5205
253
259
  uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=H8aFHlacwV07X-6T9RQua4irqDA0cIQrF4O1FfPR7yI,4114
260
+ uk_bin_collection/uk_bin_collection/councils/WolverhamptonCityCouncil.py,sha256=ncXfu5RHPCFsArczXHy7g0l_HEZa7GC-QA1QRReP_00,1801
254
261
  uk_bin_collection/uk_bin_collection/councils/WorcesterCityCouncil.py,sha256=dKHB2fPSmOGOwyvfpbdR4U8XW2ctBf63gCPxX06kwKA,1867
255
262
  uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py,sha256=GnNNMe33YMlK6S7rjM3c4BQkBnXelS0DKl2x5V4fb2w,5775
256
263
  uk_bin_collection/uk_bin_collection/councils/WyreCouncil.py,sha256=zDDa7n4K_zm5PgDL08A26gD9yOOsOhuexI3x2seaBF4,3511
@@ -258,8 +265,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
258
265
  uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
259
266
  uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
260
267
  uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
261
- uk_bin_collection-0.116.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
262
- uk_bin_collection-0.116.0.dist-info/METADATA,sha256=Dn9hGIlLa54AUUAhbw_fnO9kixM10sHL3cke0azYc3Y,17574
263
- uk_bin_collection-0.116.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
264
- uk_bin_collection-0.116.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
265
- uk_bin_collection-0.116.0.dist-info/RECORD,,
268
+ uk_bin_collection-0.118.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
269
+ uk_bin_collection-0.118.0.dist-info/METADATA,sha256=qfGDaPkQ6hAS_gVAFVs10LUi2EzwBJtfvFK5c7m-rh0,17574
270
+ uk_bin_collection-0.118.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
271
+ uk_bin_collection-0.118.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
272
+ uk_bin_collection-0.118.0.dist-info/RECORD,,