uk_bin_collection 0.152.10-py3-none-any.whl → 0.153.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (21)
  1. uk_bin_collection/tests/input.json +10 -6
  2. uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py +1 -1
  3. uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py +75 -100
  4. uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +32 -18
  5. uk_bin_collection/uk_bin_collection/councils/DarlingtonBoroughCouncil.py +68 -0
  6. uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py +82 -24
  7. uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py +7 -1
  8. uk_bin_collection/uk_bin_collection/councils/IpswichBoroughCouncil.py +3 -1
  9. uk_bin_collection/uk_bin_collection/councils/LichfieldDistrictCouncil.py +7 -1
  10. uk_bin_collection/uk_bin_collection/councils/NorthEastLincs.py +17 -6
  11. uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py +67 -66
  12. uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py +19 -7
  13. uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py +2 -3
  14. uk_bin_collection/uk_bin_collection/councils/RunnymedeBoroughCouncil.py +7 -1
  15. uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py +4 -0
  16. uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py +47 -29
  17. {uk_bin_collection-0.152.10.dist-info → uk_bin_collection-0.153.0.dist-info}/METADATA +1 -1
  18. {uk_bin_collection-0.152.10.dist-info → uk_bin_collection-0.153.0.dist-info}/RECORD +21 -20
  19. {uk_bin_collection-0.152.10.dist-info → uk_bin_collection-0.153.0.dist-info}/LICENSE +0 -0
  20. {uk_bin_collection-0.152.10.dist-info → uk_bin_collection-0.153.0.dist-info}/WHEEL +0 -0
  21. {uk_bin_collection-0.152.10.dist-info → uk_bin_collection-0.153.0.dist-info}/entry_points.txt +0 -0
@@ -253,7 +253,7 @@
253
253
  "postcode": "BL1 5PQ",
254
254
  "skip_get_url": true,
255
255
  "uprn": "100010886936",
256
- "url": "https://carehomes.bolton.gov.uk/bins.aspx",
256
+ "url": "https://web.bolton.gov.uk/bins.aspx",
257
257
  "web_driver": "http://selenium:4444",
258
258
  "wiki_name": "Bolton",
259
259
  "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search). Previously required a single field that was UPRN and full address; now requires UPRN and postcode as separate fields.",
@@ -377,13 +377,10 @@
377
377
  "LAD24CD": "E07000172"
378
378
  },
379
379
  "BuckinghamshireCouncil": {
380
- "house_number": "The Ridings, Magpie Lane, Loudwater, High Wycombe, HP13 7BA",
381
- "postcode": "HP13 7BA",
382
380
  "uprn": "100081093078",
383
381
  "url": "https://www.buckinghamshire.gov.uk/waste-and-recycling/find-out-when-its-your-bin-collection/",
384
- "web_driver": "http://selenium:4444",
385
382
  "wiki_name": "Buckinghamshire",
386
- "wiki_note": "Pass the house name/number and postcode in their respective arguments, both wrapped in quotes.",
383
+ "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search).",
387
384
  "LAD24CD": "E06000060"
388
385
  },
389
386
  "BurnleyBoroughCouncil": {
@@ -641,6 +638,13 @@
641
638
  "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver.",
642
639
  "LAD24CD": "E07000096"
643
640
  },
641
+ "DarlingtonBoroughCouncil": {
642
+ "uprn": "10003076924",
643
+ "url": "https://www.darlington.gov.uk/bins-waste-and-recycling/collection-day-lookup/",
644
+ "wiki_name": "Darlington Borough Council",
645
+ "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN.",
646
+ "LAD24CD": "E06000005"
647
+ },
644
648
  "DartfordBoroughCouncil": {
645
649
  "uprn": "100060861698",
646
650
  "url": "https://www.dartford.gov.uk/waste-recycling/collection-day",
@@ -1439,7 +1443,7 @@
1439
1443
  "house_number": "71",
1440
1444
  "postcode": "ME16 8BT",
1441
1445
  "url": "https://my.maidstone.gov.uk/service/Find-your-bin-day",
1442
- "web_driver": "http://selenium:4444",
1446
+ "web_driver": "http://selenium:4444",
1443
1447
  "wiki_name": "Maidstone",
1444
1448
  "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver.",
1445
1449
  "LAD24CD": "E07000110"
@@ -35,7 +35,7 @@ class CouncilClass(AbstractGetBinDataClass):
35
35
  data = {"bins": []}
36
36
 
37
37
  # Get our initial session running
38
- page = "https://carehomes.bolton.gov.uk/bins.aspx"
38
+ page = "https://web.bolton.gov.uk/bins.aspx"
39
39
 
40
40
  driver = create_webdriver(web_driver, headless, None, __name__)
41
41
  driver.get(page)
@@ -1,11 +1,26 @@
1
- from bs4 import BeautifulSoup
2
- from selenium.webdriver.common.by import By
3
- from selenium.webdriver.support import expected_conditions as EC
4
- from selenium.webdriver.support.wait import WebDriverWait
1
+ import json
2
+ from dataclasses import asdict, dataclass
3
+ from typing import Literal
5
4
 
6
- from uk_bin_collection.uk_bin_collection.common import *
5
+ import requests
6
+ from cryptography.hazmat.backends import default_backend
7
+ from cryptography.hazmat.primitives import padding
8
+ from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
9
+
10
+ from uk_bin_collection.uk_bin_collection.common import check_uprn
7
11
  from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
8
12
 
13
+ key_hex = "F57E76482EE3DC3336495DEDEEF3962671B054FE353E815145E29C5689F72FEC"
14
+ iv_hex = "2CBF4FC35C69B82362D393A4F0B9971A"
15
+
16
+
17
+ @dataclass
18
+ class BucksInput:
19
+ P_CLIENT_ID: Literal[152]
20
+ P_COUNCIL_ID: Literal[34505]
21
+ P_LANG_CODE: Literal["EN"]
22
+ P_UPRN: str
23
+
9
24
 
10
25
  class CouncilClass(AbstractGetBinDataClass):
11
26
  """
@@ -14,113 +29,73 @@ class CouncilClass(AbstractGetBinDataClass):
14
29
  implementation.
15
30
  """
16
31
 
17
- def parse_data(self, page: str, **kwargs) -> dict:
18
- driver = None
32
+ def encode_body(self, bucks_input: BucksInput):
33
+ key = bytes.fromhex(key_hex)
34
+ iv = bytes.fromhex(iv_hex)
35
+
36
+ json_data = json.dumps(asdict(bucks_input))
37
+ data_bytes = json_data.encode("utf-8")
38
+
39
+ padder = padding.PKCS7(128).padder()
40
+ padded_data = padder.update(data_bytes) + padder.finalize()
41
+
42
+ backend = default_backend()
43
+ cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend)
44
+ encryptor = cipher.encryptor()
45
+ ciphertext = encryptor.update(padded_data) + encryptor.finalize()
46
+
47
+ return ciphertext.hex()
48
+
49
+ def decode_response(self, hex_input: str):
50
+
51
+ key = bytes.fromhex(key_hex)
52
+ iv = bytes.fromhex(iv_hex)
53
+ ciphertext = bytes.fromhex(hex_input)
54
+
55
+ backend = default_backend()
56
+ cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend)
57
+ decryptor = cipher.decryptor()
58
+ decrypted_padded = decryptor.update(ciphertext) + decryptor.finalize()
59
+
60
+ unpadder = padding.PKCS7(128).unpadder()
61
+ plaintext_bytes = unpadder.update(decrypted_padded) + unpadder.finalize()
62
+ plaintext = plaintext_bytes.decode("utf-8")
63
+
64
+ return json.loads(plaintext)
65
+
66
+ def parse_data(self, _: str, **kwargs) -> dict:
19
67
  try:
20
- data = {"bins": []}
21
- user_paon = kwargs.get("paon")
22
- user_postcode = kwargs.get("postcode")
23
- user_uprn = kwargs.get("uprn")
24
- web_driver = kwargs.get("web_driver")
25
- headless = kwargs.get("headless")
26
- check_paon(user_paon)
27
- check_postcode(user_postcode)
28
-
29
- # Create Selenium webdriver
30
- driver = create_webdriver(web_driver, headless, None, __name__)
31
- driver.get(kwargs.get("url"))
32
-
33
- # Click "Check now" button
34
- check_now_button = WebDriverWait(driver, 10).until(
35
- EC.element_to_be_clickable((By.XPATH, "//a[contains(text(), 'Check now')]"))
68
+ user_uprn: str = kwargs.get("uprn") or ""
69
+ check_uprn(user_uprn)
70
+ bucks_input = BucksInput(
71
+ P_CLIENT_ID=152, P_COUNCIL_ID=34505, P_LANG_CODE="EN", P_UPRN=user_uprn
36
72
  )
37
- check_now_button.click()
38
73
 
39
- # Wait for the postcode field to appear then populate it
40
- inputElement_postcode = WebDriverWait(driver, 10).until(
41
- EC.presence_of_element_located((By.ID, "postcodeSearch"))
42
- )
43
- inputElement_postcode.send_keys(user_postcode)
74
+ encoded_input = self.encode_body(bucks_input)
44
75
 
45
- # Click Find button
46
- find_button = WebDriverWait(driver, 10).until(
47
- EC.element_to_be_clickable((By.XPATH, "//button[contains(text(), 'Find')]"))
76
+ session = requests.Session()
77
+ response = session.post(
78
+ "https://itouchvision.app/portal/itouchvision/kmbd/collectionDay",
79
+ data=encoded_input,
48
80
  )
49
- find_button.click()
50
-
51
- # Wait for the address dropdown and select by UPRN
52
- if user_uprn:
53
- address_option = WebDriverWait(driver, 10).until(
54
- EC.element_to_be_clickable((By.XPATH, f"//option[@value='{user_uprn}']"))
55
- )
56
- address_option.click()
57
- else:
58
- # Fallback to selecting by address text
59
- address_option = WebDriverWait(driver, 10).until(
60
- EC.element_to_be_clickable(
61
- (By.XPATH, f"//select[@id='addressSelect']//option[contains(., '{user_paon}')]")
62
- )
63
- )
64
- address_option.click()
65
81
 
66
- # Wait a moment for the page to update after address selection
67
- import time
68
- time.sleep(2)
82
+ output = response.text
69
83
 
70
- # Wait for collection information to appear - try multiple possible selectors
71
- try:
72
- WebDriverWait(driver, 15).until(
73
- EC.presence_of_element_located((By.XPATH, "//h2[contains(text(), 'Your next collections')]"))
74
- )
75
- except:
76
- # Alternative wait for collection data structure
77
- WebDriverWait(driver, 10).until(
78
- EC.presence_of_element_located((By.XPATH, "//div[contains(@class, 'ant-row') and contains(@class, 'd-flex')]//h3[@class='text-white']"))
84
+ decoded_bins = self.decode_response(output)
85
+ data: dict[str, list[dict[str, str]]] = {}
86
+ data["bins"] = list(
87
+ map(
88
+ lambda a: {
89
+ "type": a["binType"],
90
+ "collectionDate": a["collectionDay"].replace("-", "/"),
91
+ },
92
+ decoded_bins["collectionDay"],
79
93
  )
80
-
81
- soup = BeautifulSoup(driver.page_source, features="html.parser")
82
-
83
- # Find all collection items with the specific structure - try multiple class patterns
84
- collection_items = soup.find_all("div", class_=lambda x: x and "ant-col" in x and "ant-col-xs-12" in x)
85
- if not collection_items:
86
- # Fallback to finding items by structure
87
- collection_items = soup.find_all("div", class_=lambda x: x and "p-2" in x and "d-flex" in x and "flex-column" in x)
88
-
89
- current_year = datetime.now().year
90
- current_month = datetime.now().month
91
-
92
- for item in collection_items:
93
- # Extract bin type from h3 element
94
- bin_type_elem = item.find("h3", class_="text-white")
95
- # Extract date from div with specific classes
96
- date_elem = item.find("div", class_="text-white fw-bold")
97
-
98
- if bin_type_elem and date_elem:
99
- bin_type = bin_type_elem.get_text().strip()
100
- date_text = date_elem.get_text().strip()
101
-
102
- try:
103
- collection_date = datetime.strptime(date_text, "%A %d %B")
104
- if (current_month > 10) and (collection_date.month < 3):
105
- collection_date = collection_date.replace(year=(current_year + 1))
106
- else:
107
- collection_date = collection_date.replace(year=current_year)
108
-
109
- dict_data = {
110
- "type": bin_type,
111
- "collectionDate": collection_date.strftime("%d/%m/%Y"),
112
- }
113
- data["bins"].append(dict_data)
114
- except ValueError:
115
- continue
94
+ )
116
95
 
117
96
  except Exception as e:
118
97
  # Here you can log the exception if needed
119
98
  print(f"An error occurred: {e}")
120
99
  # Optionally, re-raise the exception if you want it to propagate
121
100
  raise
122
- finally:
123
- # This block ensures that the driver is closed regardless of an exception
124
- if driver:
125
- driver.quit()
126
101
  return data
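The rewritten Buckinghamshire parser now builds its request client-side: it serialises a small JSON payload, pads it with PKCS#7, encrypts it with AES-256-CBC using the fixed key/IV shown above, POSTs the hex-encoded ciphertext to the itouchvision endpoint, and reverses the process on the response. A minimal, standalone round-trip sketch of what encode_body/decode_response do (the helper names below are mine; the UPRN is just the test value from input.json):

    import json
    from cryptography.hazmat.primitives import padding
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    KEY_HEX = "F57E76482EE3DC3336495DEDEEF3962671B054FE353E815145E29C5689F72FEC"
    IV_HEX = "2CBF4FC35C69B82362D393A4F0B9971A"

    def encrypt_payload(payload: dict) -> str:
        # JSON -> PKCS#7 padding -> AES-256-CBC -> hex, mirroring encode_body()
        key, iv = bytes.fromhex(KEY_HEX), bytes.fromhex(IV_HEX)
        padder = padding.PKCS7(128).padder()
        padded = padder.update(json.dumps(payload).encode("utf-8")) + padder.finalize()
        encryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
        return (encryptor.update(padded) + encryptor.finalize()).hex()

    def decrypt_payload(hex_text: str) -> dict:
        # hex -> AES-256-CBC decrypt -> strip PKCS#7 -> JSON, mirroring decode_response()
        key, iv = bytes.fromhex(KEY_HEX), bytes.fromhex(IV_HEX)
        decryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).decryptor()
        padded = decryptor.update(bytes.fromhex(hex_text)) + decryptor.finalize()
        unpadder = padding.PKCS7(128).unpadder()
        return json.loads((unpadder.update(padded) + unpadder.finalize()).decode("utf-8"))

    body = {"P_CLIENT_ID": 152, "P_COUNCIL_ID": 34505, "P_LANG_CODE": "EN", "P_UPRN": "100081093078"}
    assert decrypt_payload(encrypt_payload(body)) == body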
@@ -245,25 +245,39 @@ class CouncilClass(AbstractGetBinDataClass):
245
245
  # extract table body
246
246
  for row in table.find_all("tr")[1:]:
247
247
  if row.find_all("td")[1].text.strip() == "Normal collection day":
248
- bh_dict[
249
- parse(
250
- row.find_all("td")[0].text.strip(),
251
- dayfirst=True,
252
- fuzzy=True,
253
- ).date()
254
- ] = parse(
255
- row.find_all("td")[0].text.strip(), dayfirst=True, fuzzy=True
256
- ).date()
248
+ try:
249
+ # Check for normal collection day (no change)
250
+ if row.find_all("td")[0].text.strip() == "Normal collection":
251
+ continue
252
+ else:
253
+ bh_dict[
254
+ parse(
255
+ row.find_all("td")[0].text.strip(),
256
+ dayfirst=True,
257
+ fuzzy=True,
258
+ ).date()
259
+ ] = parse(
260
+ row.find_all("td")[0].text.strip(), dayfirst=True, fuzzy=True
261
+ ).date()
262
+ except:
263
+ continue
257
264
  else:
258
- bh_dict[
259
- parse(
260
- row.find_all("td")[0].text.strip(),
261
- dayfirst=True,
262
- fuzzy=True,
263
- ).date()
264
- ] = parse(
265
- row.find_all("td")[1].text.strip(), dayfirst=True, fuzzy=True
266
- ).date()
265
+ try:
266
+ # Check for normal collection day (no change)
267
+ if row.find_all("td")[1].text.strip() == "Normal collection":
268
+ continue
269
+ else:
270
+ bh_dict[
271
+ parse(
272
+ row.find_all("td")[0].text.strip(),
273
+ dayfirst=True,
274
+ fuzzy=True,
275
+ ).date()
276
+ ] = parse(
277
+ row.find_all("td")[1].text.strip(), dayfirst=True, fuzzy=True
278
+ ).date()
279
+ except:
280
+ continue
267
281
 
268
282
  for refuse_date in refuse_dates:
269
283
  collection_date = (datetime.strptime(refuse_date, "%d/%m/%Y") + timedelta(
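The extra guards in the Cheltenham bank-holiday table matter because some rows now carry a date cell that just reads "Normal collection", and dateutil's fuzzy parser raises rather than guessing when a string contains no date at all. A quick illustration (the sample cell values are hypothetical):

    from dateutil.parser import parse

    for cell in ("Friday 26 December 2025", "Normal collection"):
        try:
            print(parse(cell, dayfirst=True, fuzzy=True).date())
        except ValueError:
            # parse() raises when the string holds no date information
            print(f"skipped: {cell!r}")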
@@ -0,0 +1,68 @@
1
+ import re
2
+
3
+ from bs4 import BeautifulSoup
4
+
5
+ from uk_bin_collection.uk_bin_collection.common import *
6
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
7
+
8
+
9
+ # import the wonderful Beautiful Soup and the URL grabber
10
+ class CouncilClass(AbstractGetBinDataClass):
11
+ """
12
+ Concrete classes have to implement all abstract operations of the
13
+ base class. They can also override some operations with a default
14
+ implementation.
15
+ """
16
+
17
+ def parse_data(self, page: str, **kwargs) -> dict:
18
+
19
+ data = {"bins": []}
20
+
21
+ user_uprn = kwargs.get("uprn")
22
+ check_uprn(user_uprn)
23
+
24
+ url = f"https://www.darlington.gov.uk/bins-waste-and-recycling/collection-day-lookup/?uprn={user_uprn}"
25
+
26
+ # Referrer: https://www.darlington.gov.uk/bins-waste-and-recycling/collection-day-lookup/
27
+ # X-Requested-With: XMLHttpRequest
28
+ headers = {
29
+ "Accept": "*/*",
30
+ "Accept-Encoding": "gzip, deflate, br, zstd",
31
+ "Accept-Language": "en-GB,en;q=0.5",
32
+ "Referer": "https://www.darlington.gov.uk/bins-waste-and-recycling/collection-day-lookup/",
33
+ "Sec-Detch-Dest": "empty",
34
+ "Sec-Fetch-Mode": "cors",
35
+ "Sec-Fetch-Site": "same-origin",
36
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.6167.186 Safari/537.36",
37
+ "X-Requested-With": "XMLHttpRequest",
38
+ }
39
+
40
+ # Make a BS4 object
41
+ page = requests.get(url, headers=headers)
42
+ soup = BeautifulSoup(page.text, features="html.parser")
43
+ soup.prettify()
44
+
45
+ # Loop over each date card
46
+ card_blocks = soup.select("#detailsDisplay .refuse-results")
47
+
48
+ for card in card_blocks:
49
+ bin_date_tag = card.select_one(".card-footer h3")
50
+ if not bin_date_tag:
51
+ continue
52
+
53
+ bin_type = card.select_one(".card-header h2").text.strip()
54
+ bin_date = bin_date_tag.text.strip()
55
+
56
+ # Remove any extra text from the date "(Today)", "(Tomorrow)"
57
+ cleaned_bin_date = re.sub(r"\s*\(.*?\)", "", bin_date).strip()
58
+
59
+ next_binfo = {
60
+ "type": bin_type,
61
+ "collectionDate": datetime.strptime(
62
+ cleaned_bin_date, "%A %d %B %Y"
63
+ ).strftime(date_format),
64
+ }
65
+
66
+ data["bins"].append(next_binfo)
67
+
68
+ return data
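The new Darlington parser fetches the lookup page as an XHR and reads each .refuse-results card; the only fiddly part is that the footer date can carry a suffix such as "(Today)" or "(Tomorrow)", which the re.sub call strips before strptime. A small check of that cleaning step (the sample strings are made up):

    import re
    from datetime import datetime

    for raw in ("Tuesday 5 August 2025 (Today)", "Wednesday 6 August 2025"):
        cleaned = re.sub(r"\s*\(.*?\)", "", raw).strip()
        print(datetime.strptime(cleaned, "%A %d %B %Y").strftime("%d/%m/%Y"))
    # prints 05/08/2025 and 06/08/2025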
@@ -1,11 +1,13 @@
1
- import json
1
+ import time
2
+
2
3
  import requests
3
- from datetime import datetime
4
+ from dateutil.relativedelta import relativedelta
4
5
 
5
6
  from uk_bin_collection.uk_bin_collection.common import *
6
7
  from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
7
8
 
8
9
 
10
+ # import the wonderful Beautiful Soup and the URL grabber
9
11
  class CouncilClass(AbstractGetBinDataClass):
10
12
  """
11
13
  Concrete classes have to implement all abstract operations of the
@@ -14,28 +16,84 @@ class CouncilClass(AbstractGetBinDataClass):
14
16
  """
15
17
 
16
18
  def parse_data(self, page: str, **kwargs) -> dict:
17
- user_uprn = kwargs.get("uprn")
18
- check_uprn(user_uprn)
19
+ # Make a BS4 object
20
+ uprn = kwargs.get("uprn")
21
+ # usrn = kwargs.get("paon")
22
+ check_uprn(uprn)
23
+ # check_usrn(usrn)
19
24
  bindata = {"bins": []}
20
-
21
- # Make API request
22
- api_url = f"https://east-herts.co.uk/api/services/{user_uprn}"
23
- response = requests.get(api_url)
24
- response.raise_for_status()
25
-
26
- data = response.json()
27
- today = datetime.now().date()
28
-
29
- for service in data.get("services", []):
30
- collection_date_str = service.get("collectionDate")
31
- if collection_date_str:
32
- collection_date = datetime.strptime(collection_date_str, "%Y-%m-%d").date()
33
- # Only include future dates
34
- if collection_date >= today:
35
- dict_data = {
36
- "type": service.get("binType", ""),
37
- "collectionDate": collection_date.strftime("%d/%m/%Y"),
25
+
26
+ # uprn = uprn.zfill(12)
27
+
28
+ SESSION_URL = "https://eastherts-self.achieveservice.com/authapi/isauthenticated?uri=https%253A%252F%252Feastherts-self.achieveservice.com%252FAchieveForms%252F%253Fmode%253Dfill%2526consentMessage%253Dyes%2526form_uri%253Dsandbox-publish%253A%252F%252FAF-Process-98782935-6101-4962-9a55-5923e76057b6%252FAF-Stage-dcd0ec18-dfb4-496a-a266-bd8fadaa28a7%252Fdefinition.json%2526process%253D1%2526process_uri%253Dsandbox-processes%253A%252F%252FAF-Process-98782935-6101-4962-9a55-5923e76057b6%2526process_id%253DAF-Process-98782935-6101-4962-9a55-5923e76057b6&hostname=eastherts-self.achieveservice.com&withCredentials=true"
29
+
30
+ API_URL = "https://eastherts-self.achieveservice.com/apibroker/runLookup"
31
+
32
+ headers = {
33
+ "Content-Type": "application/json",
34
+ "Accept": "*/*",
35
+ "User-Agent": "Mozilla/5.0",
36
+ "X-Requested-With": "XMLHttpRequest",
37
+ "Referer": "https://eastherts-self.achieveservice.com/fillform/?iframe_id=fillform-frame-1&db_id=",
38
+ }
39
+ s = requests.session()
40
+ r = s.get(SESSION_URL)
41
+ r.raise_for_status()
42
+ session_data = r.json()
43
+ sid = session_data["auth-session"]
44
+ params = {
45
+ # unix_timestamp
46
+ "_": str(int(time.time() * 1000)),
47
+ "sid": sid,
48
+ }
49
+
50
+ params = {
51
+ "id": "683d9ff0e299d",
52
+ "repeat_against": "",
53
+ "noRetry": "true",
54
+ "getOnlyTokens": "undefined",
55
+ "log_id": "",
56
+ "app_name": "AF-Renderer::Self",
57
+ # unix_timestamp
58
+ "_": str(int(time.time() * 1000)),
59
+ "sid": sid,
60
+ }
61
+
62
+ data = {
63
+ "formValues": {
64
+ "Collection Days": {
65
+ "inputUPRN": {
66
+ "value": uprn,
38
67
  }
39
- bindata["bins"].append(dict_data)
40
-
68
+ },
69
+ }
70
+ }
71
+
72
+ r = s.post(API_URL, json=data, headers=headers, params=params)
73
+ r.raise_for_status()
74
+
75
+ data = r.json()
76
+ rows_data = data["integration"]["transformed"]["rows_data"]["0"]
77
+ if not isinstance(rows_data, dict):
78
+ raise ValueError("Invalid data returned from API")
79
+
80
+ # Extract each service's relevant details for the bin schedule
81
+ for key, value in rows_data.items():
82
+ if key.endswith("NextDate"):
83
+ BinType = key.replace("NextDate", "ServiceName")
84
+ for key2, value2 in rows_data.items():
85
+ if key2 == BinType:
86
+ BinType = value2
87
+ next_collection = datetime.strptime(
88
+ remove_ordinal_indicator_from_date_string(value), "%A %d %B"
89
+ ).replace(year=datetime.now().year)
90
+ if datetime.now().month == 12 and next_collection.month == 1:
91
+ next_collection = next_collection + relativedelta(years=1)
92
+
93
+ dict_data = {
94
+ "type": BinType,
95
+ "collectionDate": next_collection.strftime(date_format),
96
+ }
97
+ bindata["bins"].append(dict_data)
98
+
41
99
  return bindata
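The achieveservice lookup used by East Herts only returns day-and-month strings, so the parser pins the current year onto the parsed date and rolls it into the next year when the code runs in December but the collection falls in January. A minimal sketch of that rule (it drops the weekday and ordinal handling the real code does first; the sample dates are hypothetical):

    from datetime import datetime
    from dateutil.relativedelta import relativedelta

    def pin_year(day_month: str, today: datetime) -> datetime:
        # parse "2 January" style text, assume the current year, then handle the December wrap
        parsed = datetime.strptime(day_month, "%d %B").replace(year=today.year)
        if today.month == 12 and parsed.month == 1:
            parsed += relativedelta(years=1)
        return parsed

    print(pin_year("2 January", datetime(2025, 12, 20)))    # 2026-01-02 00:00:00
    print(pin_year("22 December", datetime(2025, 12, 20)))  # 2025-12-22 00:00:00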
@@ -20,10 +20,16 @@ class CouncilClass(AbstractGetBinDataClass):
20
20
  check_uprn(user_uprn)
21
21
  bindata = {"bins": []}
22
22
 
23
+ headers = {
24
+ "Origin": "https://www.hinckley-bosworth.gov.uk",
25
+ "Referer": "https://www.hinckley-bosworth.gov.uk",
26
+ "User-Agent": "Mozilla/5.0",
27
+ }
28
+
23
29
  URI = f"https://www.hinckley-bosworth.gov.uk/set-location?id={user_uprn}&redirect=refuse&rememberloc="
24
30
 
25
31
  # Make the GET request
26
- response = requests.get(URI)
32
+ response = requests.get(URI, headers=headers)
27
33
 
28
34
  # Parse the HTML
29
35
  soup = BeautifulSoup(response.content, "html.parser")
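Several parsers in this release (Hinckley and Bosworth, Lichfield, Runnymede, Nuneaton and Bedworth, North East Lincs) gain the same minimal browser-style header set, presumably so the councils' request filtering stops rejecting plain requests calls. The pattern is just the following (the UPRN value is a placeholder):

    import requests

    uprn = "000000000000"  # placeholder UPRN
    headers = {
        "Origin": "https://www.hinckley-bosworth.gov.uk",
        "Referer": "https://www.hinckley-bosworth.gov.uk",
        "User-Agent": "Mozilla/5.0",
    }
    response = requests.get(
        f"https://www.hinckley-bosworth.gov.uk/set-location?id={uprn}&redirect=refuse&rememberloc=",
        headers=headers,
    )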
@@ -31,7 +31,9 @@ class CouncilClass(AbstractGetBinDataClass):
31
31
  IBC_ENDPOINT = "https://app.ipswich.gov.uk/bin-collection/"
32
32
 
33
33
  def transform_date(self, date_str):
34
- date_str = re.sub(r"(st|nd|rd|th)", "", date_str) # Remove ordinal suffixes
34
+ date_str = re.sub(
35
+ r"(\d{1,2})(st|nd|rd|th)", r"\1", date_str
36
+ ) # Remove ordinal suffixes
35
37
  date_obj = datetime.strptime(date_str, "%A %d %B %Y")
36
38
  return date_obj.strftime(date_format)
37
39
 
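Anchoring the Ipswich ordinal regex to a preceding digit is what actually fixes the dates: the old pattern also matched letters inside words, so month names such as "August" lost their "st" and strptime then failed. For example (the date string is made up):

    import re

    date_str = "Thursday 21st August 2025"
    print(re.sub(r"(st|nd|rd|th)", "", date_str))              # "Thursday 21 Augu 2025" (no longer parses)
    print(re.sub(r"(\d{1,2})(st|nd|rd|th)", r"\1", date_str))  # "Thursday 21 August 2025"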
@@ -24,10 +24,16 @@ class CouncilClass(AbstractGetBinDataClass):
24
24
  def solve(s):
25
25
  return re.sub(r"(\d)(st|nd|rd|th)", r"\1", s)
26
26
 
27
+ headers = {
28
+ "Origin": "https://www.lichfielddc.gov.uk",
29
+ "Referer": "https://www.lichfielddc.gov.uk",
30
+ "User-Agent": "Mozilla/5.0",
31
+ }
32
+
27
33
  URI = f"https://www.lichfielddc.gov.uk/homepage/6/bin-collection-dates?uprn={user_uprn}"
28
34
 
29
35
  # Make the GET request
30
- response = requests.get(URI)
36
+ response = requests.get(URI, headers=headers)
31
37
 
32
38
  soup = BeautifulSoup(response.text, "html.parser")
33
39
 
@@ -1,5 +1,7 @@
1
1
  import pandas as pd
2
+ import requests
2
3
  from bs4 import BeautifulSoup
4
+
3
5
  from uk_bin_collection.uk_bin_collection.common import date_format
4
6
  from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
5
7
 
@@ -12,15 +14,26 @@ class CouncilClass(AbstractGetBinDataClass):
12
14
  """
13
15
 
14
16
  def parse_data(self, page: str, **kwargs) -> dict:
15
- # Make a BS4 object
16
- soup = BeautifulSoup(page.text, features="html.parser")
17
+ user_url = kwargs.get("url")
18
+
19
+ headers = {
20
+ "Origin": "https://www.nelincs.gov.uk",
21
+ "Referer": "https://www.nelincs.gov.uk",
22
+ "User-Agent": "Mozilla/5.0",
23
+ }
24
+
25
+ # Make the GET request
26
+ response = requests.get(user_url, headers=headers)
27
+
28
+ # Parse the HTML
29
+ soup = BeautifulSoup(response.content, "html.parser")
17
30
  soup.prettify()
18
31
 
19
32
  data = {"bins": []}
20
33
 
21
34
  # Get list items that can be seen on page
22
35
  for element in soup.find_all(
23
- "li", {"class": "list-group-item p-0 p-3 bin-collection-item"}
36
+ "li", {"class": "border-0 list-group-item p-3 bg-light rounded p-2"}
24
37
  ):
25
38
  element_text = element.text.strip().split("\n\n")
26
39
  element_text = [x.strip() for x in element_text]
@@ -35,9 +48,7 @@ class CouncilClass(AbstractGetBinDataClass):
35
48
  data["bins"].append(dict_data)
36
49
 
37
50
  # Get hidden list items too
38
- for element in soup.find_all(
39
- "li", {"class": "list-group-item p-0 p-3 bin-collection-item d-none"}
40
- ):
51
+ for element in soup.find_all("li", {"class": "border-0 list-group-item p-3"}):
41
52
  element_text = element.text.strip().split("\n\n")
42
53
  element_text = [x.strip() for x in element_text]
43
54
 
@@ -1,5 +1,3 @@
1
- import time
2
-
3
1
  import requests
4
2
  from bs4 import BeautifulSoup
5
3
 
@@ -17,76 +15,79 @@ class CouncilClass(AbstractGetBinDataClass):
17
15
 
18
16
  def parse_data(self, page: str, **kwargs) -> dict:
19
17
 
20
- user_uprn = kwargs.get("uprn")
21
- check_uprn(user_uprn)
18
+ user_postcode = kwargs.get("postcode")
19
+ user_paon = kwargs.get("paon")
20
+ check_postcode(user_postcode)
21
+ check_paon(user_paon)
22
22
  bindata = {"bins": []}
23
23
 
24
- API_URL = "https://maps.norwich.gov.uk/arcgis/rest/services/MyNorwich/PropertyDetails/FeatureServer/2/query"
25
-
26
- params = {
27
- "f": "json",
28
- "where": f"UPRN='{user_uprn}' or UPRN='0{user_uprn}'",
29
- "returnGeometry": "true",
30
- "spatialRel": "esriSpatialRelIntersects",
31
- "geometryType": "esriGeometryPolygon",
32
- "inSR": "4326",
33
- "outFields": "*",
34
- "outSR": "4326",
35
- "resultRecordCount": "1000",
24
+ URI = "https://bnr-wrp.whitespacews.com/"
25
+
26
+ session = requests.Session()
27
+
28
+ # get link from first page as has some kind of unique hash
29
+ r = session.get(
30
+ URI,
31
+ )
32
+ r.raise_for_status()
33
+ soup = BeautifulSoup(r.text, features="html.parser")
34
+
35
+ alink = soup.find("a", text="View my collections")
36
+
37
+ if alink is None:
38
+ raise Exception("Initial page did not load correctly")
39
+
40
+ # greplace 'seq' query string to skip next step
41
+ nextpageurl = alink["href"].replace("seq=1", "seq=2")
42
+
43
+ data = {
44
+ "address_name_number": user_paon,
45
+ "address_postcode": user_postcode,
36
46
  }
37
47
 
38
- r = requests.get(API_URL, params=params)
39
-
40
- data = r.json()
41
- data = data["features"][0]["attributes"]["WasteCollectionHtml"]
42
- soup = BeautifulSoup(data, "html.parser")
43
-
44
- alternateCheck = soup.find("p")
45
- if alternateCheck.text.__contains__("alternate"):
46
- alternateCheck = True
47
- else:
48
- alternateCheck = False
49
-
50
- strong = soup.find_all("strong")
51
- collections = []
52
-
53
- if alternateCheck:
54
- bin_types = strong[2].text.strip().replace(".", "").split(" and ")
55
- for bin in bin_types:
56
- collections.append(
57
- (
58
- bin.capitalize(),
59
- datetime.strptime(strong[1].text.strip(), date_format),
60
- )
61
- )
62
-
63
- else:
64
- p_tag = soup.find_all("p")
65
- i = 1
66
- for p in p_tag:
67
- bin_types = (
68
- p.text.split("Your ")[1].split(" is collected")[0].split(" and ")
69
- )
70
- for bin in bin_types:
71
- collections.append(
72
- (
73
- bin.capitalize(),
74
- datetime.strptime(strong[1].text.strip(), date_format),
75
- )
76
- )
77
- i += 2
78
-
79
- if len(strong) > 3:
80
- collections.append(
81
- ("Garden", datetime.strptime(strong[4].text.strip(), date_format))
82
- )
83
-
84
- ordered_data = sorted(collections, key=lambda x: x[1])
85
- for item in ordered_data:
48
+ # get list of addresses
49
+ r = session.post(nextpageurl, data)
50
+ r.raise_for_status()
51
+
52
+ soup = BeautifulSoup(r.text, features="html.parser")
53
+
54
+ # get first address (if you don't enter enough argument values this won't find the right address)
55
+ alink = soup.find("div", id="property_list").find("a")
56
+
57
+ if alink is None:
58
+ raise Exception("Address not found")
59
+
60
+ nextpageurl = URI + alink["href"]
61
+
62
+ # get collection page
63
+ r = session.get(
64
+ nextpageurl,
65
+ )
66
+ r.raise_for_status()
67
+ soup = BeautifulSoup(r.text, features="html.parser")
68
+
69
+ if soup.find("span", id="waste-hint"):
70
+ raise Exception("No scheduled services at this address")
71
+
72
+ u1s = soup.find("section", id="scheduled-collections").find_all("u1")
73
+
74
+ for u1 in u1s:
75
+ lis = u1.find_all("li", recursive=False)
76
+
77
+ date = lis[1].text.replace("\n", "")
78
+ bin_type = lis[2].text.replace("\n", "")
79
+
86
80
  dict_data = {
87
- "type": item[0] + " bin",
88
- "collectionDate": item[1].strftime(date_format),
81
+ "type": bin_type,
82
+ "collectionDate": datetime.strptime(
83
+ date,
84
+ "%d/%m/%Y",
85
+ ).strftime(date_format),
89
86
  }
90
87
  bindata["bins"].append(dict_data)
91
88
 
89
+ bindata["bins"].sort(
90
+ key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
91
+ )
92
+
92
93
  return bindata
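Norwich switches from the ArcGIS PropertyDetails feature service to the shared whitespacews portal, which takes three requests on one session: grab the "View my collections" link from the front page (rewriting seq=1 to seq=2 skips a step), post the house number and postcode to get an address list, then follow the first address to the scheduled-collections page. Condensed from the diff above (the address values are placeholders):

    import requests
    from bs4 import BeautifulSoup

    URI = "https://bnr-wrp.whitespacews.com/"
    session = requests.Session()

    # 1. front page: find the search link (it carries a per-session hash)
    front = BeautifulSoup(session.get(URI).text, features="html.parser")
    search_url = front.find("a", text="View my collections")["href"].replace("seq=1", "seq=2")

    # 2. address search: post house number/name and postcode, take the first hit
    found = session.post(search_url, {"address_name_number": "1", "address_postcode": "NR1 1AA"})
    first = BeautifulSoup(found.text, features="html.parser").find("div", id="property_list").find("a")

    # 3. collection schedule for that property
    schedule = session.get(URI + first["href"])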
@@ -1,23 +1,29 @@
1
+ import re
2
+ import urllib.parse
3
+
4
+ import requests
1
5
  from bs4 import BeautifulSoup
6
+
2
7
  from uk_bin_collection.uk_bin_collection.common import *
3
8
  from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
4
9
 
5
- from bs4 import BeautifulSoup
6
- import urllib.parse
7
- import requests
8
- import re
9
-
10
10
 
11
11
  class CouncilClass(AbstractGetBinDataClass):
12
12
  def parse_data(self, page: str, **kwargs) -> dict:
13
13
 
14
14
  data = {"bins": []}
15
15
 
16
+ headers = {
17
+ "Origin": "https://www.nuneatonandbedworth.gov.uk/",
18
+ "Referer": "https://www.nuneatonandbedworth.gov.uk/",
19
+ "User-Agent": "Mozilla/5.0",
20
+ }
21
+
16
22
  street = urllib.parse.quote_plus(kwargs.get("paon"))
17
23
  base_url = "https://www.nuneatonandbedworth.gov.uk/"
18
24
  search_query = f"directory/search?directoryID=3&showInMap=&keywords={street}&search=Search+directory"
19
25
 
20
- search_response = requests.get(base_url + search_query)
26
+ search_response = requests.get(base_url + search_query, headers=headers)
21
27
 
22
28
  if search_response.status_code == 200:
23
29
  soup = BeautifulSoup(search_response.content, "html.parser")
@@ -56,7 +62,13 @@ class CouncilClass(AbstractGetBinDataClass):
56
62
 
57
63
  def get_bin_data(self, url) -> dict:
58
64
 
59
- bin_day_response = requests.get(url)
65
+ headers = {
66
+ "Origin": "https://www.nuneatonandbedworth.gov.uk/",
67
+ "Referer": "https://www.nuneatonandbedworth.gov.uk/",
68
+ "User-Agent": "Mozilla/5.0",
69
+ }
70
+
71
+ bin_day_response = requests.get(url, headers=headers)
60
72
 
61
73
  if bin_day_response.status_code == 200:
62
74
 
@@ -33,10 +33,9 @@ class CouncilClass(AbstractGetBinDataClass):
33
33
  "tr", {"class": "govuk-table__row"}
34
34
  ):
35
35
  week_text = week.get_text().strip().split("\n")
36
+ date_str = week_text[0].split(" - ")[0].split("–")[0].strip()
36
37
  collection_date = datetime.strptime(
37
- remove_ordinal_indicator_from_date_string(
38
- week_text[0].split(" - ")[0]
39
- ).strip(),
38
+ remove_ordinal_indicator_from_date_string(date_str),
40
39
  "%A %d %B",
41
40
  )
42
41
  next_collection = collection_date.replace(year=datetime.now().year)
@@ -21,10 +21,16 @@ class CouncilClass(AbstractGetBinDataClass):
21
21
  check_uprn(user_uprn)
22
22
  bindata = {"bins": []}
23
23
 
24
+ headers = {
25
+ "Origin": "https://www.runnymede.gov.uk",
26
+ "Referer": "https://www.runnymede.gov.uk",
27
+ "User-Agent": "Mozilla/5.0",
28
+ }
29
+
24
30
  URI = f"https://www.runnymede.gov.uk/homepage/150/check-your-bin-collection-day?address={user_uprn}"
25
31
 
26
32
  # Make the GET request
27
- response = requests.get(URI)
33
+ response = requests.get(URI, headers=headers)
28
34
 
29
35
  soup = BeautifulSoup(response.text, "html.parser")
30
36
 
@@ -77,6 +77,10 @@ class CouncilClass(AbstractGetBinDataClass):
77
77
  )
78
78
  submit.click()
79
79
 
80
+ WebDriverWait(driver, 10).until(
81
+ EC.presence_of_element_located((By.CLASS_NAME, "bin-collection__month"))
82
+ )
83
+
80
84
  soup = BeautifulSoup(driver.page_source, features="html.parser")
81
85
 
82
86
  # Quit Selenium webdriver to release session
@@ -1,3 +1,5 @@
1
+ import re
2
+
1
3
  from bs4 import BeautifulSoup
2
4
 
3
5
  from uk_bin_collection.uk_bin_collection.common import *
@@ -91,40 +93,56 @@ class CouncilClass(AbstractGetBinDataClass):
91
93
 
92
94
  soup = BeautifulSoup(response.text, features="html.parser")
93
95
  soup.prettify()
94
-
96
+ # print(soup)
95
97
  # Find all the bits of the current calendar that contain an event
96
- events = soup.find_all("div", {"class": "rc-event-container"})
98
+ resultscontainer = soup.find_all("div", {"class": "results-container"})
97
99
 
98
- for event in events:
99
- # Get the date and type of each bin collection
100
- bin_date = datetime.strptime(
101
- event.find_next("a").attrs.get("data-original-datetext"),
102
- "%A %d %B, %Y",
100
+ for result in resultscontainer:
101
+ rows = result.find_all(
102
+ "div", {"class": "col-12 col-sm-6 col-md-4 col-lg-4 mb-4"}
103
103
  )
104
- bin_type = event.find_next("a").attrs.get("data-original-title")
105
- # Only process it if it's today or in the future
106
- if bin_date.date() >= datetime.now().date():
107
- # Split the really long type up into two separate bins
108
- if (
109
- bin_type
110
- == "Mixed dry recycling (blue lidded bin) and glass (black box or basket)"
111
- ):
112
- collections.append(
113
- (
114
- "Mixed dry recycling (blue lidded bin)",
115
- datetime.strftime(bin_date, date_format),
104
+ for row in rows:
105
+ cardcollectionday = row.find(
106
+ "span", {"class": "card-collection-day"}
107
+ )
108
+ cardcollectiondate = row.find(
109
+ "span", {"class": "card-collection-date"}
110
+ )
111
+ cardcollectionmonth = row.find(
112
+ "span", {"class": "card-collection-month"}
113
+ )
114
+ bin_type = row.find(
115
+ "li", {"class": re.compile(r"collection-type-...$")}
116
+ ).text
117
+
118
+ collection_date = f"{cardcollectionday.text}{cardcollectiondate.text}{cardcollectionmonth.text}"
119
+ bin_date = datetime.strptime(
120
+ collection_date,
121
+ "%A %d %B %Y",
122
+ )
123
+
124
+ if bin_date.date() >= datetime.now().date():
125
+ # Split the really long type up into two separate bins
126
+ if (
127
+ bin_type
128
+ == "Mixed dry recycling (blue lidded bin) and glass (black box or basket)"
129
+ ):
130
+ collections.append(
131
+ (
132
+ "Mixed dry recycling (blue lidded bin)",
133
+ datetime.strftime(bin_date, date_format),
134
+ )
135
+ )
136
+ collections.append(
137
+ (
138
+ "Glass (black box or basket)",
139
+ datetime.strftime(bin_date, date_format),
140
+ )
116
141
  )
117
- )
118
- collections.append(
119
- (
120
- "Glass (black box or basket)",
121
- datetime.strftime(bin_date, date_format),
142
+ else:
143
+ collections.append(
144
+ (bin_type, datetime.strftime(bin_date, date_format))
122
145
  )
123
- )
124
- else:
125
- collections.append(
126
- (bin_type, datetime.strftime(bin_date, date_format))
127
- )
128
146
 
129
147
  data = {"bins": []}
130
148
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: uk_bin_collection
3
- Version: 0.152.10
3
+ Version: 0.153.0
4
4
  Summary: Python Lib to collect UK Bin Data
5
5
  Author: Robert Bradley
6
6
  Author-email: robbrad182@gmail.com
@@ -7,7 +7,7 @@ uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-c
7
7
  uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
8
8
  uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
9
9
  uk_bin_collection/tests/generate_map_test_results.py,sha256=CKnGK2ZgiSXomRGkomX90DitgMP-X7wkHhyKORDcL2E,1144
10
- uk_bin_collection/tests/input.json,sha256=TEBMKFjbR6llQIMVXw1KlkoSRUiWIp-Zbka4yARv2Kw,132602
10
+ uk_bin_collection/tests/input.json,sha256=Yc2-jsV8WUduGHCTWgUug9KYzIkGt5hrLlETgfgP-0w,132782
11
11
  uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
12
12
  uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
13
13
  uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -46,7 +46,7 @@ uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py,sha256=MT1b
46
46
  uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py,sha256=ZLA2V3qPsJTom7SeQdGDhF4tJSfgIV5Qi202QvGKJZ0,4477
47
47
  uk_bin_collection/uk_bin_collection/councils/BlaenauGwentCountyBoroughCouncil.py,sha256=Fp83gwF6RQieDgELpAdurjm6tDF4C6pBBukfPQSBHtc,6190
48
48
  uk_bin_collection/uk_bin_collection/councils/BolsoverCouncil.py,sha256=_NZuSvSblS0NlxIhrzqbB9GNnY0xjT4SrIBi9bQ6UC8,10354
49
- uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=WI68r8jB0IHPUT4CgmZMtng899AAMFTxkyTdPg9yLF8,4117
49
+ uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=wnw_q0A42SUJAdtzAWG2tga3p67egv5v1p2IY0oLPBk,4111
50
50
  uk_bin_collection/uk_bin_collection/councils/BostonBoroughCouncil.py,sha256=8xv6FMNj8Qgwn5K0nMdB5X8hkcNFzhcJ48DMordflJY,5631
51
51
  uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py,sha256=Llo1rULaAZ8rChVYZqXFFLo7CN6vbT0ULUJD6ActouY,9015
52
52
  uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py,sha256=qtCGHIwKDJQw0SNvQr0EZub21PFPDsdcxABOPv_MC6s,6114
@@ -60,7 +60,7 @@ uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py,sha256=dii
60
60
  uk_bin_collection/uk_bin_collection/councils/BromsgroveDistrictCouncil.py,sha256=PUfxP8j5Oh9wFHkdjbrJzQli9UzMHZzwrZ2hkThrvhI,1781
61
61
  uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py,sha256=GRneTbNNj0RPuzE7XdaWPBeEyek1hI6grAqV9ad1ck8,5030
62
62
  uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py,sha256=-Facq-ToQkcWUePpKBwq90LZUFxgUSydNL2sYaLX4yw,4473
63
- uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py,sha256=EUgAdwvAxNyGfTnDFPIcHuOKEru5X08We9_qN9OZjH0,5600
63
+ uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py,sha256=qZk3tsDKp5EvER3JT4Mrq0wI5c3tPOrSZJntjCSB7_w,3419
64
64
  uk_bin_collection/uk_bin_collection/councils/BurnleyBoroughCouncil.py,sha256=GJf1OPvUVj3vqsR3KjG0DFHZrSBu4ogIz_MJeVV8tNA,3192
65
65
  uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py,sha256=H7wAxO1nfxkewVoRRolumq8bBJG04siE3jieFH3RGpQ,2632
66
66
  uk_bin_collection/uk_bin_collection/councils/CalderdaleCouncil.py,sha256=OJZcHYlvZDzmBpjjPPm3J8CRK9Twc49vRj7O9c5fyQ4,4971
@@ -73,7 +73,7 @@ uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py,sha25
73
73
  uk_bin_collection/uk_bin_collection/councils/CeredigionCountyCouncil.py,sha256=np9iLnMVWpMYUiHZ4sJaSaU5pOWfmiCLQ8TIrOlY48o,5924
74
74
  uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py,sha256=FC8jftZFfMnqV-Of_eQAYBcUiCIJDSNFO2DMZbtDy6E,2569
75
75
  uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py,sha256=thagz60NeEDbOHdnBH-ByKDBRNElPvo0UU0lwZ6c2OQ,3891
76
- uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py,sha256=bGxfMO4PpTd6ZTfm1hA9tmNUFDKGt20CMJolZ3K_CeM,16411
76
+ uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py,sha256=7W0MmXn0yXhryRg8rPVMvp1rilYHnYRfxpZ9UmATt24,17173
77
77
  uk_bin_collection/uk_bin_collection/councils/CherwellDistrictCouncil.py,sha256=h8jgpTra9H-aqfDB7tcOd8fveRcQXEXRUnG35SF7ARg,3484
78
78
  uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py,sha256=RoybPitUD4u0xk4Kc9hXqHbUXCqGJG9Z4uRHWKj4ttk,2495
79
79
  uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py,sha256=5mKZf22NgdyBY-SqV0c2q8b8IJobkoZrsfGEVUcxUyM,3544
@@ -91,6 +91,7 @@ uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py,sha256=6UCT2Q75NA
91
91
  uk_bin_collection/uk_bin_collection/councils/CumberlandAllerdaleCouncil.py,sha256=bPOmkyzNnHrOtUprbouHdOsgpu7WilUADcaccWTCPFI,2839
92
92
  uk_bin_collection/uk_bin_collection/councils/CumberlandCouncil.py,sha256=PwTbTVNDdGZnSkPcIHYf-LHItDyIbR68Avr96T1FrX8,3308
93
93
  uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py,sha256=Tm_6pvBPj-6qStbe6-02LXaoCOlnnDvVXAAocGVvf_E,3970
94
+ uk_bin_collection/uk_bin_collection/councils/DarlingtonBoroughCouncil.py,sha256=5rv6xiGkqBrCP57dw2vkrqyQlSX6qhGj_uB2nKrkHL4,2448
94
95
  uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py,sha256=3vuXYhoK3ZkFGtxIDJXCTeY8_kbaXDcKn0C2yk5g1kI,2056
95
96
  uk_bin_collection/uk_bin_collection/councils/DenbighshireCouncil.py,sha256=FtG0LMTY21RBQiBpeX4FihdMCEZ1knpARYyMfyCn9ng,2103
96
97
  uk_bin_collection/uk_bin_collection/councils/DerbyCityCouncil.py,sha256=M8FGLhZn9wdRCq1W6z_yqJQqeba3EKyba3vhM22MzB4,1883
@@ -105,7 +106,7 @@ uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py,sha256=UhNXGi-_6NY
105
106
  uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py,sha256=i3AcWkeAnk7rD59nOm0QCSH7AggqjUAdwsXuSIC8ZJE,1614
106
107
  uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py,sha256=aYUVE5QqTxdj8FHhCB4EiFVDJahWJD9Pq0d1upBEvXg,1501
107
108
  uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py,sha256=5aZ4C2t-RBuygtqTRdcpGutjqJ2udrpKbW4F8RQ9r3M,3825
108
- uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=8BG-EGHyrORQmECovXqlc7DbDg6rTuELVDJJU5-qxYY,1463
109
+ uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=enESYQCaqZ5miaHCmeQUWw3VIE5HfD8K7Oq8gxUVYDA,3901
109
110
  uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=Laf-j0LLr7M4xmKhk8kjPNTtt66oXKYWm0ppxdUX3F0,4326
110
111
  uk_bin_collection/uk_bin_collection/councils/EastLothianCouncil.py,sha256=5IqDnO5dvnOMNb3gNP1xp14xdPwFG3mb7MBhnSq2npI,2882
111
112
  uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py,sha256=rWUQOWsiGIFffei5o-92jBHNhmcfyV72EnSy2Yd-B4c,4910
@@ -153,12 +154,12 @@ uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=Z
153
154
  uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt2iGl8qJxHb-uBh4u0knmi9MJ6irOJw9WYA,4805
154
155
  uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
155
156
  uk_bin_collection/uk_bin_collection/councils/Hillingdon.py,sha256=2OUp0iYO1YeZuTq0XRUalgoay5JRZgfHKKEwYzdMAU0,11291
156
- uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py,sha256=51vXTKrstfJhb7cLCcrsvA9qKCsptyNMZvy7ML9DasM,2344
157
+ uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py,sha256=v7s_Rp51IyXcj2MphGlECzaknPEl4lTzndgoc6m1sQg,2558
157
158
  uk_bin_collection/uk_bin_collection/councils/HorshamDistrictCouncil.py,sha256=U8WelJiHivT7CS3meUVcLURWOLRKes1pKZ81tcqKarM,4446
158
159
  uk_bin_collection/uk_bin_collection/councils/HullCityCouncil.py,sha256=UHcesBoctFVcXDYuwfag43KbcJcopkEDzJ-54NxtK0Q,1851
159
160
  uk_bin_collection/uk_bin_collection/councils/HuntingdonDistrictCouncil.py,sha256=TXDIlVvYMwWECiXXBmzUjm36L5Pmn3GHciUuRaKmfF0,2059
160
161
  uk_bin_collection/uk_bin_collection/councils/HyndburnBoroughCouncil.py,sha256=_lJ__EMcUBbE4QZvH-c1nCGeKxZTn5hhcpjVAYBbJZc,5403
161
- uk_bin_collection/uk_bin_collection/councils/IpswichBoroughCouncil.py,sha256=57lmDl_FprG68gUhKQYpOa1M2pudyb1utfoMhUXNwzs,2802
162
+ uk_bin_collection/uk_bin_collection/councils/IpswichBoroughCouncil.py,sha256=J9iQzZ4dc0VZhU_gTtMyiqpef6UYAPY_B7hQO5tY3Wg,2836
162
163
  uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py,sha256=xavzL6ZIU9DG8Xro3vN0CEnYmNU31OGnOvnq78wgpQc,1258
163
164
  uk_bin_collection/uk_bin_collection/councils/KingsLynnandWestNorfolkBC.py,sha256=Shj18R-7NW4ivqJJFVJOLmf-EeN6hXP2Of30oI-SeAQ,1932
164
165
  uk_bin_collection/uk_bin_collection/councils/KingstonUponThamesCouncil.py,sha256=8kKRhvdqs1hCymqmTtbi0FPJp2kd47JVmKTgRaWOGeY,3573
@@ -168,7 +169,7 @@ uk_bin_collection/uk_bin_collection/councils/LancasterCityCouncil.py,sha256=FmHT
168
169
  uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py,sha256=VWdhw6qvCTj3EhFHf046xPWgc6szeFW2Xbt6W2J0e6w,4371
169
170
  uk_bin_collection/uk_bin_collection/councils/LeicesterCityCouncil.py,sha256=o3kE8sjThQa4_AvSK5NH8VH7jWFO9MMPgoqLOTjyh0w,1851
170
171
  uk_bin_collection/uk_bin_collection/councils/LewesDistrictCouncil.py,sha256=WZAxQ6TOGqQYYsRiL5SdzKdRA4k3GH5ygbKnau5KtDA,2658
171
- uk_bin_collection/uk_bin_collection/councils/LichfieldDistrictCouncil.py,sha256=l3zgTWuKOW8fgb8PmXv0OTI6HaiGBPndefNQk8MM4oY,1810
172
+ uk_bin_collection/uk_bin_collection/councils/LichfieldDistrictCouncil.py,sha256=qO-z5nl2Deairo_afpcTyHr-fg_pqOGWyM516KjgUoU,2012
172
173
  uk_bin_collection/uk_bin_collection/councils/LincolnCouncil.py,sha256=aUCqjHuk0sLtx83a-2agcLIMgEbfqjltXRCBRXT9J-8,3733
173
174
  uk_bin_collection/uk_bin_collection/councils/LisburnCastlereaghCityCouncil.py,sha256=vSOzdEwp9ZeUhed7E3eVv9ReD-2XgbSkpyAbVnfc-Gk,3309
174
175
  uk_bin_collection/uk_bin_collection/councils/LiverpoolCityCouncil.py,sha256=qo02eDHHSFSCN7xqKjLoKNZo-RjZPSUy3TVyJNpfg2Q,3069
@@ -211,7 +212,7 @@ uk_bin_collection/uk_bin_collection/councils/NewportCityCouncil.py,sha256=dAcl5P
211
212
  uk_bin_collection/uk_bin_collection/councils/NorthAyrshireCouncil.py,sha256=o8zv40Wt19d51mrN5lsgLMCKMokMPmI1cMHBNT5yAho,1976
212
213
  uk_bin_collection/uk_bin_collection/councils/NorthDevonCountyCouncil.py,sha256=tgJKIvu7nnCAHu_HImfG5SQABD6ygKFqrZU-ZoC6ObY,6260
213
214
  uk_bin_collection/uk_bin_collection/councils/NorthEastDerbyshireDistrictCouncil.py,sha256=BfNpYcjG3z0Yz8OYN6NkfzvZ5k1FI-80D-rv211kPPU,5449
214
- uk_bin_collection/uk_bin_collection/councils/NorthEastLincs.py,sha256=fYf438VZIaOaqPSwdTTWVjFTdrI0jGfFsxVzOc-QdkA,1817
215
+ uk_bin_collection/uk_bin_collection/councils/NorthEastLincs.py,sha256=GAAWNksi_-mLT8os7xE4K9_5X--WvVBPz5_a0h-4lgc,2097
215
216
  uk_bin_collection/uk_bin_collection/councils/NorthHertfordshireDistrictCouncil.py,sha256=vwskZOfnUrSmKiezzt6TgP6_EIym0hDe-qaoLC9hWuU,12307
216
217
  uk_bin_collection/uk_bin_collection/councils/NorthKestevenDistrictCouncil.py,sha256=vYOCerJXr9LTP6F2wm4vpYNYbQaWNZ6yfHEQ33N_hTw,1681
217
218
  uk_bin_collection/uk_bin_collection/councils/NorthLanarkshireCouncil.py,sha256=npK1V8D3SLNTSSKkfEpEPvVgXDFyhH_tAsuGogsVKQY,1763
@@ -223,9 +224,9 @@ uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py,sha256=yqyc
223
224
  uk_bin_collection/uk_bin_collection/councils/NorthWestLeicestershire.py,sha256=gJj0dyQc5QUefqusKGk2LLXfWbG5tlEXUOh8KAPh3RI,4584
224
225
  uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py,sha256=rwW6y5yhRiuYk3lVS7YFZxLaY_yfojdcn2dxiWaP4YA,2013
225
226
  uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py,sha256=7kCoBqLkt4UmLQNOnuC6jWrSgJLwavzgfJTR6EWINQU,6409
226
- uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py,sha256=hddZD4tv-fw52hvxTm2UjSvRP3MwGchzWZyuhNeNvp0,2970
227
+ uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py,sha256=mf2qB_YQlYUN_YLxC4JHNqpB3yAuUiFbVT1pFichZB4,2839
227
228
  uk_bin_collection/uk_bin_collection/councils/NottinghamCityCouncil.py,sha256=panTCjnsBOQ98-TBO9xVZk_jcT_gjMhx3Gg5oWxBRLo,1254
228
- uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py,sha256=PViXaBJ5SeP3inOnqf4PuPahzRCLN06n5Gicqyt3gbM,37781
229
+ uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py,sha256=thcKPPUgSFrr2_an4jWsFzlwNzVCVvRLGNPK5jSXgd0,38192
229
230
  uk_bin_collection/uk_bin_collection/councils/OadbyAndWigstonBoroughCouncil.py,sha256=Kgy5HA0xZ9hR4_cAydPfOfskhGUB4j93AQF2-9Fj-Cg,2179
230
231
  uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py,sha256=9dlesCxNoVXlmQaqZj7QFh00smnJbm1Gnjkr_Uvzurs,1771
231
232
  uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py,sha256=a6bPDZDh0RKcbyDoidI-PiIM5lw7Tr0AlApP4_hTbxQ,2419
@@ -243,12 +244,12 @@ uk_bin_collection/uk_bin_collection/councils/ReigateAndBansteadBoroughCouncil.py
243
244
  uk_bin_collection/uk_bin_collection/councils/RenfrewshireCouncil.py,sha256=VjjolGn0KemgIUnmF-JlB8gxNLyaQB5mP61NonxVJAo,5080
244
245
  uk_bin_collection/uk_bin_collection/councils/RhonddaCynonTaffCouncil.py,sha256=wInyVG_0wRrX_dRO9qbAzPhlXDseXapj2zQhsISw8gg,3233
245
246
  uk_bin_collection/uk_bin_collection/councils/RochdaleCouncil.py,sha256=UTSwSw515VehGn4xkjjRhUlzS4lDj4hgna6y-4VW3uM,2379
246
- uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py,sha256=rfhD66A9HfHL46ldF9sbxvV7fPaaoNxzIJbHjVT6A90,2621
247
+ uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py,sha256=a4sL2K3a0dAWeX74qW9bbsTC666ahNRz0rZg3fAn_C0,2623
247
248
  uk_bin_collection/uk_bin_collection/councils/RotherDistrictCouncil.py,sha256=-fdLvtik9ytfwXrrhwWdBxqQOMq2N1pvrIuvShhf8PU,3090
248
249
  uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py,sha256=dfAqXtmbptHGZGGWkurjY9snaVm1aH5CGjhzdtoe4JM,2058
249
250
  uk_bin_collection/uk_bin_collection/councils/RoyalBoroughofGreenwich.py,sha256=AziuKhP6wKu_316MA4xXRN-ayDU6RSB21EfSWjmMqU8,3800
250
251
  uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py,sha256=WlmOD-xHJFdowI9fGuGDHOpnAVHGUWxGZqmJ6z4fGXs,5687
251
- uk_bin_collection/uk_bin_collection/councils/RunnymedeBoroughCouncil.py,sha256=vmTZfijt9b6sKzRdRwROE94lrMVnqs6kn6tg-xi1jR4,1618
252
+ uk_bin_collection/uk_bin_collection/councils/RunnymedeBoroughCouncil.py,sha256=-ZKQfpxviEcHM0j3QBGrKqJ-FbSouFHPuRnQSCoG5J0,1816
252
253
  uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py,sha256=nWo8xeER71FEbnMTX8W9bcwZNpLEExWzPvgRT7DmcMc,4221
253
254
  uk_bin_collection/uk_bin_collection/councils/RushmoorCouncil.py,sha256=ZsGnXjoEaOS6U7fI0w7-uqxayAHdNVKsJi2fqIWEls8,3375
254
255
  uk_bin_collection/uk_bin_collection/councils/SalfordCityCouncil.py,sha256=XUGemp2cdzsvkWjnv2m4YKTMcoKDUfIlVy3YucX-_o4,2601
@@ -279,7 +280,7 @@ uk_bin_collection/uk_bin_collection/councils/SpelthorneBoroughCouncil.py,sha256=
279
280
  uk_bin_collection/uk_bin_collection/councils/StAlbansCityAndDistrictCouncil.py,sha256=mPZz6Za6kTSkrfHnj0OfwtnpRYR1dKvxbuFEKnWsiL8,1451
280
281
  uk_bin_collection/uk_bin_collection/councils/StHelensBC.py,sha256=X9dvnQTNn7QUO8gv1A587e1aDI92TWN4iNLATTn3H3w,4777
281
282
  uk_bin_collection/uk_bin_collection/councils/StaffordBoroughCouncil.py,sha256=9Qj4HJI7Dbiqb2mVSG2UtkBe27Y7wvQ5SYFTwGzJ5g0,2292
282
- uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py,sha256=vdSnDbiKLQIdvvqVXDi2butZXRBMxv401aIrkLkKgy4,4370
283
+ uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py,sha256=VrPoRr5bl8T7ZMPbYVHg-4tq9OCfW5dPe9uhKiRjm9o,4507
283
284
  uk_bin_collection/uk_bin_collection/councils/StevenageBoroughCouncil.py,sha256=EiDIyOlHhdiJ-YYjo7T5uA5sN2jzNoysu6FctjuAjBI,3549
284
285
  uk_bin_collection/uk_bin_collection/councils/StirlingCouncil.py,sha256=QaUw5oP_mmFAXo98EdHhI6lWo4OjF2E8zY2M7HQo2bk,6308
285
286
  uk_bin_collection/uk_bin_collection/councils/StockportBoroughCouncil.py,sha256=v0HmioNVRoU1-9OnLJl2V3M5pVR1aVu1BgOLHFR1Sf4,1429
@@ -329,7 +330,7 @@ uk_bin_collection/uk_bin_collection/councils/WestNorthamptonshireCouncil.py,sha2
329
330
  uk_bin_collection/uk_bin_collection/councils/WestOxfordshireDistrictCouncil.py,sha256=LLgpBE-yZFTpyZ5StGPVYtiBGxzttZt4f4YEqiYqocE,4858
330
331
  uk_bin_collection/uk_bin_collection/councils/WestSuffolkCouncil.py,sha256=9i8AQHh-qIRPZ_5Ad97_h04-qgyLQDPV064obBzab1Y,2587
331
332
  uk_bin_collection/uk_bin_collection/councils/WiganBoroughCouncil.py,sha256=W9FGN47bTHvzARa6UInXEB3X6cToqfgzTkNlROzHgcM,3743
332
- uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py,sha256=Q0ooHTQb9ynMXpSNBPk7XXEjI7zcHst3id4wxGdmVx4,5698
333
+ uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py,sha256=HjNy1a9-RokuGKEXcBW8bAwDlsf4C_KpudjzVJvrnsA,6458
333
334
  uk_bin_collection/uk_bin_collection/councils/WinchesterCityCouncil.py,sha256=W2k00N5n9-1MzjMEqsNjldsQdOJPEPMjK7OGSinZm5Y,4335
334
335
  uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha256=VWtFgypXL1wKQ63NmV-_O9odOkbDmv3uCQwrv8g9lMI,2294
335
336
  uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py,sha256=X_e9zXEZAl_Mp6nPORHc9CTmf3QHdoMY3BCnKrXEr1I,2131
@@ -345,8 +346,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
345
346
  uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=QD4v4xpsEE0QheR_fGaNOIRMc2FatcUfKkkhAhseyVU,1159
346
347
  uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
347
348
  uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=Qb76X46V0UMZJwO8zMNPvnVY7jNa-bmTlrirDi1tuJA,4553
348
- uk_bin_collection-0.152.10.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
349
- uk_bin_collection-0.152.10.dist-info/METADATA,sha256=51DE3tDCHbfraq7wzD0wncD7biFXidumcQZfwgg3_Mk,26689
350
- uk_bin_collection-0.152.10.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
351
- uk_bin_collection-0.152.10.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
352
- uk_bin_collection-0.152.10.dist-info/RECORD,,
349
+ uk_bin_collection-0.153.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
350
+ uk_bin_collection-0.153.0.dist-info/METADATA,sha256=YQIxmkkmazvaTXCHyd2xKgJqkCcAEuG7AUBjlWZeZlM,26688
351
+ uk_bin_collection-0.153.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
352
+ uk_bin_collection-0.153.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
353
+ uk_bin_collection-0.153.0.dist-info/RECORD,,