uk_bin_collection 0.105.1-py3-none-any.whl → 0.106.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/uk_bin_collection/tests/input.json
+++ b/uk_bin_collection/tests/input.json
@@ -484,6 +484,20 @@
         "url": "https://www.fenland.gov.uk/article/13114/",
         "wiki_name": "Fenland District Council"
     },
+    "FifeCouncil": {
+        "url": "https://www.fife.gov.uk",
+        "wiki_command_url_override": "https://www.fife.gov.uk",
+        "uprn": "320203521",
+        "wiki_name": "Fife Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
+    "FlintshireCountyCouncil": {
+        "url": "https://digital.flintshire.gov.uk",
+        "wiki_command_url_override": "https://digital.flintshire.gov.uk",
+        "uprn": "100100213710",
+        "wiki_name": "Flintshire County Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "ForestOfDeanDistrictCouncil": {
         "house_number": "ELMOGAL, PARKEND ROAD, BREAM, LYDNEY",
         "postcode": "GL15 6JT",
@@ -1143,8 +1157,8 @@
         "wiki_name": "South Tyneside Council"
     },
     "SouthwarkCouncil": {
-        "url": "https://www.southwark.gov.uk/bins/lookup/",
-        "wiki_command_url_override": "https://www.southwark.gov.uk/bins/lookup/XXXXXXXX",
+        "url": "https://services.southwark.gov.uk/bins/lookup/",
+        "wiki_command_url_override": "https://services.southwark.gov.uk/bins/lookup/XXXXXXXX",
         "uprn": "200003469271",
         "wiki_name": "Southwark Council",
         "wiki_note": "Replace XXXXXXXX with UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
@@ -1182,6 +1196,14 @@
         "wiki_name": "Stockport Borough Council",
         "wiki_note": "Replace XXXXXXXX with UPRN."
     },
+    "StocktonOnTeesCouncil": {
+        "house_number": "24",
+        "postcode": "TS20 2RD",
+        "skip_get_url": true,
+        "url": "https://www.stockton.gov.uk",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "Stockton On Tees Council"
+    },
     "StokeOnTrentCityCouncil": {
         "url": "https://www.stoke.gov.uk/jadu/custom/webserviceLookUps/BarTecWebServices_missed_bin_calendar.php?UPRN=3455121482",
         "wiki_command_url_override": "https://www.stoke.gov.uk/jadu/custom/webserviceLookUps/BarTecWebServices_missed_bin_calendar.php?UPRN=XXXXXXXXXX",
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py
@@ -0,0 +1,68 @@
+from datetime import datetime
+
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Get and check UPRN
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        API_URL = "https://www.fife.gov.uk/api/custom?action=powersuite_bin_calendar_collections&actionedby=bin_calendar&loadform=true&access=citizen&locale=en"
+        AUTH_URL = "https://www.fife.gov.uk/api/citizen?preview=false&locale=en"
+        AUTH_KEY = "Authorization"
+
+        r = requests.get(AUTH_URL)
+        r.raise_for_status()
+        auth_token = r.headers[AUTH_KEY]
+
+        post_data = {
+            "name": "bin_calendar",
+            "data": {
+                "uprn": user_uprn,
+            },
+            "email": "",
+            "caseid": "",
+            "xref": "",
+            "xref1": "",
+            "xref2": "",
+        }
+
+        headers = {
+            "referer": "https://www.fife.gov.uk/services/forms/bin-calendar",
+            "accept": "application/json",
+            "content-type": "application/json",
+            AUTH_KEY: auth_token,
+        }
+
+        r = requests.post(API_URL, data=json.dumps(post_data), headers=headers)
+        r.raise_for_status()
+
+        result = r.json()
+
+        for collection in result["data"]["tab_collections"]:
+            dict_data = {
+                "type": collection["colour"],
+                "collectionDate": datetime.strptime(
+                    collection["date"],
+                    "%A, %B %d, %Y",
+                ).strftime("%d/%m/%Y"),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
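
The new FifeCouncil parser works in two steps: an unauthenticated GET to the citizen API, whose Authorization response header is then replayed as a request header on the bin_calendar POST, and each returned collection is keyed by bin colour and a long-form date. Stripped of the class plumbing, the same flow looks roughly like this (a standalone sketch: the json import is made explicit here, and the empty email/caseid/xref fields and referer header the class sends are omitted):

# Standalone sketch of the Fife token-then-query flow shown above.
import json
import requests

auth = requests.get("https://www.fife.gov.uk/api/citizen?preview=false&locale=en")
auth.raise_for_status()
token = auth.headers["Authorization"]  # the token is issued in a response header

resp = requests.post(
    "https://www.fife.gov.uk/api/custom?action=powersuite_bin_calendar_collections"
    "&actionedby=bin_calendar&loadform=true&access=citizen&locale=en",
    data=json.dumps({"name": "bin_calendar", "data": {"uprn": "320203521"}}),
    headers={
        "Authorization": token,
        "content-type": "application/json",
        "accept": "application/json",
    },
)
resp.raise_for_status()
for collection in resp.json()["data"]["tab_collections"]:
    print(collection["colour"], collection["date"])  # date parsed with "%A, %B %d, %Y" above
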
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py
@@ -0,0 +1,60 @@
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = f"https://digital.flintshire.gov.uk/FCC_BinDay/Home/Details2/{user_uprn}"
+
+        # Make the GET request
+        response = requests.get(URI)
+
+        # Parse the HTML content
+        soup = BeautifulSoup(response.content, "html.parser")
+
+        # Adjust these tags and classes based on actual structure
+        # Example for finding collection dates and types
+        bin_collections = soup.find_all(
+            "div", class_="col-md-12 col-lg-12 col-sm-12 col-xs-12"
+        )  # Replace with actual class name
+
+        # Extracting and printing the schedule data
+        schedule = []
+        for collection in bin_collections:
+            dates = collection.find_all("div", class_="col-lg-2 col-md-2 col-sm-2")
+            bin_type = collection.find("div", class_="col-lg-3 col-md-3 col-sm-3")
+
+            if dates[0].text.strip() == "Date of Collection":
+                continue
+
+            bin_types = bin_type.text.strip().split(" / ")
+            date = dates[0].text.strip()
+
+            # Loop through the dates for each collection type
+            for bin_type in bin_types:
+
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": date,
+                }
+                bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
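
The new FlintshireCountyCouncil parser scrapes the Bootstrap grid on the Details2 page, skips the "Date of Collection" header row, and splits combined "type / type" cells into one record per bin type. Whatever it finds ends up in the shared output shape, sorted by the DD/MM/YYYY collection date; a tiny sketch of that final step (bin names and dates below are illustrative, not scraped values):

# Sketch of the result shape the parser returns and how it is ordered.
from datetime import datetime

bindata = {
    "bins": [
        {"type": "Refuse", "collectionDate": "03/01/2025"},
        {"type": "Recycling", "collectionDate": "27/12/2024"},
    ]
}
bindata["bins"].sort(key=lambda x: datetime.strptime(x["collectionDate"], "%d/%m/%Y"))
print(bindata["bins"][0])  # earliest collection first
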
--- a/uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py
@@ -19,7 +19,7 @@ class CouncilClass(AbstractGetBinDataClass):
         check_uprn(user_uprn)
         data = {"bins": []}
 
-        baseurl = "https://www.southwark.gov.uk/bins/lookup/"
+        baseurl = "https://services.southwark.gov.uk/bins/lookup/"
         url = baseurl + user_uprn
 
         headers = {
@@ -74,9 +74,13 @@ class CouncilClass(AbstractGetBinDataClass):
             data["bins"].append(dict_data)
 
         # Extract food waste collection information
-        food_section = soup.find("div", {"aria-labelledby": "organicsCollectionTitle"})
+        food_section = soup.find(
+            "div", {"aria-labelledby": "domesticFoodCollectionTitle"}
+        )
         if food_section:
-            food_title = food_section.find("p", {"id": "organicsCollectionTitle"}).text
+            food_title = food_section.find(
+                "p", {"id": "domesticFoodCollectionTitle"}
+            ).text
             food_next_collection = (
                 food_section.find(text=lambda text: "Next collection" in text)
                 .strip()
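
The Southwark change is a host move plus renamed element IDs: the lookup now lives under services.southwark.gov.uk and the food-waste section is identified by domesticFoodCollectionTitle instead of organicsCollectionTitle. The request itself is still just the base URL with the UPRN appended, roughly (UPRN taken from the input.json entry above; the real code also sends request headers, omitted here):

# Sketch of the updated Southwark lookup request.
import requests

url = "https://services.southwark.gov.uk/bins/lookup/" + "200003469271"
response = requests.get(url)
response.raise_for_status()
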
--- /dev/null
+++ b/uk_bin_collection/uk_bin_collection/councils/StocktonOnTeesCouncil.py
@@ -0,0 +1,159 @@
+import time
+
+from bs4 import BeautifulSoup
+from dateutil.relativedelta import relativedelta
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            data = {"bins": []}
+            collections = []
+            user_paon = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get("https://www.stockton.gov.uk/bin-collection-days")
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_ADDRESSLOOKUPPOSTCODE",
+                    )
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_ADDRESSLOOKUPSEARCH",
+                    )
+                )
+            )
+            findAddress.click()
+
+            WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.XPATH,
+                        ""
+                        "//*[@id='LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_ADDRESSLOOKUPADDRESS']//option[contains(., '"
+                        + user_paon
+                        + "')]",
+                    )
+                )
+            ).click()
+
+            # Wait for the submit button to appear, then click it to get the collection dates
+            WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (
+                        By.XPATH,
+                        '//*[@id="LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_COLLECTIONDETAILS2"]/div',
+                    )
+                )
+            )
+            time.sleep(2)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            rubbish_div = soup.find(
+                "p",
+                {
+                    "class": "myaccount-block__date myaccount-block__date--bin myaccount-block__date--waste"
+                },
+            )
+            rubbish_date = rubbish_div.text
+            if rubbish_date == "Today":
+                rubbish_date = datetime.now()
+            else:
+                rubbish_date = datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(rubbish_date).strip(),
+                    "%a %d %B %Y",
+                ).replace(year=datetime.now().year)
+
+            recycling_div = soup.find(
+                "p",
+                {
+                    "class": "myaccount-block__date myaccount-block__date--bin myaccount-block__date--recycling"
+                },
+            )
+            recycling_date = recycling_div.text
+            if recycling_date == "Today":
+                recycling_date = datetime.now()
+            else:
+                recycling_date = datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(recycling_date).strip(),
+                    "%a %d %B %Y",
+                )
+
+            garden_div = soup.find(
+                "div",
+                {
+                    "class": "myaccount-block__item myaccount-block__item--bin myaccount-block__item--garden"
+                },
+            )
+            garden_date = garden_div.find("strong")
+            if garden_date.text.strip() == "Date not available":
+                print("Garden waste unavailable")
+            else:
+                if garden_date.text == "Today":
+                    garden_date = datetime.now()
+                    collections.append(("Garden waste bin", garden_date))
+                else:
+                    garden_date = datetime.strptime(
+                        remove_ordinal_indicator_from_date_string(
+                            garden_date.text
+                        ).strip(),
+                        "%a %d %B %Y",
+                    )
+                    collections.append(("Garden waste bin", garden_date))
+
+            collections.append(("Rubbish bin", rubbish_date))
+            collections.append(("Recycling bin", recycling_date))
+
+            ordered_data = sorted(collections, key=lambda x: x[1])
+            for item in ordered_data:
+                dict_data = {
+                    "type": item[0].capitalize(),
+                    "collectionDate": item[1].strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            print()
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return data
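
The new StocktonOnTeesCouncil parser reads each collection date out of the "myaccount" blocks, special-cases the literal text "Today", and otherwise parses weekday, day, month, year strings after stripping the ordinal suffix (remove_ordinal_indicator_from_date_string comes from the project's common module and is assumed to remove the st/nd/rd/th part). The underlying parsing is plain stdlib, e.g. (input string illustrative):

# Sketch of the date handling used above.
from datetime import datetime

raw = "Tue 24 December 2024"  # after the ordinal suffix has been stripped
parsed = datetime.strptime(raw, "%a %d %B %Y")
print(parsed.strftime("%d/%m/%Y"))  # 24/12/2024
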
--- a/uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py
+++ b/uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py
@@ -77,44 +77,34 @@ class CouncilClass(AbstractGetBinDataClass):
         rubbish_div = soup.find(
             "div", {"id": "FINDYOURBINDAYS_RUBBISHDATE_OUTERDIV"}
         )
-        try:
-            rubbish_date = rubbish_div.find_all("div")[2]
-            rubbish_date = datetime.strptime(
-                rubbish_date.text,
-                "%A %d %B",
-            ).replace(year=datetime.now().year)
-        except:
-            rubbish_date = rubbish_div.find_all("div")[3]
+        rubbish_date = rubbish_div.find_all("div")[2]
+        if rubbish_date.text == "Today":
+            rubbish_date = datetime.now()
+        else:
             rubbish_date = datetime.strptime(
                 rubbish_date.text,
                 "%A %d %B",
             ).replace(year=datetime.now().year)
+
         recycling_div = soup.find(
             "div", {"id": "FINDYOURBINDAYS_RECYCLINGDATE_OUTERDIV"}
         )
-        try:
-            recycling_date = recycling_div.find_all("div")[2]
+        recycling_date = recycling_div.find_all("div")[2]
+        if recycling_date.text == "Today":
+            recycling_date = datetime.now()
+        else:
             recycling_date = datetime.strptime(
                 recycling_date.text,
                 "%A %d %B",
             ).replace(year=datetime.now().year)
-        except:
-            rubbish_date = recycling_div.find_all("div")[3]
-            rubbish_date = datetime.strptime(
-                rubbish_date.text,
-                "%A %d %B",
-            ).replace(year=datetime.now().year)
+
         food_div = soup.find(
             "div", {"id": "FINDYOURBINDAYS_RECYCLINGDATE_OUTERDIV"}
         )
-        try:
-            food_date = food_div.find_all("div")[2]
-            food_date = datetime.strptime(
-                food_date.text,
-                "%A %d %B",
-            ).replace(year=datetime.now().year)
-        except:
-            food_date = food_div.find_all("div")[3]
+        food_date = food_div.find_all("div")[2]
+        if food_date.text == "Today":
+            food_date = datetime.now()
+        else:
             food_date = datetime.strptime(
                 food_date.text,
                 "%A %d %B",
--- a/uk_bin_collection-0.105.1.dist-info/METADATA
+++ b/uk_bin_collection-0.106.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.105.1
+Version: 0.106.0
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
--- a/uk_bin_collection-0.105.1.dist-info/RECORD
+++ b/uk_bin_collection-0.106.0.dist-info/RECORD
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=nnUn45wSbrroBexbVAfuqwgFblWnDYA-SA-JcKOrJ2A,74417
+uk_bin_collection/tests/input.json,sha256=0F1vZN1L8JeOh5ECNrOoS-kOg0kh39tkE6tyV1ZqAi4,75381
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=LrOSt_loA1Mw3vTqaO2LpaDMu7rYJy6k5Kr-EOBln7s,3424
@@ -79,6 +79,8 @@ uk_bin_collection/uk_bin_collection/councils/ErewashBoroughCouncil.py,sha256=QTQ
 uk_bin_collection/uk_bin_collection/councils/FalkirkCouncil.py,sha256=C3OA9PEhBsCYPzwsSdqVi_SbF8uiB186i2XfHWKd3VI,1694
 uk_bin_collection/uk_bin_collection/councils/FarehamBoroughCouncil.py,sha256=25QxeN5q3ad1Wwexs2d-B7ooH0ru6pOUx58413FOTY4,2352
 uk_bin_collection/uk_bin_collection/councils/FenlandDistrictCouncil.py,sha256=sFrnKzIE2tIcz0YrC6A9HcevzgNdf6E6_HLGMWDKtGw,2513
+uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py,sha256=eP_NnHtBLyflRUko9ubi_nxUPb7qg9SbaaSxqWZxNEs,2157
+uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py,sha256=RvPHhGbzP3mcjgWe2rIQux43UuDH7XofJGIKs7wJRe0,2060
 uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py,sha256=xO5gqgsN9K-cQsuDoQF7ycZkjNdCPAQwIYOCFWxFJ_Y,4504
 uk_bin_collection/uk_bin_collection/councils/GatesheadCouncil.py,sha256=SRCgYhYs6rv_8C1UEDVORHZgXxcJkoZBjzdYS4Lu-ew,4531
 uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py,sha256=XzfFMCwclh9zAJgsbaj4jywjdiH0wPaFicaVsLrN3ms,2297
@@ -172,12 +174,13 @@ uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py,sha256=ThO-o
 uk_bin_collection/uk_bin_collection/councils/SouthOxfordshireCouncil.py,sha256=zW4bN3hcqNoK_Y0-vPpuZs3K0LTPvApu6_v9K-D7WjE,3879
 uk_bin_collection/uk_bin_collection/councils/SouthRibbleCouncil.py,sha256=OdexbeiI5WsCfjlsnHjAce8oGF5fW-n7q2XOuxcpHzw,3604
 uk_bin_collection/uk_bin_collection/councils/SouthTynesideCouncil.py,sha256=dxXGrJfg_fn2IPTBgq6Duwy0WY8GYLafMuisaCjOnbs,3426
-uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py,sha256=Kc9YrevYO4u1EI1r2LV74cmYCpEo5x2c8-WfFHecPCc,4817
+uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py,sha256=Z6JIbUt3yr4oG60n1At4AjPIGrs7Qzn_sDNY-TsS62E,4882
 uk_bin_collection/uk_bin_collection/councils/StAlbansCityAndDistrictCouncil.py,sha256=mPZz6Za6kTSkrfHnj0OfwtnpRYR1dKvxbuFEKnWsiL8,1451
 uk_bin_collection/uk_bin_collection/councils/StHelensBC.py,sha256=c7ZM8gnUkKdz9GYIhFLzTtwN0KAoMEKomTWDVbtJIpM,2069
 uk_bin_collection/uk_bin_collection/councils/StaffordBoroughCouncil.py,sha256=9Qj4HJI7Dbiqb2mVSG2UtkBe27Y7wvQ5SYFTwGzJ5g0,2292
 uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py,sha256=_N8Cg26EbTaKp0RsWvQuELVcZDHbT2BlD2LW8qhkS_Q,4361
 uk_bin_collection/uk_bin_collection/councils/StockportBoroughCouncil.py,sha256=v0HmioNVRoU1-9OnLJl2V3M5pVR1aVu1BgOLHFR1Sf4,1429
+uk_bin_collection/uk_bin_collection/councils/StocktonOnTeesCouncil.py,sha256=obaBgsmIJ95Ah7KaTNWdU107tZDPVuuJox0mGUoGjNk,6070
 uk_bin_collection/uk_bin_collection/councils/StokeOnTrentCityCouncil.py,sha256=KM0EgWeO7mk8lkozX0RCTfMchXdjrfBqIjCiOtB09aM,2884
 uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py,sha256=DMTAcXT_lay8Cl1hBbzf_LN7-GwTDGxT3Ug9QJkaF9Y,3936
 uk_bin_collection/uk_bin_collection/councils/StroudDistrictCouncil.py,sha256=9bYWppi7ViLGHL4VEg--nFn28MLYJYbiEntull1uZxU,3561
@@ -207,7 +210,7 @@ uk_bin_collection/uk_bin_collection/councils/WatfordBoroughCouncil.py,sha256=zFk
 uk_bin_collection/uk_bin_collection/councils/WaverleyBoroughCouncil.py,sha256=tp9l7vdgSGRzNNG0pDfnNuFj4D2bpRJUJmAiTJ6bM0g,4662
 uk_bin_collection/uk_bin_collection/councils/WealdenDistrictCouncil.py,sha256=SvSSaLkx7iJjzypAwKkaJwegXkSsIQtUOS2V605kz1A,3368
 uk_bin_collection/uk_bin_collection/councils/WelhatCouncil.py,sha256=ikUft37dYNJghfe-_6Fskiq1JihqpLmLNj38QkKSUUA,2316
-uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py,sha256=XhTimZAPNgcuFgNp5mQjkR8mC4LRqUEUCy6e6plHspM,6004
+uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py,sha256=2eHRlalZyY9jv_UsCWM9IYzOpRdhce2sEW5NtygEnpw,5513
 uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py,sha256=JFWUy4w0CKulGq16PfbRDKAdQEbokVEuabwlZYigdEU,4606
 uk_bin_collection/uk_bin_collection/councils/WestLothianCouncil.py,sha256=dq0jimtARvRkZiGbVFrXXZgY-BODtz3uYZ5UKn0bf64,4114
 uk_bin_collection/uk_bin_collection/councils/WestMorlandAndFurness.py,sha256=jbqV3460rn9D0yTBGWjpSe1IvWWcdGur5pzgj-hJcQ4,2513
@@ -227,8 +230,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=4s9ODGPAwPqwXc8SrTX5Wlfmizs3_58iXUtHc4Ir86o,1162
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.105.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
-uk_bin_collection-0.105.1.dist-info/METADATA,sha256=zoE5z9wR8LOa65itnqEBAZzWvtMRLYyUFbt6GWRzVuQ,17630
-uk_bin_collection-0.105.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-uk_bin_collection-0.105.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
-uk_bin_collection-0.105.1.dist-info/RECORD,,
+uk_bin_collection-0.106.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.106.0.dist-info/METADATA,sha256=VwTe4USVrMOyNReH2x0yO6HJRYUQwMkBNDnlIhFRbmM,17630
+uk_bin_collection-0.106.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+uk_bin_collection-0.106.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.106.0.dist-info/RECORD,,