uk_bin_collection 0.106.0__py3-none-any.whl → 0.108.0__py3-none-any.whl
- uk_bin_collection/tests/input.json +37 -0
- uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py +103 -0
- uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py +124 -0
- uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py +0 -1
- uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py +3 -9
- uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py +117 -0
- uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py +10 -3
- uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py +138 -0
- uk_bin_collection/uk_bin_collection/councils/SouthKestevenDistrictCouncil.py +30 -17
- uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py +19 -6
- uk_bin_collection/uk_bin_collection/councils/WestOxfordshireDistrictCouncil.py +7 -0
- uk_bin_collection/uk_bin_collection/councils/WorcesterCityCouncil.py +58 -0
- {uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/METADATA +6 -4
- {uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/RECORD +17 -12
- {uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/WHEEL +1 -1
- {uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/entry_points.txt +0 -0
uk_bin_collection/tests/input.json
@@ -12,6 +12,13 @@
     "wiki_name": "Adur and Worthing Councils",
     "wiki_note": "Replace XXXXXXXX with UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
   },
+  "ArdsAndNorthDownCouncil": {
+    "url": "https://www.ardsandnorthdown.gov.uk",
+    "wiki_command_url_override": "https://www.ardsandnorthdown.gov.uk",
+    "uprn": "187136177",
+    "wiki_name": "Ards and North Down Council",
+    "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+  },
   "ArmaghBanbridgeCraigavonCouncil": {
     "url": "https://www.armaghbanbridgecraigavon.gov.uk/",
     "wiki_command_url_override": "https://www.armaghbanbridgecraigavon.gov.uk/",
@@ -27,6 +34,14 @@
     "web_driver": "http://selenium:4444",
     "wiki_name": "Arun Council"
   },
+  "AshfordBoroughCouncil": {
+    "url": "https://ashford.gov.uk",
+    "wiki_command_url_override": "https://ashford.gov.uk",
+    "postcode": "TN23 7SP",
+    "uprn": "100060777899",
+    "wiki_name": "Ashford Borough Council",
+    "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+  },
   "AylesburyValeCouncil": {
     "skip_get_url": true,
     "uprn": "766252532",
@@ -391,6 +406,13 @@
     "wiki_name": "East Devon District Council",
     "wiki_note": "Replace XXXXXXXX with UPRN."
   },
+  "EastHertsCouncil": {
+    "house_number": "1",
+    "postcode": "CM20 2FZ",
+    "skip_get_url": true,
+    "url": "https://www.eastherts.gov.uk",
+    "wiki_name": "East Herts Council"
+  },
   "EastLindseyDistrictCouncil": {
     "house_number": "Raf Coningsby",
     "postcode": "LN4 4SY",
@@ -971,6 +993,14 @@
     "web_driver": "http://selenium:4444",
     "wiki_name": "Portsmouth City Council"
   },
+  "PowysCouncil": {
+    "house_number": "LANE COTTAGE",
+    "postcode": "HR3 5JS",
+    "skip_get_url": true,
+    "url": "https://www.powys.gov.uk",
+    "web_driver": "http://selenium:4444",
+    "wiki_name": "Powys Council"
+  },
   "PrestonCityCouncil": {
     "house_number": "Town Hall",
     "postcode": "PR1 2RL",
@@ -1519,6 +1549,13 @@
     "web_driver": "http://selenium:4444",
     "wiki_name": "Wokingham Borough Council"
   },
+  "WorcesterCityCouncil": {
+    "url": "https://www.Worcester.gov.uk",
+    "wiki_command_url_override": "https://www.Worcester.gov.uk",
+    "uprn": "100120650345",
+    "wiki_name": "Worcester City Council",
+    "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+  },
   "WychavonDistrictCouncil": {
     "postcode": "WR3 7RU",
     "skip_get_url": true,
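Each new entry in uk_bin_collection/tests/input.json mirrors the keyword arguments the matching council module reads in parse_data (uprn, postcode, house_number, web_driver and so on). Below is a minimal sketch of that mapping for the Powys entry; the house_number to "paon" mapping and the headless flag are assumptions based on the kwargs the new modules read, not something spelled out in this diff.

```python
# Sketch only: maps the PowysCouncil test entry onto parse_data(**kwargs).
# The house_number -> "paon" mapping is an assumption inferred from the
# kwargs that PowysCouncil.py reads; it is not stated in this diff.
entry = {
    "house_number": "LANE COTTAGE",
    "postcode": "HR3 5JS",
    "url": "https://www.powys.gov.uk",
    "web_driver": "http://selenium:4444",
}

kwargs = {
    "paon": entry["house_number"],      # house name/number
    "postcode": entry["postcode"],
    "web_driver": entry["web_driver"],  # remote Selenium endpoint
    "headless": True,                   # assumed default for CI runs
}

# from uk_bin_collection.uk_bin_collection.councils.PowysCouncil import CouncilClass
# print(CouncilClass().parse_data("", **kwargs))
```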
uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py (new file)
@@ -0,0 +1,103 @@
+from datetime import datetime
+
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Get and check UPRN
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        API_URL = (
+            "https://collections-ardsandnorthdown.azurewebsites.net/WSCollExternal.asmx"
+        )
+
+        # council seems to always be ARD no matter what the old council was
+        PAYLOAD = f"""<?xml version="1.0" encoding="utf-8" ?>
+            <soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+                xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+                xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
+            <soap:Body>
+                <getRoundCalendarForUPRN xmlns="http://webaspx-collections.azurewebsites.net/">
+                <council>ARD</council>
+                <UPRN>{user_uprn}</UPRN>
+                <from>Chtml</from>
+                </getRoundCalendarForUPRN >
+            </soap:Body>
+        </soap:Envelope>
+        """
+
+        r = requests.post(
+            API_URL,
+            data=PAYLOAD,
+            headers={"Content-Type": "text/xml; charset=utf-8"},
+        )
+        r.raise_for_status()
+
+        # html unescape text
+        text = (
+            (r.text.replace("&lt;", "<").replace("&gt;", ">").replace("&amp;", "&"))
+            .split("<getRoundCalendarForUPRNResult>")[-1]
+            .split("</getRoundCalendarForUPRNResult>")[0]
+        )
+
+        soup = BeautifulSoup(text, "html.parser")
+
+        # Initialize dictionary to store bin dates
+        bin_schedule = {}
+
+        # Define regex pattern to capture day and date (e.g., Tue 5 Nov)
+        date_pattern = re.compile(r"\b\w{3} \d{1,2} \w{3}\b")
+
+        current_year = datetime.now().year
+
+        # Find each bin collection line, parse date, and add to dictionary
+        for bin_info in soup.find_all("b"):
+            bin_type = bin_info.text.strip()
+            bin_details = bin_info.next_sibling.strip() if bin_info.next_sibling else ""
+            # Check for "Today" or "Tomorrow"
+            if "Today" in bin_details:
+                collection_date = datetime.now().strftime("%a %d %b")
+                bin_schedule[bin_type] = collection_date
+            elif "Tomorrow" in bin_details:
+                collection_date = (datetime.now() + timedelta(days=1)).strftime(
+                    "%a %d %b"
+                )
+                bin_schedule[bin_type] = collection_date
+            else:
+                # Extract date if it's a full date format
+                date_match = date_pattern.search(bin_details)
+                if date_match:
+                    bin_schedule[bin_type] = date_match.group()
+
+        # Display the parsed schedule with dates only
+        for bin_type, collection_date in bin_schedule.items():
+            date = datetime.strptime(collection_date, "%a %d %b")
+
+            if date.month == 1 and datetime.now().month > 1:
+                date = date.replace(year=current_year + 1)
+            else:
+                date = date.replace(year=current_year)
+
+            dict_data = {
+                "type": bin_type,
+                "collectionDate": date.strftime("%d/%m/%Y"),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
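As a quick smoke test, the new Ards and North Down module can be driven directly with the UPRN from its input.json entry. This is only a minimal sketch assuming the package is importable from this layout; the page argument is unused by this council, so an empty string is passed.

```python
# Minimal sketch: call the new scraper directly with the UPRN from input.json.
from uk_bin_collection.uk_bin_collection.councils.ArdsAndNorthDownCouncil import (
    CouncilClass,
)

council = CouncilClass()
result = council.parse_data("", uprn="187136177")  # UPRN from the test entry above
for bin_entry in result["bins"]:
    # collectionDate values are dd/mm/YYYY strings, already sorted ascending
    print(bin_entry["type"], bin_entry["collectionDate"])
```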
uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py (new file)
@@ -0,0 +1,124 @@
+from datetime import datetime
+
+import requests
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Get and check UPRN
+        user_uprn = kwargs.get("uprn")
+        user_postcode = kwargs.get("postcode")
+        check_uprn(user_uprn)
+        check_postcode(user_postcode)
+        web_driver = kwargs.get("web_driver")
+        headless = kwargs.get("headless")
+        bindata = {"bins": []}
+
+        API_URL = "https://secure.ashford.gov.uk/waste/collectiondaylookup/"
+
+        # Create Selenium webdriver
+        driver = create_webdriver(web_driver, headless, None, __name__)
+        driver.get(API_URL)
+
+        # Wait for the postcode field to appear then populate it
+        inputElement_postcode = WebDriverWait(driver, 30).until(
+            EC.presence_of_element_located(
+                (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_TextBox_PostCode")
+            )
+        )
+        inputElement_postcode.send_keys(user_postcode)
+
+        # Click search button
+        findAddress = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located(
+                (
+                    By.ID,
+                    "ContentPlaceHolder1_CollectionDayLookup2_Button_PostCodeSearch",
+                )
+            )
+        )
+        findAddress.click()
+
+        # Wait for the 'Select your property' dropdown to appear and select the first result
+        dropdown = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.ID,
+                    "ContentPlaceHolder1_CollectionDayLookup2_DropDownList_Addresses",
+                )
+            )
+        )
+
+        # Create a 'Select' for it, then select the first address in the list
+        # (Index 0 is "Make a selection from the list")
+        dropdownSelect = Select(dropdown)
+        dropdownSelect.select_by_value(str(user_uprn))
+
+        # Click search button
+        findAddress = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located(
+                (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_Button_SelectAddress")
+            )
+        )
+        findAddress.click()
+
+        h4_element = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located(
+                (By.XPATH, "//h4[contains(text(), 'Collection Dates')]")
+            )
+        )
+
+        soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+        bin_tables = soup.find_all("table")
+
+        for bin_table in bin_tables:
+            bin_text = bin_table.find("td", id=re.compile("CollectionDayLookup2_td_"))
+            if not bin_text:
+                continue
+
+            bin_type_soup = bin_text.find("b")
+
+            if not bin_type_soup:
+                continue
+            bin_type: str = bin_type_soup.text.strip().split(" (")[0]
+
+            date_soup = bin_text.find(
+                "span", id=re.compile(r"CollectionDayLookup2_Label_\w*_Date")
+            )
+            if not date_soup or (
+                " " not in date_soup.text.strip()
+                and date_soup.text.strip().lower() != "today"
+            ):
+                continue
+            date_str: str = date_soup.text.strip()
+            try:
+                if date_soup.text.strip().lower() == "today":
+                    date = datetime.now().date()
+                else:
+                    date = datetime.strptime(date_str.split(" ")[1], "%d/%m/%Y").date()
+
+            except ValueError:
+                continue
+
+            dict_data = {
+                "type": bin_type,
+                "collectionDate": date.strftime("%d/%m/%Y"),
+            }
+            bindata["bins"].append(dict_data)
+
+        return bindata
uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py
@@ -267,18 +267,12 @@ class CouncilClass(AbstractGetBinDataClass):
         data = {"bins": []}

         for e in collection_record_elements:
-            collection_type = e.
-
-            )[0].text.strip()
-            collection_date = (
-                e.find("div", {"class": "bin-collection-next"})
-                .attrs["data-current_value"]
-                .strip()
-            )
+            collection_type = e.find("h2").get_text()
+            collection_date = e.find("span", {"class": "value-as-text"}).get_text()
             dict_data = {
                 "type": collection_type,
                 "collectionDate": datetime.strptime(
-                    collection_date, "%d
+                    collection_date, "%A %d %B %Y"
                 ).strftime(date_format),
             }
             data["bins"].append(dict_data)
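With the Croydon page now exposing the date as visible text rather than a data-current_value attribute, the new "%A %d %B %Y" format string does the parsing. A quick illustration with a made-up value (not taken from the council site):

```python
from datetime import datetime

# "%A %d %B %Y" matches strings like "Friday 15 November 2024"
# (full weekday name, day, full month name, four-digit year).
example = "Friday 15 November 2024"  # illustrative value only
parsed = datetime.strptime(example, "%A %d %B %Y")
print(parsed.strftime("%d/%m/%Y"))  # -> 15/11/2024
```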
uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py (new file)
@@ -0,0 +1,117 @@
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Get and check UPRN
+        user_postcode = kwargs.get("postcode")
+        user_paon = kwargs.get("paon")
+        check_paon(user_paon)
+        check_postcode(user_postcode)
+        web_driver = kwargs.get("web_driver")
+        headless = kwargs.get("headless")
+        bindata = {"bins": []}
+
+        API_URL = "https://uhte-wrp.whitespacews.com"
+
+        # Create Selenium webdriver
+        driver = create_webdriver(web_driver, headless, None, __name__)
+        driver.get(API_URL)
+
+        # Click Find my bin collection day button
+        collectionButton = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable((By.LINK_TEXT, "Find my bin collection day"))
+        )
+        collectionButton.click()
+
+        main_content = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located((By.ID, "main-content"))
+        )
+
+        # Wait for the property number field to appear then populate it
+        inputElement_number = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.ID,
+                    "address_name_number",
+                )
+            )
+        )
+        inputElement_number.send_keys(user_paon)
+
+        # Wait for the postcode field to appear then populate it
+        inputElement_postcode = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.ID,
+                    "address_postcode",
+                )
+            )
+        )
+        inputElement_postcode.send_keys(user_postcode)
+
+        # Click search button
+        continueButton = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.ID,
+                    "Submit",
+                )
+            )
+        )
+        continueButton.click()
+
+        # Wait for the 'Search Results' to appear and select the first result
+        property = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.CSS_SELECTOR,
+                    "li.app-subnav__section-item a",
+                    # "app-subnav__link govuk-link clicker colordarkblue fontfamilyArial fontsize12rem",
+                    # "//a[starts-with(@aria-label, '{user_paon}')]",
+                )
+            )
+        )
+        property.click()
+
+        upcoming_scheduled_collections = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located((By.ID, "upcoming-scheduled-collections"))
+        )
+
+        soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+        collections = []
+        for collection in soup.find_all(
+            "u1",
+            class_="displayinlineblock justifycontentleft alignitemscenter margin0 padding0",
+        ):
+            date = collection.find(
+                "p", string=lambda text: text and "/" in text
+            ).text.strip()  # Extract date
+            service = collection.find(
+                "p", string=lambda text: text and "Collection Service" in text
+            ).text.strip()  # Extract service type
+            collections.append({"date": date, "service": service})
+
+        # Print the parsed data
+        for item in collections:
+
+            dict_data = {
+                "type": item["service"],
+                "collectionDate": item["date"],
+            }
+            bindata["bins"].append(dict_data)
+
+        return bindata
uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py
@@ -82,9 +82,16 @@ class CouncilClass(AbstractGetBinDataClass):
                 columns = row.find_all("td")
                 if columns:
                     container_type = row.find("th").text.strip()
-
-
-
+                    if columns[0].get_text() == "Today":
+                        collection_day = datetime.now().strftime("%a, %d %B")
+                    elif columns[0].get_text() == "Tomorrow":
+                        collection_day = (datetime.now() + timedelta(days=1)).strftime(
+                            "%a, %d %B"
+                        )
+                    else:
+                        collection_day = re.sub(
+                            r"[^a-zA-Z0-9,\s]", "", columns[0].get_text()
+                        ).strip()

                     # Parse the date from the string
                     parsed_date = datetime.strptime(collection_day, "%a, %d %B")
uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py (new file)
@@ -0,0 +1,138 @@
+import time
+
+from bs4 import BeautifulSoup
+from dateutil.relativedelta import relativedelta
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        data = {"bins": []}
+        user_paon = kwargs.get("paon")
+        user_postcode = kwargs.get("postcode")
+        web_driver = kwargs.get("web_driver")
+        headless = kwargs.get("headless")
+        check_paon(user_paon)
+        check_postcode(user_postcode)
+
+        user_paon = user_paon.upper()
+
+        # Create Selenium webdriver
+        driver = create_webdriver(web_driver, headless, None, __name__)
+        driver.get("https://en.powys.gov.uk/binday")
+
+        accept_button = WebDriverWait(driver, timeout=10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.NAME,
+                    "acceptall",
+                )
+            )
+        )
+        accept_button.click()
+
+        # Wait for the postcode field to appear then populate it
+        inputElement_postcode = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located(
+                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPPOSTCODE")
+            )
+        )
+        inputElement_postcode.send_keys(user_postcode)
+
+        # Click search button
+        findAddress = WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located(
+                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPSEARCH")
+            )
+        )
+        findAddress.click()
+
+        # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
+        WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.XPATH,
+                    "//select[@id='BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPADDRESS']//option[contains(., '"
+                    + user_paon
+                    + "')]",
+                )
+            )
+        ).click()
+
+        # Wait for the submit button to appear, then click it to get the collection dates
+        WebDriverWait(driver, 30).until(
+            EC.element_to_be_clickable(
+                (By.ID, "BINDAYLOOKUP_ADDRESSLOOKUP_ADDRESSLOOKUPBUTTONS_NEXT")
+            )
+        ).click()
+
+        # Wait for the collections table to appear
+        WebDriverWait(driver, 10).until(
+            EC.presence_of_element_located(
+                (By.ID, "BINDAYLOOKUP_COLLECTIONDATES_COLLECTIONDATES")
+            )
+        )
+
+        soup = BeautifulSoup(driver.page_source, features="html.parser")
+
+        # General rubbish collection dates
+        general_rubbish_section = soup.find(
+            "h3", string="General Rubbish / Wheelie bin"
+        )
+        general_rubbish_dates = [
+            li.text for li in general_rubbish_section.find_next("ul").find_all("li")
+        ]
+
+        for date in general_rubbish_dates:
+            dict_data = {
+                "type": "General Rubbish / Wheelie bin",
+                "collectionDate": datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                ).strftime(date_format),
+            }
+            data["bins"].append(dict_data)
+
+        # Recycling and food waste collection dates
+        recycling_section = soup.find("h3", string="Recycling and Food Waste")
+        recycling_dates = [
+            li.text for li in recycling_section.find_next("ul").find_all("li")
+        ]
+
+        for date in recycling_dates:
+            dict_data = {
+                "type": "Recycling and Food Waste",
+                "collectionDate": datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                ).strftime(date_format),
+            }
+            data["bins"].append(dict_data)
+
+        # Garden waste collection dates
+        garden_waste_section = soup.find("h3", string="Garden Waste")
+        garden_waste_dates = [
+            li.text for li in garden_waste_section.find_next("ul").find_all("li")
+        ]
+        for date in garden_waste_dates:
+            dict_data = {
+                "type": "Garden Waste",
+                "collectionDate": datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(date), "%d %B %Y"
+                ).strftime(date_format),
+            }
+            data["bins"].append(dict_data)
+
+        return data
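The Powys page lists dates with ordinal suffixes ("1st", "22nd", ...), which is why each block strips them with the shared remove_ordinal_indicator_from_date_string helper before parsing with "%d %B %Y". Below is a rough equivalent of that step, assuming the helper simply drops the suffix; its real implementation lives in common.py and may differ.

```python
import re
from datetime import datetime


def strip_ordinal(date_string: str) -> str:
    # Roughly what the shared helper is assumed to do:
    # "4th November 2024" -> "4 November 2024".
    return re.sub(r"(?<=\d)(st|nd|rd|th)", "", date_string)


example = "4th November 2024"  # illustrative value
print(datetime.strptime(strip_ordinal(example), "%d %B %Y").strftime("%d/%m/%Y"))
# -> 04/11/2024
```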
uk_bin_collection/uk_bin_collection/councils/SouthKestevenDistrictCouncil.py
@@ -1,13 +1,12 @@
-import
+import re
 from datetime import datetime

-from selenium.webdriver.support.ui import Select
 from bs4 import BeautifulSoup
 from selenium.webdriver.common.by import By
+from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as EC
 from selenium.webdriver.support.ui import Select
 from selenium.webdriver.support.wait import WebDriverWait
-from selenium.webdriver.common.keys import Keys

 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
@@ -51,10 +50,9 @@ class CouncilClass(AbstractGetBinDataClass):

     def extract_bin_data(self, article):
         date = article.find("div", class_="binday__cell--day").text.strip()
-        bin_type_class = article.get("class")[
-
-
-        bin_type = "black" if "black" in bin_type_class else "silver"
+        bin_type_class = article.get("class")[1]
+        bin_type = bin_type_class.split("--")[1]
+        # bin_type = "black" if "black" in bin_type_class else "silver"
         formatted_date = self.format_date(date)
         return {"type": bin_type, "collectionDate": formatted_date}

@@ -88,15 +86,17 @@ class CouncilClass(AbstractGetBinDataClass):
         )
         inputElement_postcodesearch_btn.click()

-
-
-
-
-
-
-
-
-
+        # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
+        WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (
+                    By.XPATH,
+                    "//select[@id='address']//option[contains(., '"
+                    + house_number
+                    + "')]",
+                )
+            )
+        ).click()

         inputElement_results_btn = wait.until(
             EC.element_to_be_clickable(
@@ -122,11 +122,24 @@ class CouncilClass(AbstractGetBinDataClass):
         # Extract data from the first aside element
         first_aside = soup.find("aside", class_="alert")
         if first_aside:
+            color_text = first_aside.find(
+                "p", string=re.compile("This is a")
+            ).get_text()
+            color = re.search(r"This is a (\w+) bin day", color_text)
             next_collection_date = first_aside.find(
                 "span", class_="alert__heading alpha"
             ).text.strip()
+            if next_collection_date == "Today":
+                next_collection_date = datetime.now().strftime("%a %d %B %Y")
+            elif next_collection_date == "Tomorrow":
+                next_collection_date = (
+                    datetime.now() + timedelta(days=1)
+                ).strftime("%a %d %B %Y")
+
             bin_info = {
-                "type":
+                "type": color.group(
+                    1
+                ),  # Based on the provided information in the HTML, assuming it's a purple bin day.
                 "collectionDate": self.format_date(next_collection_date),
             }
             bin_data.append(bin_info)
uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py
@@ -1,6 +1,7 @@
 from xml.etree import ElementTree

 from bs4 import BeautifulSoup
+
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

@@ -68,18 +69,30 @@ class CouncilClass(AbstractGetBinDataClass):
             collection_date = ""
             results = re.search("([A-Za-z]+ \\d\\d? [A-Za-z]+) then", bin_info)
             if results:
-
-                datetime.
-
-
+                if results[1] == "Today":
+                    date = datetime.now()
+                elif results[1] == "Tomorrow":
+                    date = datetime.now() + timedelta(days=1)
+                else:
+                    date = get_next_occurrence_from_day_month(
+                        datetime.strptime(
+                            results[1] + " " + datetime.now().strftime("%Y"),
+                            "%a %d %b %Y",
+                        )
                     )
-                )
                 if date:
                     collection_date = date.strftime(date_format)
             else:
                 results2 = re.search("([A-Za-z]+) then", bin_info)
                 if results2:
-
+                    if results2[1] == "Today":
+                        collection_date = datetime.now().strftime(date_format)
+                    elif results2[1] == "Tomorrow":
+                        collection_date = (
+                            datetime.now() + timedelta(days=1)
+                        ).strftime(date_format)
+                    else:
+                        collection_date = results2[1]

             if collection_date != "":
                 dict_data = {
uk_bin_collection/uk_bin_collection/councils/WestOxfordshireDistrictCouncil.py
@@ -86,6 +86,13 @@ class CouncilClass(AbstractGetBinDataClass):
                         r"[^a-zA-Z0-9,\s]", "", columns[0].get_text()
                     ).strip()

+                    if columns[0].get_text() == "Today":
+                        collection_day = datetime.now().strftime("%a, %d %B")
+                    elif columns[0].get_text() == "Tomorrow":
+                        collection_day = (datetime.now() + timedelta(days=1)).strftime(
+                            "%a, %d %B"
+                        )
+
                     # Parse the date from the string
                     parsed_date = datetime.strptime(collection_day, "%a, %d %B")
                     if parsed_date < datetime(
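Both the Forest of Dean and West Oxfordshire hunks parse collection days without a year ("%a, %d %B"), and strptime then defaults the year to 1900; that is why the surrounding code compares the parsed date against the current date before formatting. A small illustration of the year fix-up, using illustrative values and only roughly mirroring what those modules do:

```python
from datetime import datetime

# "%a, %d %B" has no year component, so strptime fills in 1900.
parsed = datetime.strptime("Mon, 30 December", "%a, %d %B")
print(parsed.year)  # -> 1900

# Attach the current year, and bump it when the collection has already
# wrapped past New Year relative to today.
today = datetime.now()
candidate = parsed.replace(year=today.year)
if candidate.date() < today.date():
    candidate = candidate.replace(year=today.year + 1)
print(candidate.strftime("%d/%m/%Y"))
```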
uk_bin_collection/uk_bin_collection/councils/WorcesterCityCouncil.py (new file)
@@ -0,0 +1,58 @@
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = "https://selfserve.worcester.gov.uk/wccroundlookup/HandleSearchScreen"
+
+        post_data = {
+            "alAddrsel": user_uprn,
+        }
+
+        headers = {
+            "referer": "https://selfserve.worcester.gov.uk/wccroundlookup/HandleSearchScreen",
+            "content-type": "application/x-www-form-urlencoded",
+        }
+
+        response = requests.post(URI, data=post_data, headers=headers, verify=False)
+
+        soup = BeautifulSoup(response.content, "html.parser")
+        rows = soup.select("table.table tbody tr")
+
+        for row in rows:
+            bin_type = row.select_one("td:nth-of-type(2)").text.strip()
+            collection_date = row.select_one("td:nth-of-type(3) strong").text.strip()
+
+            if collection_date == "Not applicable":
+                continue
+
+            dict_data = {
+                "type": bin_type,
+                "collectionDate": datetime.strptime(
+                    collection_date,
+                    "%A %d/%m/%Y",
+                ).strftime("%d/%m/%Y"),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+
+        return bindata
{uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/METADATA
@@ -1,17 +1,20 @@
 Metadata-Version: 2.1
 Name: uk_bin_collection
-Version: 0.
+Version: 0.108.0
 Summary: Python Lib to collect UK Bin Data
 Author: Robert Bradley
 Author-email: robbrad182@gmail.com
-Requires-Python: >=3.
+Requires-Python: >=3.12,<3.14
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: bs4
 Requires-Dist: holidays
 Requires-Dist: lxml
 Requires-Dist: pandas
+Requires-Dist: pytest-asyncio (>=0.24.0,<0.25.0)
+Requires-Dist: pytest-freezer (>=0.4.8,<0.5.0)
+Requires-Dist: pytest-homeassistant-custom-component (>=0.13.177,<0.14.0)
 Requires-Dist: python-dateutil
 Requires-Dist: requests
 Requires-Dist: selenium
@@ -290,7 +293,6 @@ docker pull selenium/standalone-chrome docker run -d -p 4444:4444 --name seleniu

 ## Reports

-- [3.11](https://robbrad.github.io/UKBinCollectionData/3.11/)
 - [3.12](https://robbrad.github.io/UKBinCollectionData/3.12/)

 ---
{uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/RECORD
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=
+uk_bin_collection/tests/input.json,sha256=NKV2_wqJAiwzefOwEMDnRwRheAJRvULKElt-87kbbHU,76941
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=LrOSt_loA1Mw3vTqaO2LpaDMu7rYJy6k5Kr-EOBln7s,3424
@@ -14,8 +14,10 @@ uk_bin_collection/uk_bin_collection/collect_data.py,sha256=dB7wWXsJX4fm5bIf84lex
 uk_bin_collection/uk_bin_collection/common.py,sha256=oZQW8CYRGfbhNSyq7t7jEhrFl1tjb4H157xSx8QHWSY,10106
 uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py,sha256=aO1CSdyqa8oAD0fB79y1Q9bikAWCP_JFa7CsyTa2j9s,1655
 uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py,sha256=ppbrmm-MzB1wOulK--CU_0j4P-djNf3ozMhHnmQFqLo,1511
+uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py,sha256=iMBldxNErgi-ok1o6xpqdNgMvR6qapaNqoTWDTqMeGo,3824
 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
+uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=yC-8UMQHSbvze43PJ2_F4Z3cu7M7cynKTojipBJU7Ug,4307
 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
 uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py,sha256=Sd4-pbv0QZsR7soxvXYqsfdOUIqZqS6notyoZthG77s,9182
@@ -46,7 +48,7 @@ uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py,sha25
 uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py,sha256=tXfzMetN6wxahuGGRp2mIyCCDSL4F2aG61HhUxw6COQ,2172
 uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py,sha256=EB88D0MNJwuDZ2GX1ENc5maGYx17mnHTCtNl6s-v11E,5090
 uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py,sha256=aMqT5sy1Z1gklFO5Xl893OgeBmpf19OwpizWEKWQ3hg,1680
-uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py,sha256=xsJcx-Dcds0ZcX2vZ-xHVkCg-faQRvbhrJzRDY6Lguw,4779
 uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py,sha256=HxrLcJves7ZsE8FbooymeecTUmScY4R7Oi71vwCePPo,4118
 uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py,sha256=M7HjuUaFq8aSnOf_9m1QS4MmPPMmPhF3mLHSrfDPtV0,5194
 uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py,sha256=Mny-q2rQkWe2Tj1gINwEM1L4AkqQl1EDMAaKY0-deD4,3968
@@ -54,7 +56,7 @@ uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py,sha256=el75qv
 uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py,sha256=WZiz50svwyZgO8QKUCLy7hfFuy2HmAx5h-TG3yAweRA,2836
 uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py,sha256=kfAvA2e4MlO0W9YT70U_mW9gxVPrmr0BOGzV99Tw2Bg,2012
 uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py,sha256=_BEKZAjlS5Ad5DjyxqAEFSLn8F-KYox0zmn4BXaAD6A,2367
-uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py,sha256=Vxh5ICoaXTAvx0nDOq_95XQ4He9sQKcLdI5keV2uxM4,11384
 uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py,sha256=Tm_6pvBPj-6qStbe6-02LXaoCOlnnDvVXAAocGVvf_E,3970
 uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py,sha256=SPirUUoweMwX5Txtsr0ocdcFtKxCQ9LhzTTJN20tM4w,1550
 uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py,sha256=MQC1-jXezXczrxTcvPQvkpGgyyAbzSKlX38WsmftHak,4007
@@ -66,6 +68,7 @@ uk_bin_collection/uk_bin_collection/councils/DurhamCouncil.py,sha256=6O8bNsQVYQb
 uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py,sha256=UhNXGi-_6NYZu50988VEvOzmAVunxOoyJ6mz0OEaUz4,1321
 uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py,sha256=aYUVE5QqTxdj8FHhCB4EiFVDJahWJD9Pq0d1upBEvXg,1501
 uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py,sha256=U0VwSNIldMv5nUoiXtFgjbE0m6Kb-8W2WZQGVCNF_WI,3261
+uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=hjIrZXM0qe8xvHfrBqMDyXnq0_h_ySODqTfmOI5ahTc,4071
 uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=o_HPSFhb2ybmwv32_7T7CO1f2mGDkYCNPfaM5xz6bUI,4356
 uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py,sha256=5giegMCKQ2JhVDR5M4mevVxIdhZtSW7kbuuoSkj3EGk,4361
 uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py,sha256=oL-NqriLVy_NChGASNh8qTqeakLn4iP_XzoMC6VlPGM,5216
@@ -81,7 +84,7 @@ uk_bin_collection/uk_bin_collection/councils/FarehamBoroughCouncil.py,sha256=25Q
 uk_bin_collection/uk_bin_collection/councils/FenlandDistrictCouncil.py,sha256=sFrnKzIE2tIcz0YrC6A9HcevzgNdf6E6_HLGMWDKtGw,2513
 uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py,sha256=eP_NnHtBLyflRUko9ubi_nxUPb7qg9SbaaSxqWZxNEs,2157
 uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py,sha256=RvPHhGbzP3mcjgWe2rIQux43UuDH7XofJGIKs7wJRe0,2060
-uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py,sha256=YWT2GM2-bQ3Zh9ps1K14XRZfanuJOlV-zHpOOYMXAXY,4893
 uk_bin_collection/uk_bin_collection/councils/GatesheadCouncil.py,sha256=SRCgYhYs6rv_8C1UEDVORHZgXxcJkoZBjzdYS4Lu-ew,4531
 uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py,sha256=XzfFMCwclh9zAJgsbaj4jywjdiH0wPaFicaVsLrN3ms,2297
 uk_bin_collection/uk_bin_collection/councils/GlasgowCityCouncil.py,sha256=i7BympEhCm7D9yR0p5_QQICtWvNcDYNJIWB19SA0g2k,2303
@@ -147,6 +150,7 @@ uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py,sha256=9dlesCxNoVX
 uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py,sha256=Kos5GzN2co3Ij3tSHOXB9S71Yt78RROCfVRtnh7M1VU,3657
 uk_bin_collection/uk_bin_collection/councils/PlymouthCouncil.py,sha256=FJqpJ0GJhpjYeyZ9ioZPkKGl-zrqMD3y5iKa07e_i30,3202
 uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py,sha256=xogNgVvwM5FljCziiNLgZ_wzkOnrQkifi1dkPMDRMtg,5588
+uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=E6AGmbU3GfmScrpS-hrnCz4uOwucmckq4R-hLmq80b8,5004
 uk_bin_collection/uk_bin_collection/councils/PrestonCityCouncil.py,sha256=3Nuin2hQsiEsbJR_kHldtzRhzmnPFctH7C7MFG7thj8,3838
 uk_bin_collection/uk_bin_collection/councils/ReadingBoroughCouncil.py,sha256=ZlQjU0IeKylGE9VlivSMh4XKwoLgntESPiylSOYkuD4,1009
 uk_bin_collection/uk_bin_collection/councils/ReigateAndBansteadBoroughCouncil.py,sha256=HMLKdRUO5DdMJe1d1X5qtKtQsf6d5TAPViIZpMzAfes,3251
@@ -168,9 +172,9 @@ uk_bin_collection/uk_bin_collection/councils/SouthAyrshireCouncil.py,sha256=03ea
 uk_bin_collection/uk_bin_collection/councils/SouthCambridgeshireCouncil.py,sha256=xGSMcikxjS4UzqKs0X50LJKmn09C-XAAs98SPhNZgkQ,2308
 uk_bin_collection/uk_bin_collection/councils/SouthDerbyshireDistrictCouncil.py,sha256=irqelQSENPsZLlNtYtpt-Z7GwKUyvhp94kKKVIIDjQg,2087
 uk_bin_collection/uk_bin_collection/councils/SouthGloucestershireCouncil.py,sha256=ytQot0J7i6DTJo6hb9koTB1UpXLATKVeRU4FBF9kHRo,2412
-uk_bin_collection/uk_bin_collection/councils/SouthKestevenDistrictCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/SouthKestevenDistrictCouncil.py,sha256=_26ouWln5VrKiIFcp2b6ZzuwCKpp3aNcS2n5d4-8NsA,6210
 uk_bin_collection/uk_bin_collection/councils/SouthLanarkshireCouncil.py,sha256=fj-eZI0yrvQVCv8GvhcovZ3b9bV6Xv_ws3IunWjnv4U,3126
-uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py,sha256=C2qIZjjbl9JnuukX9OH2RbfP0hSdp3uX76APGY33qKs,4622
 uk_bin_collection/uk_bin_collection/councils/SouthOxfordshireCouncil.py,sha256=zW4bN3hcqNoK_Y0-vPpuZs3K0LTPvApu6_v9K-D7WjE,3879
 uk_bin_collection/uk_bin_collection/councils/SouthRibbleCouncil.py,sha256=OdexbeiI5WsCfjlsnHjAce8oGF5fW-n7q2XOuxcpHzw,3604
 uk_bin_collection/uk_bin_collection/councils/SouthTynesideCouncil.py,sha256=dxXGrJfg_fn2IPTBgq6Duwy0WY8GYLafMuisaCjOnbs,3426
@@ -215,7 +219,7 @@ uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py,sha25
 uk_bin_collection/uk_bin_collection/councils/WestLothianCouncil.py,sha256=dq0jimtARvRkZiGbVFrXXZgY-BODtz3uYZ5UKn0bf64,4114
 uk_bin_collection/uk_bin_collection/councils/WestMorlandAndFurness.py,sha256=jbqV3460rn9D0yTBGWjpSe1IvWWcdGur5pzgj-hJcQ4,2513
 uk_bin_collection/uk_bin_collection/councils/WestNorthamptonshireCouncil.py,sha256=F1GeJUGND4DN_HuM6N0Elpeb0DAMm9_KeqG6qtIgZf4,1079
-uk_bin_collection/uk_bin_collection/councils/WestOxfordshireDistrictCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/WestOxfordshireDistrictCouncil.py,sha256=bkE7BUwRIEJQyfOHyXYeaJB1ruGTFu9LHIGursIBEIQ,4859
 uk_bin_collection/uk_bin_collection/councils/WestSuffolkCouncil.py,sha256=9i8AQHh-qIRPZ_5Ad97_h04-qgyLQDPV064obBzab1Y,2587
 uk_bin_collection/uk_bin_collection/councils/WiganBoroughCouncil.py,sha256=3gqFA4-BVx_In6QOu3KUNqPN4Fkn9iMlZTeopMK9p6A,3746
 uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py,sha256=it2Oh5Kmq3lD30gAZgk2bzZPNCtJcFHyQO1NgOQtfvU,5653
@@ -224,14 +228,15 @@ uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha2
 uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py,sha256=X_e9zXEZAl_Mp6nPORHc9CTmf3QHdoMY3BCnKrXEr1I,2131
 uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py,sha256=37igH9g0xe4XIhRhcJ-ZJBU8MxTp5yzgpadWbdE33Yg,5205
 uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=H8aFHlacwV07X-6T9RQua4irqDA0cIQrF4O1FfPR7yI,4114
+uk_bin_collection/uk_bin_collection/councils/WorcesterCityCouncil.py,sha256=dKHB2fPSmOGOwyvfpbdR4U8XW2ctBf63gCPxX06kwKA,1867
 uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py,sha256=YuZdzEW0CZLwusm1VQcGRIKXAab_UDFLaCnN60itt_E,5776
 uk_bin_collection/uk_bin_collection/councils/WyreCouncil.py,sha256=zDDa7n4K_zm5PgDL08A26gD9yOOsOhuexI3x2seaBF4,3511
 uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bIdsvmoSzBjJAvTTi6yPfJa8xjJx1ys2w,1490
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=4s9ODGPAwPqwXc8SrTX5Wlfmizs3_58iXUtHc4Ir86o,1162
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
+uk_bin_collection-0.108.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.108.0.dist-info/METADATA,sha256=whM0AryElMTyBl5WOjE2VSIpY609j4U7nOjYrbAY5OI,17744
+uk_bin_collection-0.108.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+uk_bin_collection-0.108.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.108.0.dist-info/RECORD,,
{uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/LICENSE RENAMED (file without changes)
{uk_bin_collection-0.106.0.dist-info → uk_bin_collection-0.108.0.dist-info}/entry_points.txt RENAMED (file without changes)