uk_bin_collection 0.140.0__py3-none-any.whl → 0.141.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uk_bin_collection/tests/input.json +9 -0
- uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py +97 -86
- uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py +102 -91
- uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py +140 -130
- uk_bin_collection/uk_bin_collection/councils/PeterboroughCityCouncil.py +167 -0
- uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py +110 -99
- uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py +43 -31
- {uk_bin_collection-0.140.0.dist-info → uk_bin_collection-0.141.1.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.140.0.dist-info → uk_bin_collection-0.141.1.dist-info}/RECORD +12 -11
- {uk_bin_collection-0.140.0.dist-info → uk_bin_collection-0.141.1.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.140.0.dist-info → uk_bin_collection-0.141.1.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.140.0.dist-info → uk_bin_collection-0.141.1.dist-info}/entry_points.txt +0 -0
@@ -1493,6 +1493,15 @@
|
|
1493
1493
|
"wiki_name": "Oxford City Council",
|
1494
1494
|
"wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
|
1495
1495
|
},
|
1496
|
+
"PeterboroughCityCouncil": {
|
1497
|
+
"house_number": "7 Arundel Road, Peterborough, PE4 6JJ",
|
1498
|
+
"postcode": "PE4 6JJ",
|
1499
|
+
"skip_get_url": true,
|
1500
|
+
"url": "https://report.peterborough.gov.uk/waste",
|
1501
|
+
"web_driver": "http://selenium:4444",
|
1502
|
+
"wiki_name": "Peterborough City Council",
|
1503
|
+
"wiki_note": "Pass the full address as it appears on the Peterborough website and postcode in their respective parameters. This parser requires a Selenium webdriver."
|
1504
|
+
},
|
1496
1505
|
"PerthAndKinrossCouncil": {
|
1497
1506
|
"uprn": "124032322",
|
1498
1507
|
"url": "https://www.pkc.gov.uk",
|
@@ -19,106 +19,117 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
19
19
|
"""
|
20
20
|
|
21
21
|
def parse_data(self, page: str, **kwargs) -> dict:
|
22
|
+
driver = None
|
23
|
+
try:
|
22
24
|
# Get and check UPRN
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
25
|
+
user_uprn = kwargs.get("uprn")
|
26
|
+
user_postcode = kwargs.get("postcode")
|
27
|
+
check_uprn(user_uprn)
|
28
|
+
check_postcode(user_postcode)
|
29
|
+
web_driver = kwargs.get("web_driver")
|
30
|
+
headless = kwargs.get("headless")
|
31
|
+
bindata = {"bins": []}
|
32
|
+
|
33
|
+
API_URL = "https://secure.ashford.gov.uk/waste/collectiondaylookup/"
|
34
|
+
|
35
|
+
# Create Selenium webdriver
|
36
|
+
driver = create_webdriver(web_driver, headless, None, __name__)
|
37
|
+
driver.get(API_URL)
|
38
|
+
|
39
|
+
# Wait for the postcode field to appear then populate it
|
40
|
+
inputElement_postcode = WebDriverWait(driver, 30).until(
|
41
|
+
EC.presence_of_element_located(
|
42
|
+
(By.ID, "ContentPlaceHolder1_CollectionDayLookup2_TextBox_PostCode")
|
43
|
+
)
|
41
44
|
)
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
45
|
+
inputElement_postcode.send_keys(user_postcode)
|
46
|
+
|
47
|
+
# Click search button
|
48
|
+
findAddress = WebDriverWait(driver, 10).until(
|
49
|
+
EC.presence_of_element_located(
|
50
|
+
(
|
51
|
+
By.ID,
|
52
|
+
"ContentPlaceHolder1_CollectionDayLookup2_Button_PostCodeSearch",
|
53
|
+
)
|
51
54
|
)
|
52
55
|
)
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
56
|
+
findAddress.click()
|
57
|
+
|
58
|
+
# Wait for the 'Select your property' dropdown to appear and select the first result
|
59
|
+
dropdown = WebDriverWait(driver, 10).until(
|
60
|
+
EC.element_to_be_clickable(
|
61
|
+
(
|
62
|
+
By.ID,
|
63
|
+
"ContentPlaceHolder1_CollectionDayLookup2_DropDownList_Addresses",
|
64
|
+
)
|
62
65
|
)
|
63
66
|
)
|
64
|
-
)
|
65
67
|
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
68
|
+
# Create a 'Select' for it, then select the first address in the list
|
69
|
+
# (Index 0 is "Make a selection from the list")
|
70
|
+
dropdownSelect = Select(dropdown)
|
71
|
+
dropdownSelect.select_by_value(str(user_uprn))
|
70
72
|
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
73
|
+
# Click search button
|
74
|
+
findAddress = WebDriverWait(driver, 10).until(
|
75
|
+
EC.presence_of_element_located(
|
76
|
+
(By.ID, "ContentPlaceHolder1_CollectionDayLookup2_Button_SelectAddress")
|
77
|
+
)
|
75
78
|
)
|
76
|
-
|
77
|
-
findAddress.click()
|
79
|
+
findAddress.click()
|
78
80
|
|
79
|
-
|
80
|
-
|
81
|
-
|
81
|
+
h4_element = WebDriverWait(driver, 10).until(
|
82
|
+
EC.presence_of_element_located(
|
83
|
+
(By.XPATH, "//h4[contains(text(), 'Collection Dates')]")
|
84
|
+
)
|
82
85
|
)
|
83
|
-
)
|
84
86
|
|
85
|
-
|
87
|
+
soup = BeautifulSoup(driver.page_source, features="html.parser")
|
86
88
|
|
87
|
-
|
89
|
+
bin_tables = soup.find_all("table")
|
88
90
|
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
91
|
+
for bin_table in bin_tables:
|
92
|
+
bin_text = bin_table.find("td", id=re.compile("CollectionDayLookup2_td_"))
|
93
|
+
if not bin_text:
|
94
|
+
continue
|
93
95
|
|
94
|
-
|
96
|
+
bin_type_soup = bin_text.find("b")
|
95
97
|
|
96
|
-
|
97
|
-
|
98
|
-
|
98
|
+
if not bin_type_soup:
|
99
|
+
continue
|
100
|
+
bin_type: str = bin_type_soup.text.strip().split(" (")[0]
|
99
101
|
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
102
|
+
date_soup = bin_text.find(
|
103
|
+
"span", id=re.compile(r"CollectionDayLookup2_Label_\w*_Date")
|
104
|
+
)
|
105
|
+
if not date_soup or (
|
106
|
+
" " not in date_soup.text.strip()
|
107
|
+
and date_soup.text.strip().lower() != "today"
|
108
|
+
):
|
109
|
+
continue
|
110
|
+
date_str: str = date_soup.text.strip()
|
111
|
+
try:
|
112
|
+
if date_soup.text.strip().lower() == "today":
|
113
|
+
date = datetime.now().date()
|
114
|
+
else:
|
115
|
+
date = datetime.strptime(date_str.split(" ")[1], "%d/%m/%Y").date()
|
116
|
+
|
117
|
+
except ValueError:
|
118
|
+
continue
|
119
|
+
|
120
|
+
dict_data = {
|
121
|
+
"type": bin_type,
|
122
|
+
"collectionDate": date.strftime("%d/%m/%Y"),
|
123
|
+
}
|
124
|
+
bindata["bins"].append(dict_data)
|
125
|
+
|
126
|
+
except Exception as e:
|
127
|
+
# Here you can log the exception if needed
|
128
|
+
print(f"An error occurred: {e}")
|
129
|
+
# Optionally, re-raise the exception if you want it to propagate
|
130
|
+
raise
|
131
|
+
finally:
|
132
|
+
# This block ensures that the driver is closed regardless of an exception
|
133
|
+
if driver:
|
134
|
+
driver.quit()
|
135
|
+
return bindata
|
@@ -16,102 +16,113 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
16
16
|
|
17
17
|
def parse_data(self, page: str, **kwargs) -> dict:
|
18
18
|
# Get and check UPRN
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
19
|
+
driver = None
|
20
|
+
try:
|
21
|
+
user_postcode = kwargs.get("postcode")
|
22
|
+
user_paon = kwargs.get("paon")
|
23
|
+
check_paon(user_paon)
|
24
|
+
check_postcode(user_postcode)
|
25
|
+
web_driver = kwargs.get("web_driver")
|
26
|
+
headless = kwargs.get("headless")
|
27
|
+
bindata = {"bins": []}
|
28
|
+
|
29
|
+
API_URL = "https://uhte-wrp.whitespacews.com"
|
30
|
+
|
31
|
+
# Create Selenium webdriver
|
32
|
+
driver = create_webdriver(web_driver, headless, None, __name__)
|
33
|
+
driver.get(API_URL)
|
34
|
+
|
35
|
+
# Click Find my bin collection day button
|
36
|
+
collectionButton = WebDriverWait(driver, 10).until(
|
37
|
+
EC.element_to_be_clickable((By.LINK_TEXT, "Find my bin collection day"))
|
38
|
+
)
|
39
|
+
collectionButton.click()
|
40
|
+
|
41
|
+
main_content = WebDriverWait(driver, 10).until(
|
42
|
+
EC.presence_of_element_located((By.ID, "main-content"))
|
43
|
+
)
|
44
|
+
|
45
|
+
# Wait for the property number field to appear then populate it
|
46
|
+
inputElement_number = WebDriverWait(driver, 10).until(
|
47
|
+
EC.element_to_be_clickable(
|
48
|
+
(
|
49
|
+
By.ID,
|
50
|
+
"address_name_number",
|
51
|
+
)
|
49
52
|
)
|
50
53
|
)
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
54
|
+
inputElement_number.send_keys(user_paon)
|
55
|
+
|
56
|
+
# Wait for the postcode field to appear then populate it
|
57
|
+
inputElement_postcode = WebDriverWait(driver, 10).until(
|
58
|
+
EC.element_to_be_clickable(
|
59
|
+
(
|
60
|
+
By.ID,
|
61
|
+
"address_postcode",
|
62
|
+
)
|
60
63
|
)
|
61
64
|
)
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
65
|
+
inputElement_postcode.send_keys(user_postcode)
|
66
|
+
|
67
|
+
# Click search button
|
68
|
+
continueButton = WebDriverWait(driver, 10).until(
|
69
|
+
EC.element_to_be_clickable(
|
70
|
+
(
|
71
|
+
By.ID,
|
72
|
+
"Submit",
|
73
|
+
)
|
71
74
|
)
|
72
75
|
)
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
76
|
+
continueButton.click()
|
77
|
+
|
78
|
+
# Wait for the 'Search Results' to appear and select the first result
|
79
|
+
property = WebDriverWait(driver, 10).until(
|
80
|
+
EC.element_to_be_clickable(
|
81
|
+
(
|
82
|
+
By.CSS_SELECTOR,
|
83
|
+
"li.app-subnav__section-item a",
|
84
|
+
# "app-subnav__link govuk-link clicker colordarkblue fontfamilyArial fontsize12rem",
|
85
|
+
# "//a[starts-with(@aria-label, '{user_paon}')]",
|
86
|
+
)
|
84
87
|
)
|
85
88
|
)
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
89
|
+
property.click()
|
90
|
+
|
91
|
+
upcoming_scheduled_collections = WebDriverWait(driver, 10).until(
|
92
|
+
EC.presence_of_element_located((By.ID, "upcoming-scheduled-collections"))
|
93
|
+
)
|
94
|
+
|
95
|
+
soup = BeautifulSoup(driver.page_source, features="html.parser")
|
96
|
+
|
97
|
+
collections = []
|
98
|
+
for collection in soup.find_all(
|
99
|
+
"u1",
|
100
|
+
class_="displayinlineblock justifycontentleft alignitemscenter margin0 padding0",
|
101
|
+
):
|
102
|
+
date = collection.find(
|
103
|
+
"p", string=lambda text: text and "/" in text
|
104
|
+
).text.strip() # Extract date
|
105
|
+
service = collection.find(
|
106
|
+
"p", string=lambda text: text and "Collection Service" in text
|
107
|
+
).text.strip() # Extract service type
|
108
|
+
collections.append({"date": date, "service": service})
|
109
|
+
|
110
|
+
# Print the parsed data
|
111
|
+
for item in collections:
|
112
|
+
|
113
|
+
dict_data = {
|
114
|
+
"type": item["service"],
|
115
|
+
"collectionDate": item["date"],
|
116
|
+
}
|
117
|
+
bindata["bins"].append(dict_data)
|
118
|
+
|
119
|
+
except Exception as e:
|
120
|
+
# Here you can log the exception if needed
|
121
|
+
print(f"An error occurred: {e}")
|
122
|
+
# Optionally, re-raise the exception if you want it to propagate
|
123
|
+
raise
|
124
|
+
finally:
|
125
|
+
# This block ensures that the driver is closed regardless of an exception
|
126
|
+
if driver:
|
127
|
+
driver.quit()
|
128
|
+
return bindata
|
@@ -21,141 +21,151 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
21
21
|
"""
|
22
22
|
|
23
23
|
def parse_data(self, page: str, **kwargs) -> dict:
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
for bin in bin_type:
|
53
|
-
dict_data = {
|
54
|
-
"week": tag.text.replace("\xa0", " "),
|
55
|
-
"bin_type": bin,
|
56
|
-
}
|
57
|
-
bin_weeks.append(dict_data)
|
58
|
-
|
59
|
-
driver.get(URI_2)
|
60
|
-
|
61
|
-
# Wait for the postcode field to appear then populate it
|
62
|
-
inputElement_postcode = WebDriverWait(driver, 30).until(
|
63
|
-
EC.presence_of_element_located(
|
64
|
-
(
|
65
|
-
By.CLASS_NAME,
|
66
|
-
"relation_path_type_ahead_search",
|
24
|
+
driver = None
|
25
|
+
try:
|
26
|
+
user_paon = kwargs.get("paon")
|
27
|
+
user_postcode = kwargs.get("postcode")
|
28
|
+
web_driver = kwargs.get("web_driver")
|
29
|
+
headless = kwargs.get("headless")
|
30
|
+
check_paon(user_paon)
|
31
|
+
check_postcode(user_postcode)
|
32
|
+
bindata = {"bins": []}
|
33
|
+
|
34
|
+
URI_1 = "https://www.hertsmere.gov.uk/Environment-Refuse-and-Recycling/Recycling--Waste/Bin-collections/Collections-and-calendar.aspx"
|
35
|
+
URI_2 = "https://hertsmere-services.onmats.com/w/webpage/round-search"
|
36
|
+
|
37
|
+
# Create Selenium webdriver
|
38
|
+
driver = create_webdriver(web_driver, headless, None, __name__)
|
39
|
+
driver.get(URI_1)
|
40
|
+
|
41
|
+
soup = BeautifulSoup(driver.page_source, "html.parser")
|
42
|
+
|
43
|
+
current_week = (soup.find("li", class_="current")).text.strip()
|
44
|
+
|
45
|
+
strong = soup.find_all("strong", text=re.compile(r"^Week"))
|
46
|
+
|
47
|
+
bin_weeks = []
|
48
|
+
for tag in strong:
|
49
|
+
parent = tag.parent
|
50
|
+
bin_type = (
|
51
|
+
(parent.text).split("-")[1].strip().replace("\xa0", " ").split(" and ")
|
67
52
|
)
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
53
|
+
for bin in bin_type:
|
54
|
+
dict_data = {
|
55
|
+
"week": tag.text.replace("\xa0", " "),
|
56
|
+
"bin_type": bin,
|
57
|
+
}
|
58
|
+
bin_weeks.append(dict_data)
|
59
|
+
|
60
|
+
driver.get(URI_2)
|
61
|
+
|
62
|
+
# Wait for the postcode field to appear then populate it
|
63
|
+
inputElement_postcode = WebDriverWait(driver, 30).until(
|
64
|
+
EC.presence_of_element_located(
|
65
|
+
(
|
66
|
+
By.CLASS_NAME,
|
67
|
+
"relation_path_type_ahead_search",
|
68
|
+
)
|
77
69
|
)
|
78
70
|
)
|
79
|
-
|
71
|
+
inputElement_postcode.send_keys(user_postcode)
|
72
|
+
|
73
|
+
WebDriverWait(driver, 10).until(
|
74
|
+
EC.element_to_be_clickable(
|
75
|
+
(
|
76
|
+
By.XPATH,
|
77
|
+
f"//ul[@class='result_list']/li[starts-with(@aria-label, '{user_paon}')]",
|
78
|
+
)
|
79
|
+
)
|
80
|
+
).click()
|
81
|
+
|
82
|
+
WebDriverWait(driver, timeout=10).until(
|
83
|
+
EC.element_to_be_clickable(
|
84
|
+
(
|
85
|
+
By.CSS_SELECTOR,
|
86
|
+
"input.fragment_presenter_template_edit.btn.bg-primary.btn-medium[type='submit']",
|
87
|
+
)
|
88
|
+
)
|
89
|
+
).click()
|
80
90
|
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
By.CSS_SELECTOR,
|
85
|
-
"input.fragment_presenter_template_edit.btn.bg-primary.btn-medium[type='submit']",
|
91
|
+
WebDriverWait(driver, timeout=10).until(
|
92
|
+
EC.presence_of_element_located(
|
93
|
+
(By.XPATH, "//h3[contains(text(), 'Collection days')]")
|
86
94
|
)
|
87
95
|
)
|
88
|
-
).click()
|
89
96
|
|
90
|
-
|
91
|
-
|
92
|
-
|
97
|
+
soup = BeautifulSoup(driver.page_source, "html.parser")
|
98
|
+
|
99
|
+
table = soup.find("table", class_="table listing table-striped")
|
100
|
+
|
101
|
+
# Check if the table was found
|
102
|
+
if table:
|
103
|
+
# Extract table rows and cells
|
104
|
+
table_data = []
|
105
|
+
for row in table.find("tbody").find_all("tr"):
|
106
|
+
# Extract cell data from each <td> tag
|
107
|
+
row_data = [cell.get_text(strip=True) for cell in row.find_all("td")]
|
108
|
+
table_data.append(row_data)
|
109
|
+
|
110
|
+
else:
|
111
|
+
print("Table not found.")
|
112
|
+
|
113
|
+
collection_day = (table_data[0])[1]
|
114
|
+
|
115
|
+
current_week_bins = [bin for bin in bin_weeks if bin["week"] == current_week]
|
116
|
+
next_week_bins = [bin for bin in bin_weeks if bin["week"] != current_week]
|
117
|
+
|
118
|
+
days_of_week = [
|
119
|
+
"Monday",
|
120
|
+
"Tuesday",
|
121
|
+
"Wednesday",
|
122
|
+
"Thursday",
|
123
|
+
"Friday",
|
124
|
+
"Saturday",
|
125
|
+
"Sunday",
|
126
|
+
]
|
127
|
+
|
128
|
+
today = datetime.now()
|
129
|
+
today_idx = today.weekday() # Monday is 0 and Sunday is 6
|
130
|
+
target_idx = days_of_week.index(collection_day)
|
131
|
+
|
132
|
+
days_until_target = (target_idx - today_idx) % 7
|
133
|
+
if days_until_target == 0:
|
134
|
+
next_day = today
|
135
|
+
else:
|
136
|
+
next_day = today + timedelta(days=days_until_target)
|
137
|
+
|
138
|
+
current_week_dates = get_dates_every_x_days(next_day, 14, 7)
|
139
|
+
next_week_date = next_day + timedelta(days=7)
|
140
|
+
next_week_dates = get_dates_every_x_days(next_week_date, 14, 7)
|
141
|
+
|
142
|
+
for date in current_week_dates:
|
143
|
+
for bin in current_week_bins:
|
144
|
+
dict_data = {
|
145
|
+
"type": bin["bin_type"],
|
146
|
+
"collectionDate": date,
|
147
|
+
}
|
148
|
+
bindata["bins"].append(dict_data)
|
149
|
+
|
150
|
+
for date in next_week_dates:
|
151
|
+
for bin in next_week_bins:
|
152
|
+
dict_data = {
|
153
|
+
"type": bin["bin_type"],
|
154
|
+
"collectionDate": date,
|
155
|
+
}
|
156
|
+
bindata["bins"].append(dict_data)
|
157
|
+
|
158
|
+
bindata["bins"].sort(
|
159
|
+
key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
|
93
160
|
)
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
# Extract cell data from each <td> tag
|
106
|
-
row_data = [cell.get_text(strip=True) for cell in row.find_all("td")]
|
107
|
-
table_data.append(row_data)
|
108
|
-
|
109
|
-
else:
|
110
|
-
print("Table not found.")
|
111
|
-
|
112
|
-
collection_day = (table_data[0])[1]
|
113
|
-
|
114
|
-
current_week_bins = [bin for bin in bin_weeks if bin["week"] == current_week]
|
115
|
-
next_week_bins = [bin for bin in bin_weeks if bin["week"] != current_week]
|
116
|
-
|
117
|
-
days_of_week = [
|
118
|
-
"Monday",
|
119
|
-
"Tuesday",
|
120
|
-
"Wednesday",
|
121
|
-
"Thursday",
|
122
|
-
"Friday",
|
123
|
-
"Saturday",
|
124
|
-
"Sunday",
|
125
|
-
]
|
126
|
-
|
127
|
-
today = datetime.now()
|
128
|
-
today_idx = today.weekday() # Monday is 0 and Sunday is 6
|
129
|
-
target_idx = days_of_week.index(collection_day)
|
130
|
-
|
131
|
-
days_until_target = (target_idx - today_idx) % 7
|
132
|
-
if days_until_target == 0:
|
133
|
-
next_day = today
|
134
|
-
else:
|
135
|
-
next_day = today + timedelta(days=days_until_target)
|
136
|
-
|
137
|
-
current_week_dates = get_dates_every_x_days(next_day, 14, 7)
|
138
|
-
next_week_date = next_day + timedelta(days=7)
|
139
|
-
next_week_dates = get_dates_every_x_days(next_week_date, 14, 7)
|
140
|
-
|
141
|
-
for date in current_week_dates:
|
142
|
-
for bin in current_week_bins:
|
143
|
-
dict_data = {
|
144
|
-
"type": bin["bin_type"],
|
145
|
-
"collectionDate": date,
|
146
|
-
}
|
147
|
-
bindata["bins"].append(dict_data)
|
148
|
-
|
149
|
-
for date in next_week_dates:
|
150
|
-
for bin in next_week_bins:
|
151
|
-
dict_data = {
|
152
|
-
"type": bin["bin_type"],
|
153
|
-
"collectionDate": date,
|
154
|
-
}
|
155
|
-
bindata["bins"].append(dict_data)
|
156
|
-
|
157
|
-
bindata["bins"].sort(
|
158
|
-
key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
|
159
|
-
)
|
160
|
-
|
161
|
-
return bindata
|
161
|
+
|
162
|
+
except Exception as e:
|
163
|
+
# Here you can log the exception if needed
|
164
|
+
print(f"An error occurred: {e}")
|
165
|
+
# Optionally, re-raise the exception if you want it to propagate
|
166
|
+
raise
|
167
|
+
finally:
|
168
|
+
# This block ensures that the driver is closed regardless of an exception
|
169
|
+
if driver:
|
170
|
+
driver.quit()
|
171
|
+
return bindata
|
@@ -0,0 +1,167 @@
|
|
1
|
+
import time
|
2
|
+
|
3
|
+
from bs4 import BeautifulSoup
|
4
|
+
from selenium.webdriver.common.by import By
|
5
|
+
from selenium.webdriver.support import expected_conditions as EC
|
6
|
+
from selenium.webdriver.support.ui import Select, WebDriverWait
|
7
|
+
|
8
|
+
from uk_bin_collection.uk_bin_collection.common import *
|
9
|
+
from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
|
10
|
+
|
11
|
+
|
12
|
+
# import the wonderful Beautiful Soup and the URL grabber
|
13
|
+
class CouncilClass(AbstractGetBinDataClass):
|
14
|
+
"""
|
15
|
+
Concrete classes have to implement all abstract operations of the
|
16
|
+
base class. They can also override some operations with a default
|
17
|
+
implementation.
|
18
|
+
"""
|
19
|
+
|
20
|
+
def parse_data(self, page: str, **kwargs) -> dict:
|
21
|
+
driver = None
|
22
|
+
try:
|
23
|
+
user_poan = kwargs.get("paon")
|
24
|
+
user_postcode = kwargs.get("postcode")
|
25
|
+
if not user_postcode:
|
26
|
+
raise ValueError("No postcode provided.")
|
27
|
+
check_postcode(user_postcode)
|
28
|
+
|
29
|
+
headless = kwargs.get("headless")
|
30
|
+
web_driver = kwargs.get("web_driver")
|
31
|
+
driver = create_webdriver(web_driver, headless, None, __name__)
|
32
|
+
page = "https://report.peterborough.gov.uk/waste"
|
33
|
+
|
34
|
+
driver.get(page)
|
35
|
+
|
36
|
+
wait = WebDriverWait(driver, 30)
|
37
|
+
|
38
|
+
try:
|
39
|
+
# Cookies confirmed working in selenium
|
40
|
+
accept_cookies_button = wait.until(
|
41
|
+
EC.element_to_be_clickable(
|
42
|
+
(
|
43
|
+
By.XPATH,
|
44
|
+
"//button/span[contains(text(), 'I Accept Cookies')]",
|
45
|
+
)
|
46
|
+
)
|
47
|
+
)
|
48
|
+
accept_cookies_button.click()
|
49
|
+
except:
|
50
|
+
print(
|
51
|
+
"Accept cookies banner not found or clickable within the specified time."
|
52
|
+
)
|
53
|
+
pass
|
54
|
+
|
55
|
+
postcode_input = wait.until(
|
56
|
+
EC.presence_of_element_located((By.XPATH, '//input[@id="postcode"]'))
|
57
|
+
)
|
58
|
+
|
59
|
+
postcode_input.send_keys(user_postcode)
|
60
|
+
|
61
|
+
postcode_go_button = wait.until(
|
62
|
+
EC.element_to_be_clickable((By.XPATH, '//input[@id="go"]'))
|
63
|
+
)
|
64
|
+
|
65
|
+
postcode_go_button.click()
|
66
|
+
|
67
|
+
# Wait for the select address drop down to be present
|
68
|
+
select_address_input = wait.until(
|
69
|
+
EC.presence_of_element_located((By.XPATH, '//input[@id="address"]'))
|
70
|
+
)
|
71
|
+
|
72
|
+
select_address_input.click()
|
73
|
+
time.sleep(2)
|
74
|
+
|
75
|
+
select_address_input_item = wait.until(
|
76
|
+
EC.presence_of_element_located(
|
77
|
+
(By.XPATH, f"//li[contains(text(), '{user_poan}')]")
|
78
|
+
)
|
79
|
+
)
|
80
|
+
|
81
|
+
select_address_input_item.click()
|
82
|
+
|
83
|
+
address_continue_button = wait.until(
|
84
|
+
EC.element_to_be_clickable((By.XPATH, '//input[@value="Continue"]'))
|
85
|
+
)
|
86
|
+
|
87
|
+
address_continue_button.click()
|
88
|
+
|
89
|
+
your_collections_heading = wait.until(
|
90
|
+
EC.presence_of_element_located(
|
91
|
+
(By.XPATH, "//h2[contains(text(), 'Your collections')]")
|
92
|
+
)
|
93
|
+
)
|
94
|
+
|
95
|
+
results_page = wait.until(
|
96
|
+
EC.presence_of_element_located(
|
97
|
+
(By.XPATH, "//div[@class='waste__collections']")
|
98
|
+
)
|
99
|
+
)
|
100
|
+
|
101
|
+
soup = BeautifulSoup(results_page.get_attribute("innerHTML"), "html.parser")
|
102
|
+
|
103
|
+
data = {"bins": []}
|
104
|
+
output_date_format = "%d/%m/%Y"
|
105
|
+
input_date_format = "%A, %d %B %Y" # Expect: Thursday, 17 April 2025
|
106
|
+
|
107
|
+
# Each bin section is within a waste-service-wrapper div
|
108
|
+
collection_panels = soup.find_all("div", class_="waste-service-wrapper")
|
109
|
+
|
110
|
+
for panel in collection_panels:
|
111
|
+
try:
|
112
|
+
# Bin type
|
113
|
+
bin_type_tag = panel.find("h3", class_="waste-service-name")
|
114
|
+
if not bin_type_tag:
|
115
|
+
continue
|
116
|
+
bin_type = bin_type_tag.get_text(strip=True)
|
117
|
+
|
118
|
+
# Get 'Next collection' date
|
119
|
+
rows = panel.find_all("div", class_="govuk-summary-list__row")
|
120
|
+
next_collection = None
|
121
|
+
for row in rows:
|
122
|
+
key = row.find("dt", class_="govuk-summary-list__key")
|
123
|
+
value = row.find("dd", class_="govuk-summary-list__value")
|
124
|
+
if key and value and "Next collection" in key.get_text():
|
125
|
+
raw_date = " ".join(value.get_text().split())
|
126
|
+
|
127
|
+
# ✅ Remove st/nd/rd/th suffix from the day (e.g. 17th → 17)
|
128
|
+
cleaned_date = re.sub(
|
129
|
+
r"(\d{1,2})(st|nd|rd|th)", r"\1", raw_date
|
130
|
+
)
|
131
|
+
next_collection = cleaned_date
|
132
|
+
break
|
133
|
+
|
134
|
+
if not next_collection:
|
135
|
+
continue
|
136
|
+
|
137
|
+
print(f"Found next collection for {bin_type}: '{next_collection}'")
|
138
|
+
|
139
|
+
parsed_date = datetime.strptime(next_collection, input_date_format)
|
140
|
+
formatted_date = parsed_date.strftime(output_date_format)
|
141
|
+
|
142
|
+
data["bins"].append(
|
143
|
+
{
|
144
|
+
"type": bin_type,
|
145
|
+
"collectionDate": formatted_date,
|
146
|
+
}
|
147
|
+
)
|
148
|
+
|
149
|
+
except Exception as e:
|
150
|
+
print(
|
151
|
+
f"Error processing panel for bin '{bin_type if 'bin_type' in locals() else 'unknown'}': {e}"
|
152
|
+
)
|
153
|
+
|
154
|
+
# Sort the data
|
155
|
+
data["bins"].sort(
|
156
|
+
key=lambda x: datetime.strptime(x["collectionDate"], output_date_format)
|
157
|
+
)
|
158
|
+
except Exception as e:
|
159
|
+
# Here you can log the exception if needed
|
160
|
+
print(f"An error occurred: {e}")
|
161
|
+
# Optionally, re-raise the exception if you want it to propagate
|
162
|
+
raise
|
163
|
+
finally:
|
164
|
+
# This block ensures that the driver is closed regardless of an exception
|
165
|
+
if driver:
|
166
|
+
driver.quit()
|
167
|
+
return data
|
@@ -28,108 +28,119 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
28
28
|
"""
|
29
29
|
|
30
30
|
def parse_data(self, page: str, **kwargs) -> dict:
|
31
|
-
|
32
|
-
user_postcode = kwargs.get("postcode")
|
33
|
-
user_paon = kwargs.get("paon")
|
34
|
-
web_driver = kwargs.get("web_driver")
|
35
|
-
headless = kwargs.get("headless")
|
36
|
-
check_postcode(user_postcode)
|
37
|
-
check_paon(user_paon)
|
38
|
-
|
39
|
-
# Build URL to parse
|
40
|
-
council_url = "https://swale.gov.uk/bins-littering-and-the-environment/bins/check-your-bin-day"
|
41
|
-
|
42
|
-
# Create Selenium webdriver
|
43
|
-
driver = create_webdriver(web_driver, headless, None, __name__)
|
44
|
-
driver.get(council_url)
|
45
|
-
|
46
|
-
# Wait for the postcode field to appear then populate it
|
31
|
+
driver = None
|
47
32
|
try:
|
48
|
-
|
49
|
-
|
33
|
+
# Get postcode and UPRN from kwargs
|
34
|
+
user_postcode = kwargs.get("postcode")
|
35
|
+
user_paon = kwargs.get("paon")
|
36
|
+
web_driver = kwargs.get("web_driver")
|
37
|
+
headless = kwargs.get("headless")
|
38
|
+
check_postcode(user_postcode)
|
39
|
+
check_paon(user_paon)
|
40
|
+
|
41
|
+
# Build URL to parse
|
42
|
+
council_url = "https://swale.gov.uk/bins-littering-and-the-environment/bins/check-your-bin-day"
|
43
|
+
|
44
|
+
# Create Selenium webdriver
|
45
|
+
driver = create_webdriver(web_driver, headless, None, __name__)
|
46
|
+
driver.get(council_url)
|
47
|
+
|
48
|
+
# Wait for the postcode field to appear then populate it
|
49
|
+
try:
|
50
|
+
inputElement_postcode = WebDriverWait(driver, 10).until(
|
51
|
+
EC.presence_of_element_located((By.ID, "q485476_q1"))
|
52
|
+
)
|
53
|
+
inputElement_postcode.send_keys(user_postcode)
|
54
|
+
except Exception:
|
55
|
+
print("Page failed to load. Probably due to Cloudflare robot check!")
|
56
|
+
|
57
|
+
# Click search button
|
58
|
+
findAddress = WebDriverWait(driver, 10).until(
|
59
|
+
EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
|
50
60
|
)
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
WebDriverWait(driver, 10).until(
|
63
|
-
EC.element_to_be_clickable(
|
64
|
-
(
|
65
|
-
By.XPATH,
|
66
|
-
"//select[@name='q485480:q1']//option[contains(., '"
|
67
|
-
+ user_paon
|
68
|
-
+ "')]",
|
61
|
+
driver.execute_script("arguments[0].click();", findAddress)
|
62
|
+
|
63
|
+
# Wait for the 'Select address' dropdown to appear and select option matching the house name/number
|
64
|
+
WebDriverWait(driver, 10).until(
|
65
|
+
EC.element_to_be_clickable(
|
66
|
+
(
|
67
|
+
By.XPATH,
|
68
|
+
"//select[@name='q485480:q1']//option[contains(., '"
|
69
|
+
+ user_paon
|
70
|
+
+ "')]",
|
71
|
+
)
|
69
72
|
)
|
73
|
+
).click()
|
74
|
+
|
75
|
+
# Click search button
|
76
|
+
getBins = WebDriverWait(driver, 10).until(
|
77
|
+
EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
|
78
|
+
)
|
79
|
+
driver.execute_script("arguments[0].click();", getBins)
|
80
|
+
|
81
|
+
BinTable = WebDriverWait(driver, 30).until(
|
82
|
+
EC.presence_of_element_located((By.ID, "SBCYBDSummary"))
|
83
|
+
)
|
84
|
+
|
85
|
+
soup = BeautifulSoup(driver.page_source, features="html.parser")
|
86
|
+
soup.prettify()
|
87
|
+
|
88
|
+
data = {"bins": []}
|
89
|
+
|
90
|
+
current_year = datetime.now().year
|
91
|
+
next_year = current_year + 1
|
92
|
+
|
93
|
+
next_collection_date = soup.find(
|
94
|
+
"strong", id="SBC-YBD-collectionDate"
|
95
|
+
).text.strip()
|
96
|
+
|
97
|
+
# Extract bins for the next collection
|
98
|
+
next_bins = [li.text.strip() for li in soup.select("#SBCFirstBins ul li")]
|
99
|
+
|
100
|
+
# Extract future collection details
|
101
|
+
future_collection_date_tag = soup.find(
|
102
|
+
"p", text=lambda t: t and "starting from" in t
|
103
|
+
)
|
104
|
+
future_collection_date = (
|
105
|
+
future_collection_date_tag.text.split("starting from")[-1].strip()
|
106
|
+
if future_collection_date_tag
|
107
|
+
else "No future date found"
|
70
108
|
)
|
71
|
-
).click()
|
72
|
-
|
73
|
-
# Click search button
|
74
|
-
getBins = WebDriverWait(driver, 10).until(
|
75
|
-
EC.presence_of_element_located((By.ID, "form_email_485465_submit"))
|
76
|
-
)
|
77
|
-
driver.execute_script("arguments[0].click();", getBins)
|
78
|
-
|
79
|
-
BinTable = WebDriverWait(driver, 30).until(
|
80
|
-
EC.presence_of_element_located((By.ID, "SBCYBDSummary"))
|
81
|
-
)
|
82
|
-
|
83
|
-
soup = BeautifulSoup(driver.page_source, features="html.parser")
|
84
|
-
soup.prettify()
|
85
|
-
|
86
|
-
data = {"bins": []}
|
87
|
-
|
88
|
-
current_year = datetime.now().year
|
89
|
-
next_year = current_year + 1
|
90
|
-
|
91
|
-
next_collection_date = soup.find(
|
92
|
-
"strong", id="SBC-YBD-collectionDate"
|
93
|
-
).text.strip()
|
94
|
-
|
95
|
-
# Extract bins for the next collection
|
96
|
-
next_bins = [li.text.strip() for li in soup.select("#SBCFirstBins ul li")]
|
97
|
-
|
98
|
-
# Extract future collection details
|
99
|
-
future_collection_date_tag = soup.find(
|
100
|
-
"p", text=lambda t: t and "starting from" in t
|
101
|
-
)
|
102
|
-
future_collection_date = (
|
103
|
-
future_collection_date_tag.text.split("starting from")[-1].strip()
|
104
|
-
if future_collection_date_tag
|
105
|
-
else "No future date found"
|
106
|
-
)
|
107
|
-
|
108
|
-
future_bins = [li.text.strip() for li in soup.select("#FirstFutureBins li")]
|
109
|
-
|
110
|
-
for bin in next_bins:
|
111
|
-
collection_date = datetime.strptime(next_collection_date, "%A, %d %B")
|
112
|
-
if (datetime.now().month == 12) and (collection_date.month == 1):
|
113
|
-
collection_date = collection_date.replace(year=next_year)
|
114
|
-
else:
|
115
|
-
collection_date = collection_date.replace(year=current_year)
|
116
|
-
|
117
|
-
dict_data = {
|
118
|
-
"type": bin,
|
119
|
-
"collectionDate": collection_date.strftime(date_format),
|
120
|
-
}
|
121
|
-
data["bins"].append(dict_data)
|
122
|
-
|
123
|
-
for bin in future_bins:
|
124
|
-
collection_date = datetime.strptime(future_collection_date, "%A, %d %B")
|
125
|
-
if (datetime.now().month == 12) and (collection_date.month == 1):
|
126
|
-
collection_date = collection_date.replace(year=next_year)
|
127
|
-
else:
|
128
|
-
collection_date = collection_date.replace(year=current_year)
|
129
|
-
dict_data = {
|
130
|
-
"type": bin,
|
131
|
-
"collectionDate": collection_date.strftime(date_format),
|
132
|
-
}
|
133
|
-
data["bins"].append(dict_data)
|
134
109
|
|
110
|
+
future_bins = [li.text.strip() for li in soup.select("#FirstFutureBins li")]
|
111
|
+
|
112
|
+
for bin in next_bins:
|
113
|
+
collection_date = datetime.strptime(next_collection_date, "%A, %d %B")
|
114
|
+
if (datetime.now().month == 12) and (collection_date.month == 1):
|
115
|
+
collection_date = collection_date.replace(year=next_year)
|
116
|
+
else:
|
117
|
+
collection_date = collection_date.replace(year=current_year)
|
118
|
+
|
119
|
+
dict_data = {
|
120
|
+
"type": bin,
|
121
|
+
"collectionDate": collection_date.strftime(date_format),
|
122
|
+
}
|
123
|
+
data["bins"].append(dict_data)
|
124
|
+
|
125
|
+
for bin in future_bins:
|
126
|
+
collection_date = datetime.strptime(future_collection_date, "%A, %d %B")
|
127
|
+
if (datetime.now().month == 12) and (collection_date.month == 1):
|
128
|
+
collection_date = collection_date.replace(year=next_year)
|
129
|
+
else:
|
130
|
+
collection_date = collection_date.replace(year=current_year)
|
131
|
+
dict_data = {
|
132
|
+
"type": bin,
|
133
|
+
"collectionDate": collection_date.strftime(date_format),
|
134
|
+
}
|
135
|
+
data["bins"].append(dict_data)
|
136
|
+
|
137
|
+
except Exception as e:
|
138
|
+
# Here you can log the exception if needed
|
139
|
+
print(f"An error occurred: {e}")
|
140
|
+
# Optionally, re-raise the exception if you want it to propagate
|
141
|
+
raise
|
142
|
+
finally:
|
143
|
+
# This block ensures that the driver is closed regardless of an exception
|
144
|
+
if driver:
|
145
|
+
driver.quit()
|
135
146
|
return data
|
@@ -16,35 +16,47 @@ class CouncilClass(AbstractGetBinDataClass):
|
|
16
16
|
"""
|
17
17
|
|
18
18
|
def parse_data(self, page: str, **kwargs) -> dict:
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
19
|
+
driver = None
|
20
|
+
try:
|
21
|
+
data = {"bins": []}
|
22
|
+
user_uprn = kwargs.get("uprn")
|
23
|
+
web_driver = kwargs.get("web_driver")
|
24
|
+
headless = kwargs.get("headless")
|
25
|
+
check_uprn(user_uprn)
|
26
|
+
|
27
|
+
root_url = "https://forms.rbwm.gov.uk/bincollections?uprn="
|
28
|
+
api_url = root_url + user_uprn
|
29
|
+
|
30
|
+
# Create Selenium webdriver
|
31
|
+
driver = create_webdriver(web_driver, headless, None, __name__)
|
32
|
+
driver.get(api_url)
|
33
|
+
|
34
|
+
soup = BeautifulSoup(driver.page_source, features="html.parser")
|
35
|
+
soup.prettify()
|
36
|
+
|
37
|
+
# Get collections div
|
38
|
+
next_collection_div = soup.find("div", {"class": "widget-bin-collections"})
|
39
|
+
|
40
|
+
for tbody in next_collection_div.find_all("tbody"):
|
41
|
+
for tr in tbody.find_all("tr"):
|
42
|
+
td = tr.find_all("td")
|
43
|
+
next_collection_type = td[0].get_text()
|
44
|
+
next_collection_date = dateutil.parser.parse(td[1].get_text())
|
45
|
+
print(next_collection_date)
|
46
|
+
dict_data = {
|
47
|
+
"type": next_collection_type,
|
48
|
+
"collectionDate": next_collection_date.strftime("%d/%m/%Y"),
|
49
|
+
}
|
50
|
+
data["bins"].append(dict_data)
|
51
|
+
|
52
|
+
except Exception as e:
|
53
|
+
# Here you can log the exception if needed
|
54
|
+
print(f"An error occurred: {e}")
|
55
|
+
# Optionally, re-raise the exception if you want it to propagate
|
56
|
+
raise
|
57
|
+
finally:
|
58
|
+
# This block ensures that the driver is closed regardless of an exception
|
59
|
+
if driver:
|
60
|
+
driver.quit()
|
50
61
|
return data
|
62
|
+
|
@@ -3,7 +3,7 @@ uk_bin_collection/tests/check_selenium_url_in_input.json.py,sha256=Iecdja0I3XIiY
|
|
3
3
|
uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
|
4
4
|
uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
|
5
5
|
uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
|
6
|
-
uk_bin_collection/tests/input.json,sha256=
|
6
|
+
uk_bin_collection/tests/input.json,sha256=uvi5_CrjVy26H4gkWdoRXCJ1wsJPgntzJB26hXwC5jI,120556
|
7
7
|
uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
|
8
8
|
uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
|
9
9
|
uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
|
@@ -23,7 +23,7 @@ uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py,sha256=fJ0U
|
|
23
23
|
uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,sha256=o9NBbVCTdxKXnpYbP8-zxe1Gh8s57vwfV75Son_sAHE,2863
|
24
24
|
uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
|
25
25
|
uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py,sha256=fhX7S_A3jqoND7NE6qITPMPvdk3FJSKZ3Eoa5RtSg3I,4247
|
26
|
-
uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=
|
26
|
+
uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py,sha256=4ebRyX2qZRbZdomuN1aJOSXij-r7YagRBQvV1vyPkqY,5059
|
27
27
|
uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
|
28
28
|
uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
|
29
29
|
uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py,sha256=1eXdST58xFRMdYl8AGNG_EwyQeLa31WSWUe882hQ2ec,6329
|
@@ -100,7 +100,7 @@ uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py,sha256=UhNXGi-_6NY
|
|
100
100
|
uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py,sha256=i3AcWkeAnk7rD59nOm0QCSH7AggqjUAdwsXuSIC8ZJE,1614
|
101
101
|
uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py,sha256=aYUVE5QqTxdj8FHhCB4EiFVDJahWJD9Pq0d1upBEvXg,1501
|
102
102
|
uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py,sha256=U0VwSNIldMv5nUoiXtFgjbE0m6Kb-8W2WZQGVCNF_WI,3261
|
103
|
-
uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=
|
103
|
+
uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py,sha256=gujFsqn3j4YGudXxhJcLiO_pVYHnY_rd6GeZHzSqqs4,4823
|
104
104
|
uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py,sha256=RSOTD1MIXSW27eGf3TixCiJK4HtSJnpfME2CjalDeXs,4326
|
105
105
|
uk_bin_collection/uk_bin_collection/councils/EastLothianCouncil.py,sha256=zTp-GDWYeUIlFaqfkqGvo7XMtxJd0VbxdGgqaAwRACk,2792
|
106
106
|
uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py,sha256=5giegMCKQ2JhVDR5M4mevVxIdhZtSW7kbuuoSkj3EGk,4361
|
@@ -141,7 +141,7 @@ uk_bin_collection/uk_bin_collection/councils/HartDistrictCouncil.py,sha256=_llxT
|
|
141
141
|
uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py,sha256=MUT1A24iZShT2p55rXEvgYwGUuw3W05Z4ZQAveehv-s,2842
|
142
142
|
uk_bin_collection/uk_bin_collection/councils/HastingsBoroughCouncil.py,sha256=9MCuit4awXSZTbZCXWBsQGX2tp2mHZ1eP1wENZdMvgA,1806
|
143
143
|
uk_bin_collection/uk_bin_collection/councils/HerefordshireCouncil.py,sha256=JpQhkWM6Jeuzf1W7r0HqvtVnEqNi18nhwJX70YucdsI,1848
|
144
|
-
uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256
|
144
|
+
uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py,sha256=k6MKZ3Xa-_a0oIpH5Rv5haYVq1eRHKSCiI6UAqAVTUk,6274
|
145
145
|
uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py,sha256=x7dfy8mdt2iGl8qJxHb-uBh4u0knmi9MJ6irOJw9WYA,4805
|
146
146
|
uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py,sha256=GNxDU65QuZHV5va2IrKtcJ6TQoDdwmV03JvkVqOauP4,3291
|
147
147
|
uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py,sha256=51vXTKrstfJhb7cLCcrsvA9qKCsptyNMZvy7ML9DasM,2344
|
@@ -216,6 +216,7 @@ uk_bin_collection/uk_bin_collection/councils/OadbyAndWigstonBoroughCouncil.py,sh
|
|
216
216
|
uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py,sha256=9dlesCxNoVXlmQaqZj7QFh00smnJbm1Gnjkr_Uvzurs,1771
|
217
217
|
uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py,sha256=d_bY0cXRDH4kSoWGGCTNN61MNErapSOf2WSTYDJr2r8,2318
|
218
218
|
uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py,sha256=Kos5GzN2co3Ij3tSHOXB9S71Yt78RROCfVRtnh7M1VU,3657
|
219
|
+
uk_bin_collection/uk_bin_collection/councils/PeterboroughCityCouncil.py,sha256=lOrDD4jfJ-_C5UwCGqRcQ1G-U1F5X6rf255ypzYEBcg,6300
|
219
220
|
uk_bin_collection/uk_bin_collection/councils/PlymouthCouncil.py,sha256=FJqpJ0GJhpjYeyZ9ioZPkKGl-zrqMD3y5iKa07e_i30,3202
|
220
221
|
uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py,sha256=xogNgVvwM5FljCziiNLgZ_wzkOnrQkifi1dkPMDRMtg,5588
|
221
222
|
uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py,sha256=db3Y5FJz-LFDqmVZqPdzcBxh0Q26OFPrbUxlQ7r4vsQ,5896
|
@@ -271,7 +272,7 @@ uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py,sha256=
|
|
271
272
|
uk_bin_collection/uk_bin_collection/councils/StroudDistrictCouncil.py,sha256=Akx80Ve7D8RVdIW1vkWLYp80VrhL6Qc3dMMKnbFWUhY,3653
|
272
273
|
uk_bin_collection/uk_bin_collection/councils/SunderlandCityCouncil.py,sha256=Eyaer--n2JQmGNaEdDbOhgIIGA6mDu72N8ph2wPIZy4,4027
|
273
274
|
uk_bin_collection/uk_bin_collection/councils/SurreyHeathBoroughCouncil.py,sha256=MROVvf7RSRYYjM2ZDD83rAEwf8BSnqXVrasgBiJC92A,5220
|
274
|
-
uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py,sha256=
|
275
|
+
uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py,sha256=FuQGlPaiFxfQS3KzkyrvcuTaVb8VkwdWYgjm42FfUt8,5762
|
275
276
|
uk_bin_collection/uk_bin_collection/councils/SwanseaCouncil.py,sha256=nmVPoPhnFgVi--vczX2i4Sf3bqM5RWJuwfhioRUr5XE,2303
|
276
277
|
uk_bin_collection/uk_bin_collection/councils/SwindonBoroughCouncil.py,sha256=lSIykpkBjVwQSf3rrnrNuh7YRepgnkKQLbf1iErMuJs,1932
|
277
278
|
uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py,sha256=k2TAAZG7n2S1BWVyxbE_-4-lZuzhOimCNz4yimUCOGk,1995
|
@@ -312,7 +313,7 @@ uk_bin_collection/uk_bin_collection/councils/WestSuffolkCouncil.py,sha256=9i8AQH
|
|
312
313
|
uk_bin_collection/uk_bin_collection/councils/WiganBoroughCouncil.py,sha256=3gqFA4-BVx_In6QOu3KUNqPN4Fkn9iMlZTeopMK9p6A,3746
|
313
314
|
uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py,sha256=Q0ooHTQb9ynMXpSNBPk7XXEjI7zcHst3id4wxGdmVx4,5698
|
314
315
|
uk_bin_collection/uk_bin_collection/councils/WinchesterCityCouncil.py,sha256=W2k00N5n9-1MzjMEqsNjldsQdOJPEPMjK7OGSinZm5Y,4335
|
315
|
-
uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha256=
|
316
|
+
uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py,sha256=d294otB0MRTLNx0Axf8Giy2ggzRRqn8ABN35wyu0l5w,2295
|
316
317
|
uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py,sha256=X_e9zXEZAl_Mp6nPORHc9CTmf3QHdoMY3BCnKrXEr1I,2131
|
317
318
|
uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py,sha256=37igH9g0xe4XIhRhcJ-ZJBU8MxTp5yzgpadWbdE33Yg,5205
|
318
319
|
uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py,sha256=H8aFHlacwV07X-6T9RQua4irqDA0cIQrF4O1FfPR7yI,4114
|
@@ -325,8 +326,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
|
|
325
326
|
uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
|
326
327
|
uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
|
327
328
|
uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
|
328
|
-
uk_bin_collection-0.
|
329
|
-
uk_bin_collection-0.
|
330
|
-
uk_bin_collection-0.
|
331
|
-
uk_bin_collection-0.
|
332
|
-
uk_bin_collection-0.
|
329
|
+
uk_bin_collection-0.141.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
|
330
|
+
uk_bin_collection-0.141.1.dist-info/METADATA,sha256=OnpbVrmjYIjaD38jCRatXYehUPJHXHczfJ5yYrqZfpI,19851
|
331
|
+
uk_bin_collection-0.141.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
|
332
|
+
uk_bin_collection-0.141.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
|
333
|
+
uk_bin_collection-0.141.1.dist-info/RECORD,,
|
File without changes
|
File without changes
|
{uk_bin_collection-0.140.0.dist-info → uk_bin_collection-0.141.1.dist-info}/entry_points.txt
RENAMED
File without changes
|