uk_bin_collection 0.138.0__py3-none-any.whl → 0.139.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- uk_bin_collection/tests/input.json +28 -0
- uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py +2 -1
- uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py +7 -3
- uk_bin_collection/uk_bin_collection/councils/CeredigionCountyCouncil.py +157 -0
- uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py +95 -61
- uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py +1 -0
- uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py +4 -1
- uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py +52 -41
- uk_bin_collection/uk_bin_collection/councils/GooglePublicCalendarCouncil.py +3 -4
- uk_bin_collection/uk_bin_collection/councils/LondonBoroughOfRichmondUponThames.py +11 -9
- uk_bin_collection/uk_bin_collection/councils/MiddlesbroughCouncil.py +13 -4
- uk_bin_collection/uk_bin_collection/councils/MonmouthshireCountyCouncil.py +5 -1
- uk_bin_collection/uk_bin_collection/councils/NewForestCouncil.py +1 -3
- uk_bin_collection/uk_bin_collection/councils/NorthDevonCountyCouncil.py +159 -0
- uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py +15 -3
- uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py +873 -871
- uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py +1 -1
- uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py +3 -6
- uk_bin_collection/uk_bin_collection/councils/SouthHollandDistrictCouncil.py +136 -0
- uk_bin_collection/uk_bin_collection/councils/WalsallCouncil.py +6 -2
- uk_bin_collection/uk_bin_collection/councils/WalthamForest.py +12 -12
- uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py +6 -3
- uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py +1 -0
- {uk_bin_collection-0.138.0.dist-info → uk_bin_collection-0.139.0.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.138.0.dist-info → uk_bin_collection-0.139.0.dist-info}/RECORD +31 -28
- {uk_bin_collection-0.138.0.dist-info → uk_bin_collection-0.139.0.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.138.0.dist-info → uk_bin_collection-0.139.0.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.138.0.dist-info → uk_bin_collection-0.139.0.dist-info}/entry_points.txt +0 -0
@@ -12,6 +12,7 @@ from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

 # import the wonderful Beautiful Soup and the URL grabber
+import re


 class CouncilClass(AbstractGetBinDataClass):

@@ -42,72 +43,82 @@ class CouncilClass(AbstractGetBinDataClass):
             wait = WebDriverWait(driver, 60)
             address_entry_field = wait.until(
                 EC.presence_of_element_located(
-                    (By.XPATH, '//*[@
+                    (By.XPATH, '//*[@placeholder="Search Properties..."]')
                 )
             )

             address_entry_field.send_keys(str(full_address))

             address_entry_field = wait.until(
-                EC.element_to_be_clickable((By.XPATH, '//*[@
+                EC.element_to_be_clickable((By.XPATH, f'//*[@title="{full_address}"]'))
             )
             address_entry_field.click()
-            address_entry_field.send_keys(Keys.BACKSPACE)
-            address_entry_field.send_keys(str(full_address[len(full_address) - 1]))

-
+            next_button = wait.until(
                 EC.element_to_be_clickable(
-                    (By.XPATH,
+                    (By.XPATH, "//lightning-button/button[contains(text(), 'Next')]")
                 )
             )
+            next_button.click()

-
-            # Wait for the 'Select your property' dropdown to appear and select the first result
-            next_btn = wait.until(
-                EC.element_to_be_clickable((By.XPATH, "//lightning-button/button"))
-            )
-            next_btn.click()
-            bin_data = wait.until(
+            result = wait.until(
                 EC.presence_of_element_located(
-                    (
+                    (
+                        By.XPATH,
+                        '//table[@class="slds-table slds-table_header-fixed slds-table_bordered slds-table_edit slds-table_resizable-cols"]',
+                    )
                 )
             )

-
+            # Make a BS4 object
+            soup = BeautifulSoup(
+                result.get_attribute("innerHTML"), features="html.parser"
+            )  # Wait for the 'Select your property' dropdown to appear and select the first result

+            data = {"bins": []}
+            today = datetime.now()
+            current_year = today.year
+
+            # Find all bin rows in the table
             rows = soup.find_all("tr", class_="slds-hint-parent")
-            current_year = datetime.now().year

             for row in rows:
-
-
-
-
-
-
-
-
-
-                else:
-                    collection_day = re.sub(
-                        r"[^a-zA-Z0-9,\s]", "", columns[0].get_text()
-                    ).strip()
-
-                    # Parse the date from the string
-                    parsed_date = datetime.strptime(collection_day, "%a, %d %B")
-                    if parsed_date < datetime(
-                        parsed_date.year, parsed_date.month, parsed_date.day
-                    ):
-                        parsed_date = parsed_date.replace(year=current_year + 1)
-                    else:
-                        parsed_date = parsed_date.replace(year=current_year)
-                    # Format the date as %d/%m/%Y
-                    formatted_date = parsed_date.strftime("%d/%m/%Y")
+                try:
+                    bin_type_cell = row.find("th")
+                    date_cell = row.find("td")
+
+                    if not bin_type_cell or not date_cell:
+                        continue
+
+                    container_type = bin_type_cell.get("data-cell-value", "").strip()
+                    raw_date_text = date_cell.get("data-cell-value", "").strip()

-                    #
+                    # Handle relative values like "Today" or "Tomorrow"
+                    if "today" in raw_date_text.lower():
+                        parsed_date = today
+                    elif "tomorrow" in raw_date_text.lower():
+                        parsed_date = today + timedelta(days=1)
+                    else:
+                        # Expected format: "Thu, 10 April"
+                        # Strip any rogue characters and try parsing
+                        cleaned_date = re.sub(r"[^\w\s,]", "", raw_date_text)
+                        try:
+                            parsed_date = datetime.strptime(cleaned_date, "%a, %d %B")
+                            parsed_date = parsed_date.replace(year=current_year)
+                            if parsed_date < today:
+                                # Date has passed this year, must be next year
+                                parsed_date = parsed_date.replace(year=current_year + 1)
+                        except Exception as e:
+                            print(f"Could not parse date '{cleaned_date}': {e}")
+                            continue
+
+                    formatted_date = parsed_date.strftime(date_format)
                     data["bins"].append(
                         {"type": container_type, "collectionDate": formatted_date}
                     )
+
+                except Exception as e:
+                    print(f"Error processing row: {e}")
         except Exception as e:
             # Here you can log the exception if needed
             print(f"An error occurred: {e}")

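The rewritten row handler accepts relative values such as "Today" and "Tomorrow" as well as day-and-month strings with no year, rolling the year forward when the parsed date has already passed. A minimal standalone sketch of that date logic (the helper name, sample inputs and dd/mm/yyyy output format are illustrative assumptions, not part of the package):

```python
import re
from datetime import datetime, timedelta


def resolve_collection_date(raw_text, today=None):
    """Turn 'Today', 'Tomorrow' or 'Thu, 10 April' into a dd/mm/yyyy string,
    assuming a year-less date that has already passed belongs to next year."""
    today = today or datetime.now()
    lowered = raw_text.strip().lower()

    if "today" in lowered:
        parsed = today
    elif "tomorrow" in lowered:
        parsed = today + timedelta(days=1)
    else:
        # Strip rogue characters (stray punctuation, non-breaking spaces, etc.)
        cleaned = re.sub(r"[^\w\s,]", "", raw_text)
        parsed = datetime.strptime(cleaned, "%a, %d %B").replace(year=today.year)
        if parsed < today:
            parsed = parsed.replace(year=today.year + 1)

    return parsed.strftime("%d/%m/%Y")


print(resolve_collection_date("Thu, 10 April", today=datetime(2025, 12, 1)))
# -> 10/04/2026: April has already passed relative to December 2025
```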
@@ -30,9 +30,8 @@ class CouncilClass(AbstractGetBinDataClass):
             except Exception:
                 continue

-            bindata["bins"].append(
-                "type": event.name,
-
-            })
+            bindata["bins"].append(
+                {"type": event.name, "collectionDate": collection_date}
+            )

         return bindata

@@ -50,12 +50,18 @@ class CouncilClass(AbstractGetBinDataClass):
         for index, bin_type in enumerate(bin_types):
             # currently only handled weekly and garden collection, special collections like Christmas Day need to be added
             if index == WEEKLY_COLLECTION:
-                next_collection_date = get_next_day_of_week(
+                next_collection_date = get_next_day_of_week(
+                    collection_days[index].text.strip(), date_format
+                )
             elif index == GARDEN_COLLECTION:
                 split_date_part = collection_days[index].text.split("More dates")[0]
-                next_collection_date = datetime.strptime(
+                next_collection_date = datetime.strptime(
+                    split_date_part.strip(), "%d %B %Y"
+                ).strftime(date_format)
             else:
-                next_collection_date = datetime.strptime(
+                next_collection_date = datetime.strptime(
+                    collection_days[index].text.strip(), "%d %B %Y"
+                ).strftime(date_format)

             dict_data = {
                 "type": bin_type.text.strip(),

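The reflowed calls above also document the date formats this scraper expects: weekly collections go through `get_next_day_of_week`, while garden and other collections arrive as "%d %B %Y" strings, with garden dates carrying a "More dates" suffix that has to be trimmed first. A short sketch of that conversion (the sample string and the `DATE_FORMAT` value are assumptions; in the package the target format is the shared `date_format` constant):

```python
from datetime import datetime

DATE_FORMAT = "%d/%m/%Y"  # assumed target format for illustration

# Garden collections render as e.g. "14 April 2025 More dates ...", so the
# text is split on "More dates" before parsing.
raw = "14 April 2025 More dates available online"
split_date_part = raw.split("More dates")[0]

next_collection_date = datetime.strptime(
    split_date_part.strip(), "%d %B %Y"
).strftime(DATE_FORMAT)

print(next_collection_date)  # 14/04/2025
```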
@@ -83,16 +89,12 @@ class CouncilClass(AbstractGetBinDataClass):

     def input_street_name(self, street_name, wait):
         input_element_postcodesearch = wait.until(
-            EC.visibility_of_element_located(
-                (By.ID, "Street")
-            )
+            EC.visibility_of_element_located((By.ID, "Street"))
         )
         input_element_postcodesearch.send_keys(street_name)

     def dismiss_cookie_banner(self, wait):
         cookie_banner = wait.until(
-            EC.visibility_of_element_located(
-                (By.ID, "ccc-dismiss-button")
-            )
+            EC.visibility_of_element_located((By.ID, "ccc-dismiss-button"))
         )
         cookie_banner.send_keys(Keys.ENTER)

@@ -12,6 +12,7 @@ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

 import re

+
 class CouncilClass(AbstractGetBinDataClass):
     def parse_data(self, page: str, **kwargs) -> dict:
         try:

@@ -63,19 +64,27 @@ class CouncilClass(AbstractGetBinDataClass):

             # **Regex to match "Wednesday, February 19" format**
             match = re.match(r"([A-Za-z]+), ([A-Za-z]+) (\d{1,2})", raw_date)
-
+
             if match:
-                day_name, month_name, day_number =
+                day_name, month_name, day_number = (
+                    match.groups()
+                )  # Extract components
                 extracted_month = datetime.strptime(month_name, "%B").month
                 extracted_day = int(day_number)

                 # Handle Dec-Jan rollover: If month is before the current month, assume next year
-                inferred_year =
+                inferred_year = (
+                    current_year + 1
+                    if extracted_month < current_month
+                    else current_year
+                )

                 # **Correct the raw_date format before parsing**
                 raw_date = f"{day_name}, {month_name} {day_number}, {inferred_year}"

-                print(
+                print(
+                    f"DEBUG: Final raw_date before parsing -> {raw_date}"
+                )  # Debugging output

                 # Convert to required format (%d/%m/%Y)
                 try:

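The change above only re-wraps the tuple unpacking and the conditional expression, but the surrounding logic is worth spelling out: dates arrive as "Wednesday, February 19" with no year, and a month earlier than the current month is taken to mean next year (the Dec-Jan rollover). A self-contained sketch of that inference (the function name and sample values are illustrative):

```python
import re
from datetime import datetime


def infer_full_date(raw_date, now):
    """Resolve a year-less 'Weekday, Month DD' string against a reference date."""
    match = re.match(r"([A-Za-z]+), ([A-Za-z]+) (\d{1,2})", raw_date)
    if not match:
        raise ValueError(f"Unrecognised date string: {raw_date!r}")

    day_name, month_name, day_number = match.groups()  # day_name is not needed
    extracted_month = datetime.strptime(month_name, "%B").month
    inferred_year = now.year + 1 if extracted_month < now.month else now.year
    return datetime(inferred_year, extracted_month, int(day_number))


print(infer_full_date("Wednesday, February 19", now=datetime(2024, 12, 30)))
# -> 2025-02-19 00:00:00 (February < December, so next year is assumed)
```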
@@ -43,7 +43,11 @@ class CouncilClass(AbstractGetBinDataClass):

             # Extract collection date (e.g., "Monday 9th December")
             date_tag = panel.find("p")
-            if
+            if (
+                date_tag
+                and "Your next collection date is"
+                in date_tag.text.strip().replace("\r", "").replace("\n", "")
+            ):
                 collection_date = date_tag.find("strong").text.strip()
             else:
                 continue

@@ -127,9 +127,7 @@ class CouncilClass(AbstractGetBinDataClass):
         # Garden waste
         garden_waste = soup.find("div", class_="eb-2HIpCnWC-Override-EditorInput")
         if garden_waste:
-            match = re.search(
-                r"(\d{2}/\d{2}/\d{4})", garden_waste.text
-            )
+            match = re.search(r"(\d{2}/\d{2}/\d{4})", garden_waste.text)
             if match:
                 bins.append(
                     {"type": "Garden waste", "collectionDate": match.group(1)}

@@ -0,0 +1,159 @@
+from time import sleep
+
+from bs4 import BeautifulSoup
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select, WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the base
+    class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            user_uprn = kwargs.get("uprn")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_uprn(user_uprn)
+            check_postcode(user_postcode)
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get(
+                "https://my.northdevon.gov.uk/service/WasteRecyclingCollectionCalendar"
+            )
+
+            # Wait for iframe to load and switch to it
+            WebDriverWait(driver, 30).until(
+                EC.frame_to_be_available_and_switch_to_it((By.ID, "fillform-frame-1"))
+            )
+
+            # Wait for postcode entry box
+            postcode = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located((By.ID, "postcode_search"))
+            )
+            # Enter postcode
+            postcode.send_keys(user_postcode.replace(" ", ""))
+
+            # Wait for address selection dropdown to appear
+            address = Select(
+                WebDriverWait(driver, 10).until(
+                    EC.visibility_of_element_located((By.ID, "chooseAddress"))
+                )
+            )
+
+            # Wait for spinner to disappear (signifies options are loaded for select)
+            WebDriverWait(driver, 10).until(
+                EC.invisibility_of_element_located(
+                    (By.CLASS_NAME, "spinner-outer")
+                )  # row-fluid spinner-outer
+            )
+
+            # Sometimes the options aren't fully there despite the spinner being gone, wait another 2 seconds.
+            sleep(2)
+
+            # Select address by UPRN
+            address.select_by_value(user_uprn)
+
+            # Wait for spinner to disappear (signifies data is loaded)
+            WebDriverWait(driver, 10).until(
+                EC.invisibility_of_element_located((By.CLASS_NAME, "spinner-outer"))
+            )
+
+            sleep(2)
+
+            address_confirmation = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//h2[contains(text(), 'Your address')]")
+                )
+            )
+
+            next_button = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//button/span[contains(@class, 'nextText')]")
+                )
+            )
+
+            next_button.click()
+
+            results = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (By.XPATH, "//h4[contains(text(), 'Key')]")
+                )
+            )
+
+            # Find data table
+            data_table = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.XPATH,
+                        '//div[@data-field-name="html1"]/div[contains(@class, "fieldContent")]',
+                    )
+                )
+            )
+
+            # Make a BS4 object
+            soup = BeautifulSoup(
+                data_table.get_attribute("innerHTML"), features="html.parser"
+            )
+
+            # Initialize the data dictionary
+            data = {"bins": []}
+
+            # Loop through each list of waste dates
+            waste_sections = soup.find_all("ul", class_="wasteDates")
+
+            current_month_year = None
+
+            for section in waste_sections:
+                for li in section.find_all("li", recursive=False):
+                    if "MonthLabel" in li.get("class", []):
+                        # Extract month and year (e.g., "April 2025")
+                        header = li.find("h4")
+                        if header:
+                            current_month_year = header.text.strip()
+                    elif any(
+                        bin_class in li.get("class", [])
+                        for bin_class in ["BlackBin", "GreenBin", "Recycling"]
+                    ):
+                        bin_type = li.find("span", class_="wasteType").text.strip()
+                        day = li.find("span", class_="wasteDay").text.strip()
+                        weekday = li.find("span", class_="wasteName").text.strip()
+
+                        if current_month_year and day:
+                            try:
+                                full_date = f"{day} {current_month_year}"
+                                collection_date = datetime.strptime(
+                                    full_date, "%d %B %Y"
+                                ).strftime(date_format)
+                                dict_data = {
+                                    "type": bin_type,
+                                    "collectionDate": collection_date,
+                                }
+                                data["bins"].append(dict_data)
+                            except Exception as e:
+                                print(f"Skipping invalid date '{full_date}': {e}")
+
+            data["bins"].sort(
+                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
+            )
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return data

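The new NorthDevonCountyCouncil module walks each `ul.wasteDates` list, picking up the month and year from `MonthLabel` items and the day number from the bin items that follow. A minimal sketch of that parsing step against hypothetical markup (the HTML snippet and the dd/mm/yyyy output format are assumptions modelled on the class names the scraper queries; the real page is rendered inside the council's form iframe):

```python
from datetime import datetime

from bs4 import BeautifulSoup

# Hypothetical markup for illustration only.
html = """
<ul class="wasteDates">
  <li class="MonthLabel"><h4>April 2025</h4></li>
  <li class="BlackBin">
    <span class="wasteType">Black bin</span>
    <span class="wasteName">Tuesday</span>
    <span class="wasteDay">15</span>
  </li>
</ul>
"""

soup = BeautifulSoup(html, features="html.parser")
bins = []
current_month_year = None

for li in soup.find("ul", class_="wasteDates").find_all("li", recursive=False):
    classes = li.get("class", [])
    if "MonthLabel" in classes:
        # The month header provides the year for the day numbers that follow
        current_month_year = li.find("h4").text.strip()
    elif any(c in classes for c in ["BlackBin", "GreenBin", "Recycling"]):
        bin_type = li.find("span", class_="wasteType").text.strip()
        day = li.find("span", class_="wasteDay").text.strip()
        date = datetime.strptime(f"{day} {current_month_year}", "%d %B %Y")
        bins.append({"type": bin_type, "collectionDate": date.strftime("%d/%m/%Y")})

print(bins)  # [{'type': 'Black bin', 'collectionDate': '15/04/2025'}]
```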
@@ -53,7 +53,12 @@ class CouncilClass(AbstractGetBinDataClass):
         if alternateCheck:
             bin_types = strong[2].text.strip().replace(".", "").split(" and ")
             for bin in bin_types:
-                collections.append(
+                collections.append(
+                    (
+                        bin.capitalize(),
+                        datetime.strptime(strong[1].text.strip(), date_format),
+                    )
+                )

         else:
             p_tag = soup.find_all("p")

@@ -63,11 +68,18 @@ class CouncilClass(AbstractGetBinDataClass):
                     p.text.split("Your ")[1].split(" is collected")[0].split(" and ")
                 )
                 for bin in bin_types:
-                    collections.append(
+                    collections.append(
+                        (
+                            bin.capitalize(),
+                            datetime.strptime(strong[1].text.strip(), date_format),
+                        )
+                    )
                 i += 2

         if len(strong) > 3:
-            collections.append(
+            collections.append(
+                ("Garden", datetime.strptime(strong[4].text.strip(), date_format))
+            )

         ordered_data = sorted(collections, key=lambda x: x[1])
         for item in ordered_data:

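These last two hunks only re-wrap the `collections.append` calls, but together they show the pattern this scraper relies on: accumulate `(bin type, parsed datetime)` tuples, then sort on the datetime before emitting the `bins` list. A short illustration of that pattern (the sample dates and the `DATE_FORMAT` value are assumptions):

```python
from datetime import datetime

DATE_FORMAT = "%d/%m/%Y"  # assumed; the package supplies its own date_format

# Accumulate (bin type, parsed date) tuples as the page is scraped...
collections = [
    ("Recycling", datetime.strptime("21/04/2025", DATE_FORMAT)),
    ("Refuse", datetime.strptime("14/04/2025", DATE_FORMAT)),
    ("Garden", datetime.strptime("16/04/2025", DATE_FORMAT)),
]

# ...then sort on the datetime element before building the output dict.
ordered_data = sorted(collections, key=lambda x: x[1])
data = {
    "bins": [
        {"type": item[0], "collectionDate": item[1].strftime(DATE_FORMAT)}
        for item in ordered_data
    ]
}

print([b["type"] for b in data["bins"]])  # ['Refuse', 'Garden', 'Recycling']
```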