uk_bin_collection 0.121.0__py3-none-any.whl → 0.121.1__py3-none-any.whl

This diff shows the content changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
@@ -1,3 +1,4 @@
+ import re
  import time

  from bs4 import BeautifulSoup
@@ -27,106 +28,113 @@ class CouncilClass(AbstractGetBinDataClass):
  bindata = {"bins": []}

  # Initialize the WebDriver (Chrome in this case)
- driver = create_webdriver(
+ with create_webdriver(
  web_driver,
  headless,
  "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
  __name__,
- )
+ ) as driver:

- # Step 1: Navigate to the form page
- driver.get(
- "https://lewisham.gov.uk/myservices/recycling-and-rubbish/your-bins/collection"
- )
+ # Step 1: Navigate to the form page
+ driver.get(
+ "https://lewisham.gov.uk/myservices/recycling-and-rubbish/your-bins/collection"
+ )

- try:
- cookie_accept_button = WebDriverWait(driver, 5).until(
- EC.element_to_be_clickable(
- (By.ID, "CybotCookiebotDialogBodyLevelButtonLevelOptinAllowAll")
+ try:
+ cookie_accept_button = WebDriverWait(driver, 5).until(
+ EC.element_to_be_clickable(
+ (By.ID, "CybotCookiebotDialogBodyLevelButtonLevelOptinAllowAll")
+ )
  )
+ cookie_accept_button.click()
+ except Exception:
+ print("No cookie consent banner found or already dismissed.")
+
+ # Wait for the form to load
+ WebDriverWait(driver, 10).until(
+ EC.presence_of_element_located((By.CLASS_NAME, "address-finder"))
  )
- cookie_accept_button.click()
- except Exception:
- print("No cookie consent banner found or already dismissed.")
-
- # Wait for the form to load
- WebDriverWait(driver, 10).until(
- EC.presence_of_element_located((By.CLASS_NAME, "address-finder"))
- )
-
- # Step 2: Locate the input field for the postcode
- postcode_input = driver.find_element(By.CLASS_NAME, "js-address-finder-input")
-
- # Enter the postcode
- postcode_input.send_keys(user_postcode) # Replace with your desired postcode
- time.sleep(1) # Optional: Wait for the UI to react
-
- # Step 4: Click the "Find address" button with retry logic
- find_button = WebDriverWait(driver, 10).until(
- EC.element_to_be_clickable(
- (By.CLASS_NAME, "js-address-finder-step-address")
+
+ # Step 2: Locate the input field for the postcode
+ postcode_input = driver.find_element(
+ By.CLASS_NAME, "js-address-finder-input"
  )
- )
- find_button.click()
-
- # Wait for the address selector to appear and options to load
- WebDriverWait(driver, 10).until(
- lambda d: len(
- d.find_element(By.ID, "address-selector").find_elements(
- By.TAG_NAME, "option"
+
+ # Enter the postcode
+ postcode_input.send_keys(
+ user_postcode
+ ) # Replace with your desired postcode
+ time.sleep(1) # Optional: Wait for the UI to react
+
+ # Step 4: Click the "Find address" button with retry logic
+ find_button = WebDriverWait(driver, 10).until(
+ EC.element_to_be_clickable(
+ (By.CLASS_NAME, "js-address-finder-step-address")
  )
  )
- > 1
- )
-
- # Select the dropdown and print available options
- address_selector = driver.find_element(By.ID, "address-selector")
-
- # Use Select class to interact with the dropdown
- select = Select(address_selector)
- if len(select.options) > 1:
- select.select_by_value(user_uprn)
- else:
- print("No additional addresses available to select")
-
- # Wait until the URL contains the expected substring
- WebDriverWait(driver, 10).until(
- EC.url_contains("/find-your-collection-day-result")
- )
-
- # Parse the HTML
- soup = BeautifulSoup(driver.page_source, "html.parser")
-
- # Extract the main container
- collection_result = soup.find("div", class_="js-find-collection-result")
-
- # Extract each collection type and its frequency/day
- for strong_tag in collection_result.find_all("strong"):
- bin_type = strong_tag.text.strip() # e.g., "Food waste"
- # Extract day from the sibling text
- schedule_text = (
- strong_tag.next_sibling.next_sibling.next_sibling.text.strip()
- .split("on\n")[-1]
- .replace("\n", "")
- .replace("\t", "")
+ find_button.click()
+
+ # Wait for the address selector to appear and options to load
+ WebDriverWait(driver, 10).until(
+ lambda d: len(
+ d.find_element(By.ID, "address-selector").find_elements(
+ By.TAG_NAME, "option"
+ )
+ )
+ > 1
  )
- day = schedule_text.strip().split(".")[0]

- # Extract the next collection date
- if "Your next collection date is" in schedule_text:
- start_index = schedule_text.index("Your next collection date is") + len(
- "Your next collection date is"
- )
- next_collection_date = (
- schedule_text[start_index:].strip().split("\n")[0].strip()
- )
+ # Select the dropdown and print available options
+ address_selector = driver.find_element(By.ID, "address-selector")
+
+ # Use Select class to interact with the dropdown
+ select = Select(address_selector)
+ if len(select.options) > 1:
+ select.select_by_value(user_uprn)
  else:
- next_collection_date = get_next_day_of_week(day, date_format)
+ print("No additional addresses available to select")
+
+ # Wait until the URL contains the expected substring
+ WebDriverWait(driver, 10).until(
+ EC.url_contains("/find-your-collection-day-result")
+ )
+
+ # Parse the HTML
+ soup = BeautifulSoup(driver.page_source, "html.parser")

- dict_data = {
- "type": bin_type,
- "collectionDate": next_collection_date,
- }
- bindata["bins"].append(dict_data)
+ # Extract the main container
+ collection_result = soup.find("div", class_="js-find-collection-result")

- return bindata
+ # Extract each collection type and its frequency/day
+ for strong_tag in collection_result.find_all("strong"):
+ bin_type = strong_tag.text.strip() # e.g., "Food waste"
+ # Extract the sibling text
+ schedule_text = (
+ strong_tag.next_sibling.next_sibling.next_sibling.text.strip()
+ .replace("\n", " ")
+ .replace("\t", " ")
+ )
+
+ # Extract the day using regex
+ print(schedule_text)
+ day_match = re.search(r"on\s*(\w+day)", schedule_text)
+ print(day_match)
+ day = day_match.group(1) if day_match else None
+
+ # Extract the next collection date using regex
+ date_match = re.search(
+ r"Your next collection date is\s*(\d{2}/\d{2}/\d{4})(.?)",
+ schedule_text,
+ )
+ if date_match:
+ next_collection_date = date_match.group(1)
+ else:
+ next_collection_date = get_next_day_of_week(day, date_format)
+
+ dict_data = {
+ "type": bin_type,
+ "collectionDate": next_collection_date,
+ }
+ bindata["bins"].append(dict_data)
+
+ return bindata
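The main functional change in 0.121.1 is that the Lewisham parser now derives the collection day and next collection date with the re module instead of string splitting, and it wraps the whole scrape in a with create_webdriver(...) block. A minimal sketch of how the two new patterns behave, run against a made-up schedule string (the sample wording is an assumption; the exact text on the Lewisham results page is not shown in this diff):

import re

# Hypothetical normalised schedule text; the live page wording may differ.
schedule_text = (
    "Food waste is collected every week on Tuesday. "
    "Your next collection date is 24/12/2024."
)

# Patterns as used by the 0.121.1 parser.
day_match = re.search(r"on\s*(\w+day)", schedule_text)
date_match = re.search(
    r"Your next collection date is\s*(\d{2}/\d{2}/\d{4})(.?)", schedule_text
)

print(day_match.group(1) if day_match else None)    # Tuesday
print(date_match.group(1) if date_match else None)  # 24/12/2024

As in 0.121.0, when no explicit date is found the council class falls back to get_next_day_of_week(day, date_format).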
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: uk_bin_collection
- Version: 0.121.0
+ Version: 0.121.1
  Summary: Python Lib to collect UK Bin Data
  Author: Robert Bradley
  Author-email: robbrad182@gmail.com
@@ -141,7 +141,7 @@ uk_bin_collection/uk_bin_collection/councils/LondonBoroughHarrow.py,sha256=kzKwb
  uk_bin_collection/uk_bin_collection/councils/LondonBoroughHavering.py,sha256=6DNX6IShdoEG4FjoyrzrY_HLEu9R2Bgl6PN0bSCbhow,2436
  uk_bin_collection/uk_bin_collection/councils/LondonBoroughHounslow.py,sha256=UOeiOxGMvVMm2UFaqjmQpm7vxzqJNSSN8LM9lAUjs2c,3021
  uk_bin_collection/uk_bin_collection/councils/LondonBoroughLambeth.py,sha256=r9D5lHe5kIRStCd5lRIax16yhb4KTFzzfYEFv1bacWw,2009
- uk_bin_collection/uk_bin_collection/councils/LondonBoroughLewisham.py,sha256=s5RIuNeOpuQd8z_NO1-YdSaq8l3snE5D95SNVd0on9s,4871
+ uk_bin_collection/uk_bin_collection/councils/LondonBoroughLewisham.py,sha256=d8rlJDTbY3nj-Zjg6iwvwfe-X13Gq86DGGW6QkQAUW0,5310
  uk_bin_collection/uk_bin_collection/councils/LondonBoroughRedbridge.py,sha256=A_6Sis5hsF53Th04KeadHRasGbpAm6aoaWJ6X8eC4Y8,6604
  uk_bin_collection/uk_bin_collection/councils/LondonBoroughSutton.py,sha256=c2haHOwX7Dy1phVUTv3xaQgM9VeHmFuBwdAgqGcGs8Y,2369
  uk_bin_collection/uk_bin_collection/councils/LutonBoroughCouncil.py,sha256=vScUi_R8FnBddii2_zLlZBLxuh85mKmCm8nKW3zxky0,2758
@@ -285,8 +285,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
  uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
  uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
  uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
- uk_bin_collection-0.121.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
- uk_bin_collection-0.121.0.dist-info/METADATA,sha256=f-aX3ZbYEjfWr5ieH7-qxbRqF7ChjB79u73BupT5sIY,17574
- uk_bin_collection-0.121.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- uk_bin_collection-0.121.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
- uk_bin_collection-0.121.0.dist-info/RECORD,,
+ uk_bin_collection-0.121.1.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+ uk_bin_collection-0.121.1.dist-info/METADATA,sha256=ZqB1SnI8jyknJKcVIK20CPMBBgHQMIV5jpHIhx1jHFM,17574
+ uk_bin_collection-0.121.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ uk_bin_collection-0.121.1.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+ uk_bin_collection-0.121.1.dist-info/RECORD,,