uk_bin_collection 0.82.1__py3-none-any.whl → 0.84.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -247,6 +247,13 @@
247
247
  "wiki_name": "Conwy County Borough Council",
248
248
  "wiki_note": "Conwy County Borough Council is a straight up uprn in the url eg &uprn=XXXXXXXXXXXXX ."
249
249
  },
250
+ "CornwallCouncil": {
251
+ "skip_get_url": true,
252
+ "uprn": "100040128734",
253
+ "url": "https://www.cornwall.gov.uk/my-area/",
254
+ "wiki_name": "Cornwall Council",
255
+ "wiki_note": "Use https://uprn.uk/ to find your UPRN."
256
+ },
250
257
  "CrawleyBoroughCouncil": {
251
258
  "house_number": "9701076",
252
259
  "skip_get_url": true,
@@ -393,11 +400,11 @@
393
400
  "wiki_note": "Pass the house name/number in the house number parameter, wrapped in double quotes"
394
401
  },
395
402
  "GedlingBoroughCouncil": {
396
- "house_number": "Valeside Gardens",
403
+ "house_number": "Friday G4, Friday J",
397
404
  "skip_get_url": true,
398
405
  "url": "https://www.gedling.gov.uk/",
399
406
  "wiki_name": "Gedling Borough Council",
400
- "wiki_note": "Pass the street name into the -n parameter. Use [this](https://apps.gedling.gov.uk/refuse/search.aspx) to help work out a street search that is unique. Known issues: 1) If multiple streets returned, it will pick the first and these may have different bin schedules. 2) This data is hand entered as council only provide non-parseable non-accessible PDFs - Please double check for any issues. 2b) Data is only included until end of Nov 2024 (refuse) and March 2024 (Garden). Data will need to be updated by hand after these dates."
407
+ "wiki_note": "Use [this site](https://www.gbcbincalendars.co.uk/) to find the collections for your address. Use the -n parameter to add them in a comma-separated list inside quotes, such as: 'Friday G4, Friday J'."
401
408
  },
402
409
  "GlasgowCityCouncil": {
403
410
  "url": "https://www.glasgow.gov.uk/forms/refuseandrecyclingcalendar/PrintCalendar.aspx?UPRN=906700034497",
@@ -106,6 +106,15 @@ def get_date_with_ordinal(date_number: int) -> str:
106
106
  else {1: "st", 2: "nd", 3: "rd"}.get(date_number % 10, "th")
107
107
  )
108
108
 
109
def has_numbers(inputString: str) -> bool:
    """
    Check whether a string contains at least one digit character.

    :param inputString: String to check for numbers
    :return: True if any numbers are found in input string
    :rtype: bool
    """
    for char in inputString:
        if char.isdigit():
            return True
    return False
117
+
109
118
 
110
119
  def remove_ordinal_indicator_from_date_string(date_string: str) -> str:
111
120
  """
@@ -0,0 +1,70 @@
1
+ from bs4 import BeautifulSoup
2
+ from uk_bin_collection.uk_bin_collection.common import *
3
+ from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
4
+ from dateutil.relativedelta import relativedelta
5
+
6
+
7
+
8
+ # import the wonderful Beautiful Soup and the URL grabber
9
# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Cornwall Council bin-collection scraper.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """
        Fetch collection days for the given UPRN from the Cornwall Council
        endpoint and return them sorted by date.

        :param page: Unused; data is fetched directly by UPRN.
        :param kwargs: Must contain "uprn" (validated by check_uprn).
        :return: {"bins": [{"type": ..., "collectionDate": ...}, ...]}
        """
        collections = []
        curr_date = datetime.today()

        user_uprn = kwargs.get("uprn")
        check_uprn(user_uprn)

        # Browser-like headers: the endpoint rejects obviously non-browser requests.
        headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
            'Accept-Language': 'en-GB,en;q=0.9',
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
            'Pragma': 'no-cache',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': 'none',
            'Sec-Fetch-User': '?1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.6422.143 Safari/537.36',
            'sec-ch-ua': '"Opera GX";v="111", "Chromium";v="125", "Not.A/Brand";v="24"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
        }
        params = {
            'uprn': f'{user_uprn}',
        }
        response = requests.get(
            'https://www.cornwall.gov.uk/umbraco/surface/waste/MyCollectionDays',
            params=params,
            headers=headers
        )
        # Fail fast on HTTP errors rather than silently parsing an error page
        # into an empty schedule.
        response.raise_for_status()

        soup = BeautifulSoup(response.text, features="html.parser")

        for item in soup.find_all('div', class_='collection text-center service'):
            bin_type = item.contents[1].text + " bin"
            # Dates arrive without a year (e.g. "25 Dec"); assume the current
            # year, then roll January dates seen in December into next year.
            collection_date = datetime.strptime(
                item.contents[5].text, "%d %b"
            ).replace(year=curr_date.year)
            if curr_date.month == 12 and collection_date.month == 1:
                collection_date = collection_date + relativedelta(years=1)
            collections.append((bin_type, collection_date))

        # Earliest collection first.
        data = {"bins": []}
        for bin_type, collection_date in sorted(collections, key=lambda c: c[1]):
            dict_data = {
                "type": bin_type.capitalize().strip(),
                "collectionDate": collection_date.strftime(date_format),
            }
            data["bins"].append(dict_data)

        return data