uk_bin_collection 0.82.0-py3-none-any.whl → 0.83.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -283,10 +283,10 @@
         "wiki_name": "Doncaster Council"
     },
     "DorsetCouncil": {
-        "url": "https://gi.dorsetcouncil.gov.uk/mapping/mylocal/viewresults/100040711049",
-        "wiki_command_url_override": "https://gi.dorsetcouncil.gov.uk/mapping/mylocal/viewresults/XXXXXXXX",
-        "wiki_name": "Dorset Council",
-        "wiki_note": "Replace XXXXXXXX with UPRN."
+        "skip_get_url": true,
+        "url": "https://www.dorsetcouncil.gov.uk/",
+        "uprn": "100040711049",
+        "wiki_name": "Dorset Council"
     },
     "DoverDistrictCouncil": {
         "url": "https://collections.dover.gov.uk/property/100060908340",
@@ -393,11 +393,11 @@
         "wiki_note": "Pass the house name/number in the house number parameter, wrapped in double quotes"
     },
     "GedlingBoroughCouncil": {
-        "house_number": "Valeside Gardens",
+        "house_number": "Friday G4, Friday J",
         "skip_get_url": true,
         "url": "https://www.gedling.gov.uk/",
         "wiki_name": "Gedling Borough Council",
-        "wiki_note": "Pass the street name into the -n parameter. Use [this](https://apps.gedling.gov.uk/refuse/search.aspx) to help work out a street search that is unique. Known issues: 1) If multiple streets returned, it will pick the first and these may have different bin schedules. 2) This data is hand entered as council only provide non-parseable non-accessible PDFs - Please double check for any issues. 2b) Data is only included until end of Nov 2024 (refuse) and March 2024 (Garden). Data will need to be updated by hand after these dates."
+        "wiki_note": "Use [this site](https://www.gbcbincalendars.co.uk/) to find the collections for your address. Use the -n parameter to add them in a comma-separated list inside quotes, such as: 'Friday G4, Friday J'."
     },
     "GlasgowCityCouncil": {
         "url": "https://www.glasgow.gov.uk/forms/refuseandrecyclingcalendar/PrintCalendar.aspx?UPRN=906700034497",
@@ -106,6 +106,15 @@ def get_date_with_ordinal(date_number: int) -> str:
         else {1: "st", 2: "nd", 3: "rd"}.get(date_number % 10, "th")
     )
 
+def has_numbers(inputString: str) -> bool:
+    """
+
+    :rtype: bool
+    :param inputString: String to check for numbers
+    :return: True if any numbers are found in input string
+    """
+    return any(char.isdigit() for char in inputString)
+
 
 def remove_ordinal_indicator_from_date_string(date_string: str) -> str:
     """
@@ -14,38 +14,22 @@ class CouncilClass(AbstractGetBinDataClass):
     def parse_data(self, page: str, **kwargs) -> dict:
         data = {"bins": []}
         collections = []
-
-        # Parse the page and find all the result boxes
-        soup = BeautifulSoup(page.text, features="html.parser")
-        soup.prettify()
-        results = soup.find_all("li", {"class": "resultListItem"})
-
-        # If the result box has a wanted string in, we can use it. Check the contents of each box and find the
-        # desired text and dates
-        for r in results:
-            if "Your next" in r.text:
-                if type(r.contents[10]) is element.NavigableString:
-                    bin_text = r.contents[10].text.split(" ")[2].title() + " bin"
-                    bin_date = datetime.strptime(
-                        remove_ordinal_indicator_from_date_string(
-                            r.contents[11].text.strip()
-                        ),
-                        "%A %d %B %Y",
-                    )
-                else:
-                    bin_text = r.contents[11].text.split(" ")[2].title() + " bin"
-                    bin_date = datetime.strptime(
-                        remove_ordinal_indicator_from_date_string(
-                            r.contents[12].text.strip()
-                        ),
-                        "%A %d %B %Y",
-                    )
-
-                if bin_date.date() >= datetime.now().date():
-                    collections.append((bin_text, bin_date))
-
-        # Sort the text and date elements by date
-        ordered_data = sorted(collections, key=lambda x: x[1])
+        url_base = "https://geoapi.dorsetcouncil.gov.uk/v1/services/"
+        url_types = ["recyclingday", "refuseday", "foodwasteday", "gardenwasteday"]
+
+        uprn = kwargs.get("uprn")
+        # Check the UPRN is valid
+        check_uprn(uprn)
+
+        for url_type in url_types:
+            response = requests.get(f"{url_base}{url_type}/{uprn}")
+            if response.status_code != 200:
+                raise ConnectionError(f"Could not fetch from {url_type} endpoint")
+            json_data = response.json()["values"][0]
+            collections.append((f"{json_data.get('type')} bin", datetime.strptime(json_data.get('dateNextVisit'), "%Y-%m-%d")))
+
+        # Sort the text and date elements by date
+        ordered_data = sorted(collections, key=lambda x: x[1])
 
         # Put the elements into the dictionary
         for item in ordered_data:
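
For reference, the rewritten parse_data expects each of the four service endpoints to return a JSON body whose "values" list contains at least one record with "type" and "dateNextVisit" fields. The sketch below replays the tuple construction from the hunk above against a made-up record; the field values are invented for illustration, not real API output.

    # Assumed response shape, inferred from the parse_data rewrite above;
    # the record values here are illustrative only.
    from datetime import datetime

    example_response = {"values": [{"type": "Recycling", "dateNextVisit": "2024-01-05"}]}

    json_data = example_response["values"][0]
    collection = (
        f"{json_data.get('type')} bin",
        datetime.strptime(json_data.get("dateNextVisit"), "%Y-%m-%d"),
    )
    print(collection)  # ('Recycling bin', datetime.datetime(2024, 1, 5, 0, 0))
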