uk_bin_collection 0.105.0__py3-none-any.whl → 0.106.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uk_bin_collection/tests/input.json +29 -5
- uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py +41 -25
- uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py +68 -0
- uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py +60 -0
- uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py +115 -36
- uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py +7 -3
- uk_bin_collection/uk_bin_collection/councils/StocktonOnTeesCouncil.py +159 -0
- uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py +14 -24
- {uk_bin_collection-0.105.0.dist-info → uk_bin_collection-0.106.0.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.105.0.dist-info → uk_bin_collection-0.106.0.dist-info}/RECORD +13 -10
- {uk_bin_collection-0.105.0.dist-info → uk_bin_collection-0.106.0.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.105.0.dist-info → uk_bin_collection-0.106.0.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.105.0.dist-info → uk_bin_collection-0.106.0.dist-info}/entry_points.txt +0 -0
 
uk_bin_collection/tests/input.json

@@ -484,6 +484,20 @@
         "url": "https://www.fenland.gov.uk/article/13114/",
         "wiki_name": "Fenland District Council"
     },
+    "FifeCouncil": {
+        "url": "https://www.fife.gov.uk",
+        "wiki_command_url_override": "https://www.fife.gov.uk",
+        "uprn": "320203521",
+        "wiki_name": "Fife Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
+    "FlintshireCountyCouncil": {
+        "url": "https://digital.flintshire.gov.uk",
+        "wiki_command_url_override": "https://digital.flintshire.gov.uk",
+        "uprn": "100100213710",
+        "wiki_name": "Flintshire County Council",
+        "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
+    },
     "ForestOfDeanDistrictCouncil": {
         "house_number": "ELMOGAL, PARKEND ROAD, BREAM, LYDNEY",
         "postcode": "GL15 6JT",
@@ -754,10 +768,12 @@
         "wiki_note": "Pass the house name/number plus the name of the street with the postcode parameter, wrapped in double quotes.  Check the address in the web site first. This version will only pick the first SHOW button returned by the search or if it is fully unique.  The search is not very predictable (e.g. house number 4 returns 14,24,4,44 etc.)."
     },
     "MidlothianCouncil": {
-        "…
-        "…
+        "house_number": "52",
+        "postcode": "EH19 2EB",
+        "skip_get_url": true,
+        "url": "https://www.midlothian.gov.uk/info/1054/bins_and_recycling/343/bin_collection_days",
         "wiki_name": "Midlothian Council",
-        "wiki_note": "…
+        "wiki_note": "Pass the house name/number wrapped in double quotes along with the postcode parameter"
     },
     "MidSussexDistrictCouncil": {
         "house_number": "OAKLANDS, OAKLANDS ROAD RH16 1SS",
@@ -1141,8 +1157,8 @@
         "wiki_name": "South Tyneside Council"
     },
     "SouthwarkCouncil": {
-        "url": "https://…
-        "wiki_command_url_override": "https://…
+        "url": "https://services.southwark.gov.uk/bins/lookup/",
+        "wiki_command_url_override": "https://services.southwark.gov.uk/bins/lookup/XXXXXXXX",
         "uprn": "200003469271",
         "wiki_name": "Southwark Council",
         "wiki_note": "Replace XXXXXXXX with UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN."
@@ -1180,6 +1196,14 @@
         "wiki_name": "Stockport Borough Council",
         "wiki_note": "Replace XXXXXXXX with UPRN."
     },
+    "StocktonOnTeesCouncil": {
+        "house_number": "24",
+        "postcode": "TS20 2RD",
+        "skip_get_url": true,
+        "url": "https://www.stockton.gov.uk",
+        "web_driver": "http://selenium:4444",
+        "wiki_name": "Stockton On Tees Council"
+    },
     "StokeOnTrentCityCouncil": {
         "url": "https://www.stoke.gov.uk/jadu/custom/webserviceLookUps/BarTecWebServices_missed_bin_calendar.php?UPRN=3455121482",
         "wiki_command_url_override": "https://www.stoke.gov.uk/jadu/custom/webserviceLookUps/BarTecWebServices_missed_bin_calendar.php?UPRN=XXXXXXXXXX",
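The entries above feed the council parsers through keyword arguments: "uprn", "postcode" and "web_driver" are passed straight through, and the parsers in this release read the house number via the "paon" keyword. A minimal sketch of that wiring, assuming the "house_number" field maps to "paon" and that CouncilClass can be instantiated directly; the call hits the live Fife endpoint, so treat it as illustrative only.

# Illustrative wiring only: how an input.json entry's fields reach parse_data as kwargs.
from uk_bin_collection.uk_bin_collection.councils.FifeCouncil import CouncilClass

entry = {"uprn": "320203521", "url": "https://www.fife.gov.uk"}  # values from the FifeCouncil entry above

parser = CouncilClass()
result = parser.parse_data(page="", uprn=entry["uprn"])  # FifeCouncil ignores `page` and calls its own API
print(result["bins"])  # e.g. [{"type": "Blue", "collectionDate": "19/11/2024"}, ...]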
uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py

@@ -1,4 +1,4 @@
-from typing import Dict, Any
+from typing import Dict, List, Any
 from bs4 import BeautifulSoup
 from dateutil.relativedelta import relativedelta
 import requests
@@ -11,6 +11,30 @@ from uk_bin_collection.uk_bin_collection.common import (
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass


+def parse_bin_text(bin_type_str: str, bin_date_str: str) -> List[Dict[str, str]]:
+    """
+    Takes a raw bin and date string, parses the bin(s) and date, then returns
+    a list of bins with their date.
+    """
+
+    bins = []
+
+    if bin_date_str == "Today":
+        bin_date = datetime.today()
+    elif bin_date_str == "Tomorrow":
+        bin_date = datetime.today() + relativedelta(days=1)
+    else:
+        bin_date = datetime.strptime(bin_date_str, "%A, %B %d, %Y")
+
+    for bin_type in bin_type_str.split(", "):
+        bins.append({
+            "type": bin_type.strip() + " bin",
+            "collectionDate": bin_date.strftime(date_format)
+        })
+
+    return bins
+
+
 class CouncilClass(AbstractGetBinDataClass):
     """
     Concrete classes have to implement all abstract operations of the
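The new parse_bin_text helper pulls the "Today"/"Tomorrow"/long-date handling out of parse_data so the next-collection panel and the upcoming-collections table can share it. A quick illustration of its output, assuming date_format from the shared common module is the usual %d/%m/%Y:

# Illustration only; the actual dates for "Tomorrow" depend on the day the scraper runs.
parse_bin_text("Grey, Brown", "Tomorrow")
# -> [{"type": "Grey bin", "collectionDate": "<tomorrow as dd/mm/yyyy>"},
#     {"type": "Brown bin", "collectionDate": "<tomorrow as dd/mm/yyyy>"}]

parse_bin_text("Green", "Friday, November 22, 2024")
# -> [{"type": "Green bin", "collectionDate": "22/11/2024"}]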
@@ -73,37 +97,29 @@ class CouncilClass(AbstractGetBinDataClass):
         bin_date_str = highlight_content.find(
             "em", {"class": "ui-bin-next-date"}
         ).text.strip()
-        …
-        …
-
-        )
+        bin_type_str = highlight_content.find(
+            "p", {"class": "ui-bin-next-type"}
+        ).text.strip()

-        …
-            bin_date = datetime.today()
-        elif bin_date_str == "Tomorrow":
-            bin_date = datetime.today() + relativedelta(days=1)
-        else:
-            bin_date = datetime.strptime(bin_date_str, "%A, %B %d, %Y")
+        data["bins"].extend(parse_bin_text(bin_type_str, bin_date_str))

-        …
-
-
-        }
-        data["bins"].append(dict_data)
+        # Hold bins we already got from next collection, to avoid re-adding
+        # from upcoming collections.
+        used_bins = set(bin["type"] for bin in data["bins"])

         # Upcoming collections
         upcoming_collections = results[1].find("tbody").find_all("tr")
         for row in upcoming_collections:
             columns = row.find_all("td")
             bin_date_str = columns[0].text.strip()
-            …
-
-            …
-
-            …
-                "…
-
-
-
+            bin_type_str = columns[1].text.strip()
+
+            # Only add to bin list if not already present.
+            for bin in parse_bin_text(bin_type_str, bin_date_str):
+                if bin["type"] not in used_bins:
+                    data["bins"].append(bin)
+
+                    # Add to used bins, so future collections are not re-added.
+                    used_bins.add(bin["type"])

         return data
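The refactored parse_data records each bin type captured from the "next collection" highlight in used_bins, so the upcoming-collections table only contributes types not already present. A standalone sketch of that guard with invented data:

# Invented data; demonstrates the set-based dedupe used above.
next_collection = [{"type": "Grey bin", "collectionDate": "15/11/2024"}]
upcoming = [
    {"type": "Grey bin", "collectionDate": "29/11/2024"},   # duplicate type -> skipped
    {"type": "Brown bin", "collectionDate": "22/11/2024"},  # new type -> kept
]

bins = list(next_collection)
used_bins = set(b["type"] for b in bins)
for b in upcoming:
    if b["type"] not in used_bins:
        bins.append(b)
        used_bins.add(b["type"])
# bins -> Grey bin (15/11/2024) and Brown bin (22/11/2024), one entry per type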
uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py (new file)

@@ -0,0 +1,68 @@
+from datetime import datetime
+
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        # Get and check UPRN
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        API_URL = "https://www.fife.gov.uk/api/custom?action=powersuite_bin_calendar_collections&actionedby=bin_calendar&loadform=true&access=citizen&locale=en"
+        AUTH_URL = "https://www.fife.gov.uk/api/citizen?preview=false&locale=en"
+        AUTH_KEY = "Authorization"
+
+        r = requests.get(AUTH_URL)
+        r.raise_for_status()
+        auth_token = r.headers[AUTH_KEY]
+
+        post_data = {
+            "name": "bin_calendar",
+            "data": {
+                "uprn": user_uprn,
+            },
+            "email": "",
+            "caseid": "",
+            "xref": "",
+            "xref1": "",
+            "xref2": "",
+        }
+
+        headers = {
+            "referer": "https://www.fife.gov.uk/services/forms/bin-calendar",
+            "accept": "application/json",
+            "content-type": "application/json",
+            AUTH_KEY: auth_token,
+        }
+
+        r = requests.post(API_URL, data=json.dumps(post_data), headers=headers)
+        r.raise_for_status()
+
+        result = r.json()
+
+        for collection in result["data"]["tab_collections"]:
+            dict_data = {
+                "type": collection["colour"],
+                "collectionDate": datetime.strptime(
+                    collection["date"],
+                    "%A, %B %d, %Y",
+                ).strftime("%d/%m/%Y"),
+            }
+            bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
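The loop over result["data"]["tab_collections"] implies a response shape like the one below; the payload here is invented to show what the parser expects, not a captured response from the Fife endpoint.

# Invented example of the JSON shape the parse_data loop assumes.
result = {
    "data": {
        "tab_collections": [
            {"colour": "Blue", "date": "Tuesday, November 19, 2024"},
            {"colour": "Brown", "date": "Tuesday, November 26, 2024"},
        ]
    }
}
# Each item becomes {"type": "<colour>", "collectionDate": "19/11/2024"} via strptime/strftime,
# and the final list is sorted by that dd/mm/yyyy date.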
uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py (new file)

@@ -0,0 +1,60 @@
+import requests
+from bs4 import BeautifulSoup
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+
+        user_uprn = kwargs.get("uprn")
+        check_uprn(user_uprn)
+        bindata = {"bins": []}
+
+        URI = f"https://digital.flintshire.gov.uk/FCC_BinDay/Home/Details2/{user_uprn}"
+
+        # Make the GET request
+        response = requests.get(URI)
+
+        # Parse the HTML content
+        soup = BeautifulSoup(response.content, "html.parser")
+
+        # Adjust these tags and classes based on actual structure
+        # Example for finding collection dates and types
+        bin_collections = soup.find_all(
+            "div", class_="col-md-12 col-lg-12 col-sm-12 col-xs-12"
+        )  # Replace with actual class name
+
+        # Extracting and printing the schedule data
+        schedule = []
+        for collection in bin_collections:
+            dates = collection.find_all("div", class_="col-lg-2 col-md-2 col-sm-2")
+            bin_type = collection.find("div", class_="col-lg-3 col-md-3 col-sm-3")
+
+            if dates[0].text.strip() == "Date of Collection":
+                continue
+
+            bin_types = bin_type.text.strip().split(" / ")
+            date = dates[0].text.strip()
+
+            # Loop through the dates for each collection type
+            for bin_type in bin_types:
+
+                dict_data = {
+                    "type": bin_type,
+                    "collectionDate": date,
+                }
+                bindata["bins"].append(dict_data)
+
+        bindata["bins"].sort(
+            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+        )
+        return bindata
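The closing sort parses collectionDate with "%d/%m/%Y", so this scraper relies on the Flintshire page rendering dates as day/month/year; any other format would raise ValueError at the sort step. A tiny check of that assumption with invented rows:

# Invented rows; shows the ordering the final sort produces when dates are dd/mm/yyyy.
from datetime import datetime

bins = [
    {"type": "Brown", "collectionDate": "29/11/2024"},
    {"type": "Black", "collectionDate": "22/11/2024"},
]
bins.sort(key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y"))
# -> Black (22/11/2024) first, then Brown (29/11/2024)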
uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py

@@ -1,3 +1,5 @@
+from urllib.parse import quote, urljoin
+
 from bs4 import BeautifulSoup

 from uk_bin_collection.uk_bin_collection.common import *
@@ -12,57 +14,134 @@ class CouncilClass(AbstractGetBinDataClass):
     implementation.
     """

+    BASE_URL = "https://www.midlothian.gov.uk"
+    DIRECTORY_URL = f"{BASE_URL}/site/scripts/directory_search.php?directoryID=35&keywords={{}}&search=Search"
+    BIN_TYPES = {
+        "Next recycling collection": "Recycling",
+        "Next grey bin collection": "Grey Bin",
+        "Next brown bin collection": "Brown Bin",
+        "Next food bin collection": "Food Bin",
+    }
+
     def parse_data(self, page: str, **kwargs) -> dict:
-        # Parse the HTML content using BeautifulSoup
-        soup = BeautifulSoup(page.text, features="html.parser")

-        …
+        house_identifier = kwargs.get(
+            "paon", ""
+        ).strip()  # Could be house number or name
+        postcode = kwargs.get("postcode")
+
+        # Check if both house identifier and postcode are provided
+        if not house_identifier:
+            print("Error: House identifier (number or name) must be provided.")
+            return {"bins": []}
+
+        if not postcode:
+            print("Error: Postcode must be provided.")
+            return {"bins": []}
+
+        check_postcode(postcode)
+        check_paon(house_identifier)
+
         data = {"bins": []}
+        search_url = self.DIRECTORY_URL.format(quote(postcode))
+
+        try:
+            search_results_html = requests.get(search_url)
+            search_results_html.raise_for_status()
+
+            soup = BeautifulSoup(search_results_html.text, "html.parser")
+            address_link = self._get_result_by_identifier(soup, house_identifier)
+
+            if address_link:
+                collections_url = urljoin(search_url, address_link["href"])
+                bin_collection_data = self._fetch_bin_collection_data(collections_url)
+
+                if bin_collection_data:
+                    data["bins"].extend(bin_collection_data)
+
+        except requests.RequestException as e:
+            print(f"Warning: Failed to fetch data from {search_url}. Error: {e}")
+
+        return data

-
-        …
+    def _get_result_by_identifier(self, soup, identifier: str) -> list:
+        """Extract the result link that matches the given house number or house name."""
+        try:
+            results_list = (
+                soup.find("article", class_="container")
+                .find("h2", text="Search results")
+                .find_next("ul", class_="item-list item-list__rich")
+            )
+
+            pattern = re.compile(re.escape(identifier.lower()) + r"[ ,]")
+
+            for item in results_list.find_all("li"):
+                address_link = item.find("a")
+                if address_link:
+                    link_text = address_link.text.strip().lower()
+                    if pattern.match(link_text):
+                        return address_link
+
+            print(f"Warning: No results found for identifier '{identifier}'.")
+            return None  # Return None if no match is found
+
+        except AttributeError as e:
+            print(f"Warning: Could not find the search results. Error: {e}")
+            return None  # Return None if no result found
+
+    def _fetch_bin_collection_data(self, url: str) -> list:
+        """Fetch and parse bin collection data from the given URL."""
+        try:
+            bin_collection_html = requests.get(url)
+            bin_collection_html.raise_for_status()
+
+            soup = BeautifulSoup(bin_collection_html.text, "html.parser")
+            bin_collections = soup.find("ul", class_="data-table")
+
+            if bin_collections:
+                return self._parse_bin_collection_items(
+                    bin_collections.find_all("li")[2:]  # Skip the first two items
+                )
+
+        except requests.RequestException as e:
+            print(
+                f"Warning: Failed to fetch bin collection data from {url}. Error: {e}"
+            )
+
+        return []  # Return an empty list on error
+
+    def _parse_bin_collection_items(self, bin_items: list) -> list:
+        """Parse bin collection items into a structured format."""
+        parsed_bins = []
+
+        for bin_item in bin_items:
+            bin_type = None
+            try:
+                if bin_item.h2 and bin_item.h2.text.strip() in self.BIN_TYPES:
+                    bin_type = self.BIN_TYPES[bin_item.h2.text.strip()]

                 bin_collection_date = None
-                …
-                if bin.div and bin.div.text.strip():
+                if bin_item.div and bin_item.div.text.strip():
                     try:
-                        # Parse the collection date from the div text and format it
                         bin_collection_date = datetime.strptime(
-                            …
-                            "%A %d/%m/%Y",
+                            bin_item.div.text.strip(), "%A %d/%m/%Y"
                         ).strftime(date_format)
                     except ValueError:
-                        …
-
+                        print(
+                            f"Warning: Date parsing failed for {bin_item.div.text.strip()}."
+                        )

-                # If both bin type and collection date are identified, add to the data
                 if bin_type and bin_collection_date:
-                    …
+                    parsed_bins.append(
                         {
                             "type": bin_type,
                             "collectionDate": bin_collection_date,
                         }
                     )
+                else:
+                    print(f"Warning: Missing data for bin item: {bin_item}")

-
-
+            except Exception as e:
+                print(f"Warning: An error occurred while parsing bin item. Error: {e}")
+
+        return parsed_bins
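_get_result_by_identifier anchors its regex at the start of each lowercased result link and requires a space or comma straight after the identifier, which stops house number 52 from matching 152 or 521. A small check with invented addresses:

# Invented addresses; mirrors the pattern built in _get_result_by_identifier.
import re

identifier = "52"
pattern = re.compile(re.escape(identifier.lower()) + r"[ ,]")

print(bool(pattern.match("52 main street, bonnyrigg, eh19 2eb")))   # True
print(bool(pattern.match("152 main street, bonnyrigg, eh19 2eb")))  # False - match() anchors at the start
print(bool(pattern.match("521 main street, bonnyrigg, eh19 2eb")))  # False - needs a space or comma after "52"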
uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py

@@ -19,7 +19,7 @@ class CouncilClass(AbstractGetBinDataClass):
         check_uprn(user_uprn)
         data = {"bins": []}

-        baseurl = "https://…
+        baseurl = "https://services.southwark.gov.uk/bins/lookup/"
         url = baseurl + user_uprn

         headers = {
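With the new base URL the request is simply the base plus the UPRN, which is what the updated wiki_command_url_override in input.json documents:

# Using the UPRN from the SouthwarkCouncil test entry above.
baseurl = "https://services.southwark.gov.uk/bins/lookup/"
user_uprn = "200003469271"
url = baseurl + user_uprn
# -> "https://services.southwark.gov.uk/bins/lookup/200003469271"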
@@ -74,9 +74,13 @@ class CouncilClass(AbstractGetBinDataClass):
             data["bins"].append(dict_data)

         # Extract food waste collection information
-        food_section = soup.find(…
+        food_section = soup.find(
+            "div", {"aria-labelledby": "domesticFoodCollectionTitle"}
+        )
         if food_section:
-            food_title = food_section.find(…
+            food_title = food_section.find(
+                "p", {"id": "domesticFoodCollectionTitle"}
+            ).text
             food_next_collection = (
                 food_section.find(text=lambda text: "Next collection" in text)
                 .strip()
@@ -0,0 +1,159 @@
+import time
+
+from bs4 import BeautifulSoup
+from dateutil.relativedelta import relativedelta
+from selenium.webdriver.common.by import By
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.ui import Select
+from selenium.webdriver.support.wait import WebDriverWait
+
+from uk_bin_collection.uk_bin_collection.common import *
+from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
+
+
+# import the wonderful Beautiful Soup and the URL grabber
+class CouncilClass(AbstractGetBinDataClass):
+    """
+    Concrete classes have to implement all abstract operations of the
+    base class. They can also override some operations with a default
+    implementation.
+    """
+
+    def parse_data(self, page: str, **kwargs) -> dict:
+        driver = None
+        try:
+            data = {"bins": []}
+            collections = []
+            user_paon = kwargs.get("paon")
+            user_postcode = kwargs.get("postcode")
+            web_driver = kwargs.get("web_driver")
+            headless = kwargs.get("headless")
+            check_paon(user_paon)
+            check_postcode(user_postcode)
+
+            # Create Selenium webdriver
+            driver = create_webdriver(web_driver, headless, None, __name__)
+            driver.get("https://www.stockton.gov.uk/bin-collection-days")
+
+            # Wait for the postcode field to appear then populate it
+            inputElement_postcode = WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_ADDRESSLOOKUPPOSTCODE",
+                    )
+                )
+            )
+            inputElement_postcode.send_keys(user_postcode)
+
+            # Click search button
+            findAddress = WebDriverWait(driver, 10).until(
+                EC.presence_of_element_located(
+                    (
+                        By.ID,
+                        "LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_ADDRESSLOOKUPSEARCH",
+                    )
+                )
+            )
+            findAddress.click()
+
+            WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (
+                        By.XPATH,
+                        ""
+                        "//*[@id='LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_ADDRESSLOOKUPADDRESS']//option[contains(., '"
+                        + user_paon
+                        + "')]",
+                    )
+                )
+            ).click()
+
+            # Wait for the submit button to appear, then click it to get the collection dates
+            WebDriverWait(driver, 30).until(
+                EC.presence_of_element_located(
+                    (
+                        By.XPATH,
+                        '//*[@id="LOOKUPBINDATESBYADDRESSSKIPOUTOFREGION_COLLECTIONDETAILS2"]/div',
+                    )
+                )
+            )
+            time.sleep(2)
+
+            soup = BeautifulSoup(driver.page_source, features="html.parser")
+            soup.prettify()
+
+            rubbish_div = soup.find(
+                "p",
+                {
+                    "class": "myaccount-block__date myaccount-block__date--bin myaccount-block__date--waste"
+                },
+            )
+            rubbish_date = rubbish_div.text
+            if rubbish_date == "Today":
+                rubbish_date = datetime.now()
+            else:
+                rubbish_date = datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(rubbish_date).strip(),
+                    "%a %d %B %Y",
+                ).replace(year=datetime.now().year)
+
+            recycling_div = soup.find(
+                "p",
+                {
+                    "class": "myaccount-block__date myaccount-block__date--bin myaccount-block__date--recycling"
+                },
+            )
+            recycling_date = recycling_div.text
+            if recycling_date == "Today":
+                recycling_date = datetime.now()
+            else:
+                recycling_date = datetime.strptime(
+                    remove_ordinal_indicator_from_date_string(recycling_date).strip(),
+                    "%a %d %B %Y",
+                )
+
+            garden_div = soup.find(
+                "div",
+                {
+                    "class": "myaccount-block__item myaccount-block__item--bin myaccount-block__item--garden"
+                },
+            )
+            garden_date = garden_div.find("strong")
+            if garden_date.text.strip() == "Date not available":
+                print("Garden waste unavailable")
+            else:
+                if garden_date.text == "Today":
+                    garden_date = datetime.now()
+                    collections.append(("Garden waste bin", garden_date))
+                else:
+                    garden_date = datetime.strptime(
+                        remove_ordinal_indicator_from_date_string(
+                            garden_date.text
+                        ).strip(),
+                        "%a %d %B %Y",
+                    )
+                    collections.append(("Garden waste bin", garden_date))
+
+            collections.append(("Rubbish bin", rubbish_date))
+            collections.append(("Recycling bin", recycling_date))
+
+            ordered_data = sorted(collections, key=lambda x: x[1])
+            for item in ordered_data:
+                dict_data = {
+                    "type": item[0].capitalize(),
+                    "collectionDate": item[1].strftime(date_format),
+                }
+                data["bins"].append(dict_data)
+
+            print()
+        except Exception as e:
+            # Here you can log the exception if needed
+            print(f"An error occurred: {e}")
+            # Optionally, re-raise the exception if you want it to propagate
+            raise
+        finally:
+            # This block ensures that the driver is closed regardless of an exception
+            if driver:
+                driver.quit()
+        return data
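The new module's date handling hinges on two cases: the literal string "Today", and dated strings carrying an ordinal suffix that are parsed with "%a %d %B %Y". A minimal sketch of that normalisation step, using a local regex as a stand-in for the package's remove_ordinal_indicator_from_date_string helper (the helper's exact behaviour, the function names and the example strings below are assumptions for illustration):

import re
from datetime import datetime

def strip_ordinal(date_text: str) -> str:
    # Stand-in for remove_ordinal_indicator_from_date_string:
    # "5th June" -> "5 June"; only st/nd/rd/th suffixes are handled here.
    return re.sub(r"(\d{1,2})(st|nd|rd|th)", r"\1", date_text)

def normalise_collection_date(date_text: str) -> datetime:
    # Same two-way branch as the scraper above: today vs. a dated string.
    if date_text == "Today":
        return datetime.now()
    return datetime.strptime(strip_ordinal(date_text).strip(), "%a %d %B %Y")

# Example inputs in the rough shape the page appears to use (invented values):
print(normalise_collection_date("Wed 5th June 2024"))
print(normalise_collection_date("Today"))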
@@ -77,44 +77,34 @@ class CouncilClass(AbstractGetBinDataClass):
             rubbish_div = soup.find(
                 "div", {"id": "FINDYOURBINDAYS_RUBBISHDATE_OUTERDIV"}
             )
-
-
-                rubbish_date = datetime.
-
-                    "%A %d %B",
-                ).replace(year=datetime.now().year)
-            except:
-                rubbish_date = rubbish_div.find_all("div")[3]
+            rubbish_date = rubbish_div.find_all("div")[2]
+            if rubbish_date.text == "Today":
+                rubbish_date = datetime.now()
+            else:
                 rubbish_date = datetime.strptime(
                     rubbish_date.text,
                     "%A %d %B",
                 ).replace(year=datetime.now().year)
+
             recycling_div = soup.find(
                 "div", {"id": "FINDYOURBINDAYS_RECYCLINGDATE_OUTERDIV"}
             )
-
-
+            recycling_date = recycling_div.find_all("div")[2]
+            if recycling_date.text == "Today":
+                recycling_date = datetime.now()
+            else:
                 recycling_date = datetime.strptime(
                     recycling_date.text,
                     "%A %d %B",
                 ).replace(year=datetime.now().year)
-
-                rubbish_date = recycling_div.find_all("div")[3]
-                rubbish_date = datetime.strptime(
-                    rubbish_date.text,
-                    "%A %d %B",
-                ).replace(year=datetime.now().year)
+
             food_div = soup.find(
                 "div", {"id": "FINDYOURBINDAYS_RECYCLINGDATE_OUTERDIV"}
             )
-
-
-                food_date = datetime.
-
-                    "%A %d %B",
-                ).replace(year=datetime.now().year)
-            except:
-                food_date = food_div.find_all("div")[3]
+            food_date = food_div.find_all("div")[2]
+            if food_date.text == "Today":
+                food_date = datetime.now()
+            else:
                 food_date = datetime.strptime(
                     food_date.text,
                     "%A %d %B",
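Both the old and the new versions of this module parse day-and-month strings with no year ("%A %d %B") and then pin the result to the current year with .replace(year=...). A small sketch of that exact pattern (the function name and example input are illustrative; the year-boundary caveat in the comment is not something this hunk addresses):

from datetime import datetime

def parse_day_month(date_text: str) -> datetime:
    # Same pattern as the hunk above: the page gives e.g. "Friday 7 June"
    # with no year, so the current year is substituted in afterwards.
    parsed = datetime.strptime(date_text, "%A %d %B").replace(
        year=datetime.now().year
    )
    # Caveat: a late-December run that sees a January date would land in the
    # past; callers wanting true "next collection" semantics would need their
    # own rollover check on top of this.
    return parsed

print(parse_day_month("Friday 7 June"))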
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=
+uk_bin_collection/tests/input.json,sha256=0F1vZN1L8JeOh5ECNrOoS-kOg0kh39tkE6tyV1ZqAi4,75381
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=LrOSt_loA1Mw3vTqaO2LpaDMu7rYJy6k5Kr-EOBln7s,3424
@@ -19,7 +19,7 @@ uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP1
 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
 uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py,sha256=Sd4-pbv0QZsR7soxvXYqsfdOUIqZqS6notyoZthG77s,9182
-uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py,sha256=jHLSfRU9lPDUn249mUgNPc23UElU9SKmDon917S6ct0,4733
 uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py,sha256=UBHINX8WknQfnHU43Wp5kXAqmHl00aWM0Fh8NQdWBZA,3244
 uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py,sha256=VPWGljnH4C3q8qs5ZmCtqjNjgWQvviALzjk00q3EZeQ,2632
 uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py,sha256=N_TPiIv8VBzN3rY0p3JtLlxSEru-6k1wW4UNIhN5X1M,3709
@@ -79,6 +79,8 @@ uk_bin_collection/uk_bin_collection/councils/ErewashBoroughCouncil.py,sha256=QTQ
 uk_bin_collection/uk_bin_collection/councils/FalkirkCouncil.py,sha256=C3OA9PEhBsCYPzwsSdqVi_SbF8uiB186i2XfHWKd3VI,1694
 uk_bin_collection/uk_bin_collection/councils/FarehamBoroughCouncil.py,sha256=25QxeN5q3ad1Wwexs2d-B7ooH0ru6pOUx58413FOTY4,2352
 uk_bin_collection/uk_bin_collection/councils/FenlandDistrictCouncil.py,sha256=sFrnKzIE2tIcz0YrC6A9HcevzgNdf6E6_HLGMWDKtGw,2513
+uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py,sha256=eP_NnHtBLyflRUko9ubi_nxUPb7qg9SbaaSxqWZxNEs,2157
+uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py,sha256=RvPHhGbzP3mcjgWe2rIQux43UuDH7XofJGIKs7wJRe0,2060
 uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py,sha256=xO5gqgsN9K-cQsuDoQF7ycZkjNdCPAQwIYOCFWxFJ_Y,4504
 uk_bin_collection/uk_bin_collection/councils/GatesheadCouncil.py,sha256=SRCgYhYs6rv_8C1UEDVORHZgXxcJkoZBjzdYS4Lu-ew,4531
 uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py,sha256=XzfFMCwclh9zAJgsbaj4jywjdiH0wPaFicaVsLrN3ms,2297
@@ -117,7 +119,7 @@ uk_bin_collection/uk_bin_collection/councils/MansfieldDistrictCouncil.py,sha256=
 uk_bin_collection/uk_bin_collection/councils/MertonCouncil.py,sha256=3Y2Un4xXo1sCcMsudynODSzocV_mMofWkX2JqONDb5o,1997
 uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py,sha256=oOWwU5FSgGej2Mv7FQ66N-EzS5nZgmGsd0WnfLWUc1I,5238
 uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py,sha256=AZgC9wmDLEjUOtIFvf0ehF5LHturXTH4DkE3ioPSVBA,6254
-uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py,sha256=mM5-itJDNhjsT5UEjSFfWppmfmPFSns4u_1QblewuFU,5605
 uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py,sha256=3olsWa77L34vz-c7NgeGK9xmNuR4Ws_oAk5D4UpIkPw,2005
 uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py,sha256=xWR5S0gwQu9gXxjl788Wux1KaC0CT7ZFw0iXuRLZCEM,5599
 uk_bin_collection/uk_bin_collection/councils/NeathPortTalbotCouncil.py,sha256=ychYR2nsyk2UIb8tjWaKrLUT4hxSsHN558l3RqZ0mjw,5635
@@ -172,12 +174,13 @@ uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py,sha256=ThO-o
 uk_bin_collection/uk_bin_collection/councils/SouthOxfordshireCouncil.py,sha256=zW4bN3hcqNoK_Y0-vPpuZs3K0LTPvApu6_v9K-D7WjE,3879
 uk_bin_collection/uk_bin_collection/councils/SouthRibbleCouncil.py,sha256=OdexbeiI5WsCfjlsnHjAce8oGF5fW-n7q2XOuxcpHzw,3604
 uk_bin_collection/uk_bin_collection/councils/SouthTynesideCouncil.py,sha256=dxXGrJfg_fn2IPTBgq6Duwy0WY8GYLafMuisaCjOnbs,3426
-uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py,sha256=Z6JIbUt3yr4oG60n1At4AjPIGrs7Qzn_sDNY-TsS62E,4882
 uk_bin_collection/uk_bin_collection/councils/StAlbansCityAndDistrictCouncil.py,sha256=mPZz6Za6kTSkrfHnj0OfwtnpRYR1dKvxbuFEKnWsiL8,1451
 uk_bin_collection/uk_bin_collection/councils/StHelensBC.py,sha256=c7ZM8gnUkKdz9GYIhFLzTtwN0KAoMEKomTWDVbtJIpM,2069
 uk_bin_collection/uk_bin_collection/councils/StaffordBoroughCouncil.py,sha256=9Qj4HJI7Dbiqb2mVSG2UtkBe27Y7wvQ5SYFTwGzJ5g0,2292
 uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py,sha256=_N8Cg26EbTaKp0RsWvQuELVcZDHbT2BlD2LW8qhkS_Q,4361
 uk_bin_collection/uk_bin_collection/councils/StockportBoroughCouncil.py,sha256=v0HmioNVRoU1-9OnLJl2V3M5pVR1aVu1BgOLHFR1Sf4,1429
+uk_bin_collection/uk_bin_collection/councils/StocktonOnTeesCouncil.py,sha256=obaBgsmIJ95Ah7KaTNWdU107tZDPVuuJox0mGUoGjNk,6070
 uk_bin_collection/uk_bin_collection/councils/StokeOnTrentCityCouncil.py,sha256=KM0EgWeO7mk8lkozX0RCTfMchXdjrfBqIjCiOtB09aM,2884
 uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py,sha256=DMTAcXT_lay8Cl1hBbzf_LN7-GwTDGxT3Ug9QJkaF9Y,3936
 uk_bin_collection/uk_bin_collection/councils/StroudDistrictCouncil.py,sha256=9bYWppi7ViLGHL4VEg--nFn28MLYJYbiEntull1uZxU,3561
@@ -207,7 +210,7 @@ uk_bin_collection/uk_bin_collection/councils/WatfordBoroughCouncil.py,sha256=zFk
 uk_bin_collection/uk_bin_collection/councils/WaverleyBoroughCouncil.py,sha256=tp9l7vdgSGRzNNG0pDfnNuFj4D2bpRJUJmAiTJ6bM0g,4662
 uk_bin_collection/uk_bin_collection/councils/WealdenDistrictCouncil.py,sha256=SvSSaLkx7iJjzypAwKkaJwegXkSsIQtUOS2V605kz1A,3368
 uk_bin_collection/uk_bin_collection/councils/WelhatCouncil.py,sha256=ikUft37dYNJghfe-_6Fskiq1JihqpLmLNj38QkKSUUA,2316
-uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py,sha256=2eHRlalZyY9jv_UsCWM9IYzOpRdhce2sEW5NtygEnpw,5513
 uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py,sha256=JFWUy4w0CKulGq16PfbRDKAdQEbokVEuabwlZYigdEU,4606
 uk_bin_collection/uk_bin_collection/councils/WestLothianCouncil.py,sha256=dq0jimtARvRkZiGbVFrXXZgY-BODtz3uYZ5UKn0bf64,4114
 uk_bin_collection/uk_bin_collection/councils/WestMorlandAndFurness.py,sha256=jbqV3460rn9D0yTBGWjpSe1IvWWcdGur5pzgj-hJcQ4,2513
@@ -227,8 +230,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=4s9ODGPAwPqwXc8SrTX5Wlfmizs3_58iXUtHc4Ir86o,1162
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
-uk_bin_collection-0.
+uk_bin_collection-0.106.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.106.0.dist-info/METADATA,sha256=VwTe4USVrMOyNReH2x0yO6HJRYUQwMkBNDnlIhFRbmM,17630
+uk_bin_collection-0.106.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+uk_bin_collection-0.106.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.106.0.dist-info/RECORD,,
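Each RECORD row follows the standard wheel convention: path, sha256=<urlsafe-base64 digest with padding stripped>, size in bytes. A quick sketch for checking one of the new entries against a locally unpacked wheel (the function name and the example path are assumptions; the digest encoding is the wheel RECORD format):

import base64
import hashlib
from pathlib import Path

def record_digest(path: Path):
    # Reproduces the RECORD encoding: urlsafe base64 of the SHA-256 digest,
    # trailing "=" padding removed, plus the file size in bytes.
    data = path.read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return digest.decode("ascii"), len(data)

# Hypothetical local path inside an unpacked 0.106.0 wheel:
# print(record_digest(Path("uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py")))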
{uk_bin_collection-0.105.0.dist-info → uk_bin_collection-0.106.0.dist-info}/entry_points.txt RENAMED
File without changes