uk_bin_collection-0.98.0-py3-none-any.whl → uk_bin_collection-0.98.2-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- uk_bin_collection/tests/input.json +1 -1
- uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py +11 -2
- uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py +66 -66
- uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py +71 -54
- {uk_bin_collection-0.98.0.dist-info → uk_bin_collection-0.98.2.dist-info}/METADATA +1 -1
- {uk_bin_collection-0.98.0.dist-info → uk_bin_collection-0.98.2.dist-info}/RECORD +9 -9
- {uk_bin_collection-0.98.0.dist-info → uk_bin_collection-0.98.2.dist-info}/LICENSE +0 -0
- {uk_bin_collection-0.98.0.dist-info → uk_bin_collection-0.98.2.dist-info}/WHEEL +0 -0
- {uk_bin_collection-0.98.0.dist-info → uk_bin_collection-0.98.2.dist-info}/entry_points.txt +0 -0
uk_bin_collection/tests/input.json
@@ -49,7 +49,7 @@
         "wiki_name": "Barnsley Metropolitan Borough Council",
         "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)."
     },
-    "BasildonCouncil": {
+    "BasildonCouncil": {
         "skip_get_url": true,
         "uprn": "10013350430",
         "url": "https://basildonportal.azurewebsites.net/api/getPropertyRefuseInformation",
uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py
@@ -70,10 +70,19 @@ class CouncilClass(AbstractGetBinDataClass):
         headless = kwargs.get("headless")
         web_driver = kwargs.get("web_driver")
         driver = create_webdriver(web_driver, headless, None, __name__)
-        page = "https://
+        page = "https://www.barnet.gov.uk/recycling-and-waste/bin-collections/find-your-bin-collection-day"
+
         driver.get(page)

-
+        # Wait for the element to be clickable
+        find_your_collection_button = WebDriverWait(driver, 10).until(
+            EC.element_to_be_clickable(
+                (By.XPATH, '//a[contains(text(), "Find your household collection day")]')
+            )
+        )
+
+        # Click the element
+        find_your_collection_button.click()

         try:
             accept_cookies = WebDriverWait(driver, timeout=10).until(
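
The Barnet change fills in the previously truncated collection-day URL and adds an explicit wait before following the entry link. Below is a minimal standalone sketch of that wait-then-click pattern; it uses plain Selenium rather than the project's create_webdriver() helper, and only the URL and XPath are taken from the diff:

# Sketch: explicit wait before clicking, as introduced in BarnetCouncil.py.
# Assumes a local Chrome/chromedriver install; the project instead builds
# its driver via create_webdriver().
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Chrome()
driver.get(
    "https://www.barnet.gov.uk/recycling-and-waste/bin-collections/find-your-bin-collection-day"
)

# Poll for up to 10 seconds until the link exists, is visible and is enabled;
# raises selenium.common.exceptions.TimeoutException otherwise.
button = WebDriverWait(driver, 10).until(
    EC.element_to_be_clickable(
        (By.XPATH, '//a[contains(text(), "Find your household collection day")]')
    )
)
button.click()
driver.quit()

element_to_be_clickable re-checks the element on every poll, so the click no longer races the page render the way a fixed sleep would.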
uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py
@@ -24,7 +24,7 @@ class CouncilClass(AbstractGetBinDataClass):
     def parse_data(self, page: str, **kwargs) -> dict:
         driver = None
         try:
-            page = "https://
+            page = "https://waste.bexley.gov.uk/waste"

            data = {"bins": []}

@@ -38,102 +38,102 @@ class CouncilClass(AbstractGetBinDataClass):
             driver = create_webdriver(web_driver, headless, None, __name__)
             driver.get(page)

-
-
-            iframe_presense = WebDriverWait(driver, 30).until(
-                EC.presence_of_element_located((By.ID, "fillform-frame-1"))
-            )
-
-            driver.switch_to.frame(iframe_presense)
-            wait = WebDriverWait(driver, 60)
-            start_btn = wait.until(
-                EC.element_to_be_clickable(
-                    (By.XPATH, "//button/span[contains(text(), 'Next')]")
-                )
-            )
-
-            start_btn.click()
+            wait = WebDriverWait(driver, 10)

             inputElement_postcodesearch = wait.until(
-                EC.element_to_be_clickable((By.ID, "
+                EC.element_to_be_clickable((By.ID, "pc"))
             )
             inputElement_postcodesearch.send_keys(user_postcode)

+
+
             find_address_btn = wait.until(
-                EC.element_to_be_clickable((By.XPATH, '//*[@id="
+                EC.element_to_be_clickable((By.XPATH, '//*[@id="sub"]'))
             )
             find_address_btn.click()

             dropdown_options = wait.until(
                 EC.presence_of_element_located(
-                    (By.XPATH, '//*[@id="
+                    (By.XPATH, '//*[@id="address"]')
                 )
             )
             time.sleep(2)
             dropdown_options.click()
             time.sleep(1)
-
+
+            # Wait for the element to be clickable
+            address = WebDriverWait(driver, 10).until(
+                EC.element_to_be_clickable(
+                    (By.XPATH, f'//li[contains(text(), "{user_paon}")]')
+                )
+            )
+
+            # Click the element
+            address.click()
+
+
+            submit_address = wait.until(
                 EC.presence_of_element_located(
-                    (By.XPATH, '//*[@id="
+                    (By.XPATH, '//*[@id="go"]')
                 )
             )
-            time.sleep(
-
-            dropdown_input.send_keys(Keys.ENTER)
+            time.sleep(2)
+            submit_address.click()

             results_found = wait.until(
-                EC.presence_of_element_located((By.CLASS_NAME, "found-content"))
-            )
-            finish_btn = wait.until(
                 EC.element_to_be_clickable(
-                    (By.XPATH,
+                    (By.XPATH, '//h1[contains(text(), "Your bin days")]')
+                )
             )
-
-            finish_btn.click()
+
             final_page = wait.until(
                 EC.presence_of_element_located(
-                    (By.CLASS_NAME, "
+                    (By.CLASS_NAME, "waste__collections")
                 )
             )

             soup = BeautifulSoup(driver.page_source, features="html.parser")

-
-            # Define your XPath
-
-            for bin in bin_fields:
-                # Extract h3 text from the current element
-                h3_text = (
-                    bin.find("h3", class_="container-name").get_text(strip=True)
-                    if bin.find("h3", class_="container-name")
-                    else None
-                )
-
-                date_text = (
-                    bin.find("p", class_="container-status").get_text(strip=True)
-                    if bin.find("p", class_="container-status")
-                    else None
-                )
+            # Find all waste services

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            # Initialize the data dictionary
+            data = {"bins": []}
+            bin_sections = soup.find_all("h3", class_="waste-service-name")
+
+            # Loop through each bin field
+            for bin_section in bin_sections:
+                # Extract the bin type (e.g., "Brown Caddy", "Green Wheelie Bin", etc.)
+                bin_type = bin_section.get_text(strip=True).split("\n")[0]  # The first part is the bin type
+
+                # Find the next sibling <dl> tag that contains the next collection information
+                summary_list = bin_section.find_next("dl", class_="govuk-summary-list")
+
+                if summary_list:
+                    # Now, instead of finding by class, we'll search by text within the dt element
+                    next_collection_dt = summary_list.find("dt", string=lambda text: "Next collection" in text)
+
+                    if next_collection_dt:
+                        # Find the sibling <dd> tag for the collection date
+                        next_collection = next_collection_dt.find_next_sibling("dd").get_text(strip=True)
+
+                        if next_collection:
+                            try:
+                                # Parse the next collection date (assuming the format is like "Tuesday 15 October 2024")
+                                parsed_date = datetime.strptime(next_collection, "%A %d %B %Y")
+
+                                # Add the bin information to the data dictionary
+                                data["bins"].append({
+                                    "type": bin_type,
+                                    "collectionDate": parsed_date.strftime(date_format),
+                                })
+                            except ValueError as e:
+                                print(f"Error parsing date for {bin_type}: {e}")
+                        else:
+                            print(f"No next collection date found for {bin_type}")
+                    else:
+                        print(f"No 'Next collection' text found for {bin_type}")
+                else:
+                    print(f"No summary list found for {bin_type}")

         except Exception as e:
             # Here you can log the exception if needed
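
The rewritten Bexley parser drops the old iframe form flow (fillform-frame-1 and its 'Next' button) and, after the address steps, waits for the 'Your bin days' heading and scrapes the rendered page: each service is an <h3 class="waste-service-name"> followed by a govuk-summary-list whose 'Next collection' <dt> is paired with a <dd> holding the date. Here is a sketch of that dt/dd lookup against mock HTML (the fragment only imitates what waste.bexley.gov.uk renders, and "%d/%m/%Y" stands in for the project's date_format constant):

# Sketch: the h3 -> dl -> dt/dd extraction used by the new Bexley code,
# run on an inline mock of the council page's markup.
from datetime import datetime
from bs4 import BeautifulSoup

sample = """
<h3 class="waste-service-name">Green Wheelie Bin</h3>
<dl class="govuk-summary-list">
  <dt>Next collection</dt>
  <dd>Tuesday 15 October 2024</dd>
</dl>
"""

soup = BeautifulSoup(sample, "html.parser")
data = {"bins": []}
for section in soup.find_all("h3", class_="waste-service-name"):
    bin_type = section.get_text(strip=True)
    summary_list = section.find_next("dl", class_="govuk-summary-list")
    if summary_list is None:
        continue
    # Find the <dt> by its text (guarding against non-string nodes),
    # then read its paired <dd>.
    dt = summary_list.find("dt", string=lambda t: t and "Next collection" in t)
    if dt:
        raw_date = dt.find_next_sibling("dd").get_text(strip=True)
        parsed = datetime.strptime(raw_date, "%A %d %B %Y")
        data["bins"].append(
            {"type": bin_type, "collectionDate": parsed.strftime("%d/%m/%Y")}
        )

print(data)  # {'bins': [{'type': 'Green Wheelie Bin', 'collectionDate': '15/10/2024'}]}

Note the guard in the lambda: the diff's version ("Next collection" in text) raises a TypeError whenever BeautifulSoup hands it a <dt> whose string is None.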
uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py
@@ -7,28 +7,18 @@ from bs4 import BeautifulSoup
 from uk_bin_collection.uk_bin_collection.common import *
 from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass

-
-# import the wonderful Beautiful Soup and the URL grabber
 class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
     def parse_data(self, page: str, **kwargs) -> dict:

         user_postcode = kwargs.get("postcode")
         check_postcode(user_postcode)

-
-
-        )
-        requests.packages.urllib3.disable_warnings()
+        # Fetch the page content
+        root_url = "https://myproperty.molevalley.gov.uk/molevalley/api/live_addresses/{}?format=json".format(user_postcode)
         response = requests.get(root_url, verify=False)

         if not response.ok:
-            raise ValueError("Invalid server response code
+            raise ValueError("Invalid server response code retrieving data.")

         jsonData = response.json()

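
The Mole Valley rewrite repairs the request setup by building the live_addresses URL from the user's postcode and fetching JSON. A sketch of that lookup in isolation (the endpoint and response shape come from the diff; the postcode is a placeholder, and verify=False mirrors the scraper's existing handling of the council's TLS configuration):

# Sketch: the postcode -> address JSON lookup performed by the new code.
import requests
import urllib3

# Silence the InsecureRequestWarning that verify=False would otherwise emit.
urllib3.disable_warnings()

postcode = "RH4 1SJ"  # placeholder postcode, not taken from the diff
root_url = (
    "https://myproperty.molevalley.gov.uk/molevalley/api/live_addresses/{}?format=json"
).format(postcode)

response = requests.get(root_url, verify=False)
if not response.ok:
    raise ValueError("Invalid server response code retrieving data.")

json_data = response.json()
# Each feature carries the pre-rendered HTML that the parser scrapes next.
properties_found = json_data["results"]["features"]
print(f"{len(properties_found)} address(es) returned")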
@@ -37,7 +27,6 @@ class CouncilClass(AbstractGetBinDataClass):

         properties_found = jsonData["results"]["features"]

-        # If UPRN is provided, we can check a specific address.
         html_data = None
         uprn = kwargs.get("uprn")
         if uprn:
@@ -49,53 +38,81 @@ class CouncilClass(AbstractGetBinDataClass):
         if html_data is None:
             raise ValueError("No collection data found for UPRN provided.")
         else:
-            # If UPRN not provided, just use the first result
             html_data = properties_found[0]["properties"]["three_column_layout_html"]

-
-
+        # Conditionally replace the commented-out sections (<!-- ... -->)
+        if "<!--" in html_data and "-->" in html_data:
+            print("Commented-out section found, replacing comments...")
+            html_data = html_data.replace("<!--", "").replace("-->", "")
+        else:
+            print("No commented-out section found, processing as is.")
+
+        # Process the updated HTML data with BeautifulSoup
+        soup = BeautifulSoup(html_data, "html.parser")

         data = {"bins": []}
         all_collection_dates = []
-        regex_date = re.compile(r"
+        regex_date = re.compile(r"(\d{2}/\d{2}/\d{4})")  # Adjusted date regex
         regex_additional_collection = re.compile(r"We also collect (.*) on (.*) -")

+        # Debugging to verify the HTML content is parsed correctly
+        print("HTML content parsed successfully.")
+
         # Search for the 'Bins and Recycling' panel
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        bins_panel = soup.find("h2", string="Bins and Recycling")
+        if bins_panel:
+            panel = bins_panel.find_parent("div", class_="panel")
+            print("Found 'Bins and Recycling' panel.")
+
+            # Extract bin collection info from the un-commented HTML
+            for strong_tag in panel.find_all("strong"):
+                bin_type = strong_tag.text.strip()
+                collection_string = strong_tag.find_next("p").text.strip()
+
+                # Debugging output
+                print(f"Processing bin type: {bin_type}")
+                print(f"Collection string: {collection_string}")
+
+                match = regex_date.search(collection_string)
+                if match:
+                    collection_date = datetime.strptime(match.group(1), "%d/%m/%Y").date()
+                    data["bins"].append({
+                        "type": bin_type,
+                        "collectionDate": collection_date.strftime("%d/%m/%Y"),
+                    })
+                    all_collection_dates.append(collection_date)
+                else:
+                    # Add a debug line to show which collections are missing dates
+                    print(f"No valid date found for bin type: {bin_type}")
+
+            # Search for additional collections like electrical and textiles
+            for p in panel.find_all("p"):
+                additional_match = regex_additional_collection.match(p.text.strip())
+
+                # Debugging output for additional collections
+                if additional_match:
+                    bin_type = additional_match.group(1)
+                    print(f"Found additional collection: {bin_type}")
+                    if "each collection day" in additional_match.group(2):
+                        if all_collection_dates:
+                            collection_date = min(all_collection_dates)
+                            data["bins"].append({
                                 "type": bin_type,
                                 "collectionDate": collection_date.strftime("%d/%m/%Y"),
-            }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            }
-        )
-        break
-
-        return data
+                            })
+                        else:
+                            print("No collection dates available for additional collection.")
+                            raise ValueError("No valid bin collection dates found.")
+                else:
+                    print(f"No additional collection found in paragraph: {p.text.strip()}")
+        else:
+            raise ValueError("Unable to find 'Bins and Recycling' panel in the HTML data.")
+
+        # Debugging to check collected data
+        print(f"Collected bin data: {data}")
+
+        # Handle the case where no collection dates were found
+        if not all_collection_dates:
+            raise ValueError("No valid collection dates were found in the data.")
+
+        return data
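
Two details above carry the new parsing logic: the 'Bins and Recycling' panel arrives partly wrapped in HTML comments, which are stripped so BeautifulSoup can see the markup, and collection dates are pulled out of free text with a dd/mm/yyyy regex. A sketch of both steps on a mock fragment (selectors and regex are from the diff; the HTML is illustrative):

# Sketch: un-hiding commented-out markup, then regex-extracting the date,
# as the new MoleValleyDistrictCouncil.py does.
import re
from datetime import datetime
from bs4 import BeautifulSoup

html_data = """
<div class="panel"><h2>Bins and Recycling</h2>
<!-- <strong>Refuse</strong><p>Next collection 21/10/2024</p> -->
</div>
"""

# Strip comment delimiters so the hidden <strong>/<p> become real elements.
if "<!--" in html_data and "-->" in html_data:
    html_data = html_data.replace("<!--", "").replace("-->", "")

soup = BeautifulSoup(html_data, "html.parser")
regex_date = re.compile(r"(\d{2}/\d{2}/\d{4})")

panel = soup.find("h2", string="Bins and Recycling").find_parent("div", class_="panel")
for strong_tag in panel.find_all("strong"):
    bin_type = strong_tag.text.strip()
    collection_string = strong_tag.find_next("p").text.strip()
    match = regex_date.search(collection_string)
    if match:
        collection_date = datetime.strptime(match.group(1), "%d/%m/%Y").date()
        print(bin_type, collection_date.strftime("%d/%m/%Y"))  # Refuse 21/10/2024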
{uk_bin_collection-0.98.0.dist-info → uk_bin_collection-0.98.2.dist-info}/RECORD
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
 uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
 uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
-uk_bin_collection/tests/input.json,sha256=
+uk_bin_collection/tests/input.json,sha256=Lb2LZ6Q1LvG-yaG4KCjPIiKdLyK9IE6N_4vb7wwdExw,68074
 uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
 uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=LrOSt_loA1Mw3vTqaO2LpaDMu7rYJy6k5Kr-EOBln7s,3424
@@ -17,7 +17,7 @@ uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py,
 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py,sha256=yfhthv9nuogP19VOZ3TYQrq51qqjiCZcSel4sXhiKjs,4012
 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py,sha256=LouqjspEMt1TkOGqWHs2zkxwOETIy3n7p64uKIlAgUg,2401
 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py,sha256=W7QBx6Mgso8RYosuXsaYo3GGNAu-tiyBSmuYxr1JSOU,1707
-uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py,sha256=NccMd0830aiNRQ8SH6mc4r5Hlugfey5-PyUd24QLo4s,8478
 uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py,sha256=MgF_7XyIcIoNzFR0OJsjBkLCZKgWxBrV6nTcutMxO1Q,4244
 uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py,sha256=SBvAa0GZM3V7ygK8ARawbHAPH6R_303U30RH8WYPi5Q,3020
 uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py,sha256=VPWGljnH4C3q8qs5ZmCtqjNjgWQvviALzjk00q3EZeQ,2632
@@ -25,7 +25,7 @@ uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py,
 uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py,sha256=CvGB7w9HMn7XyEtwfd9MWZE_HlZ75pDcaKMsQJz0xhk,1669
 uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py,sha256=U1HOr9YLMAlFoZysfw5n04E0bVuCliO5Yj1FMiiwcHA,2549
 uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py,sha256=mspYVHO8fgoVIwogT6V2Go1tbf3PDbEmr8kZMJug5Gs,4235
-uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py,sha256=nzet8yKKzpiAQXDr0tmqXuJ4R-EdJqtxSNGiUCh79bA,5591
 uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py,sha256=now2xgpfshYM33UWC18j6xa6BuBydO5Sl7OrDQOo6b0,4687
 uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py,sha256=jHbCK8sL09vdmdP7Xnh8lIrU5AHTnJLEZfOLephPvWg,4090
 uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py,sha256=r7wjrRPT2EinRMnYjGxmsCD6aMFhEOyRNd8_3R9PdQU,4117
@@ -111,7 +111,7 @@ uk_bin_collection/uk_bin_collection/councils/MertonCouncil.py,sha256=3Y2Un4xXo1s
 uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py,sha256=oOWwU5FSgGej2Mv7FQ66N-EzS5nZgmGsd0WnfLWUc1I,5238
 uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py,sha256=AZgC9wmDLEjUOtIFvf0ehF5LHturXTH4DkE3ioPSVBA,6254
 uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py,sha256=3olsWa77L34vz-c7NgeGK9xmNuR4Ws_oAk5D4UpIkPw,2005
-uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py,sha256=
+uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py,sha256=bvCrC4Qcg0Uzp9zZGcC7-7-oJcMh2cb1VaXfdkB11oc,5257
 uk_bin_collection/uk_bin_collection/councils/NeathPortTalbotCouncil.py,sha256=V9URKAv3dA_deYmStL2Nmn4GbVCM-tU2qnKobivmGew,5583
 uk_bin_collection/uk_bin_collection/councils/NewForestCouncil.py,sha256=ylTn9KmWITtaO9_Z8kJCN2w2ALfhrfGt3SeJ78lgw7M,5391
 uk_bin_collection/uk_bin_collection/councils/NewarkAndSherwoodDC.py,sha256=lAleYfCGUWCKOi7Ye_cjgfpI3pWwTcFctlYmh0hjebM,2140
@@ -210,8 +210,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=4s9ODGPAwPqwXc8SrTX5Wlfmizs3_58iXUtHc4Ir86o,1162
 uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
 uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
-uk_bin_collection-0.98.
-uk_bin_collection-0.98.
-uk_bin_collection-0.98.
-uk_bin_collection-0.98.
-uk_bin_collection-0.98.
+uk_bin_collection-0.98.2.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+uk_bin_collection-0.98.2.dist-info/METADATA,sha256=lhQtluGdKsFsfZJo7MJXKlWLeGLv-rbTX1aA081Fj3g,16843
+uk_bin_collection-0.98.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+uk_bin_collection-0.98.2.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+uk_bin_collection-0.98.2.dist-info/RECORD,,