uk_bin_collection 0.134.0__py3-none-any.whl → 0.134.2__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
uk_bin_collection/tests/input.json
@@ -383,12 +383,12 @@
      "wiki_note": "Follow the instructions [here](https://www.chelmsford.gov.uk/myhome/) until you get the page listing your address, then copy the entire address text and use that in the house number field."
    },
    "CheltenhamBoroughCouncil": {
-     "house_number": "Monday",
-     "postcode": "Week 1",
+     "uprn": "100120372027",
+     "postcode": "GL51 3NA",
      "skip_get_url": true,
      "url": "https://www.cheltenham.gov.uk",
      "wiki_name": "Cheltenham Borough Council",
-     "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. [Monday/Tuesday/Wednesday/Thursday/Friday]. Use the 'postcode' field to pass the WEEK (wrapped in quotes) for your collections. [Week 1/Week 2]."
+     "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)."
    },
    "CheshireEastCouncil": {
      "url": "https://online.cheshireeast.gov.uk/MyCollectionDay/SearchByAjax/GetBartecJobList?uprn=100012791226&onelineaddress=3%20COBBLERS%20YARD,%20SK9%207DZ&_=1689413260149",
@@ -609,6 +609,7 @@
      "house_number": "1",
      "postcode": "CM20 2FZ",
      "skip_get_url": true,
+     "web_driver": "http://selenium:4444",
      "url": "https://www.eastherts.gov.uk",
      "wiki_name": "East Herts Council",
      "wiki_note": "Pass the house number and postcode in their respective parameters."
@@ -2173,4 +2174,4 @@
      "wiki_name": "York Council",
      "wiki_note": "Provide your UPRN."
    }
- }
+ }
uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py
@@ -1,18 +1,18 @@
- import re
- import time
+ # import re

  import requests
- from bs4 import BeautifulSoup
- from selenium.webdriver.common.by import By
- from selenium.webdriver.support import expected_conditions as EC
- from selenium.webdriver.support.ui import Select
- from selenium.webdriver.support.wait import WebDriverWait

- from uk_bin_collection.uk_bin_collection.common import *
+ from uk_bin_collection.uk_bin_collection.common import (
+     check_postcode,
+     check_uprn,
+     datetime,
+     get_dates_every_x_days,
+     json,
+     timedelta,
+ )
  from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass


- # import the wonderful Beautiful Soup and the URL grabber
  class CouncilClass(AbstractGetBinDataClass):
      """
      Concrete classes have to implement all abstract operations of the
@@ -20,42 +20,193 @@ class CouncilClass(AbstractGetBinDataClass):
      implementation.
      """

-     def parse_data(self, page: str, **kwargs) -> dict:
+     def parse_data(self, page: str, **kwargs: str) -> dict[str, list[dict[str, str]]]:
+         if (postcode := kwargs.get("postcode")) is None:
+             raise KeyError("Missing: postcode")
+         if (uprn := kwargs.get("uprn")) is None:
+             raise KeyError("Missing: uprn")
+         check_postcode(postcode)
+         check_uprn(uprn)
+         bindata: dict[str, list[dict[str, str]]] = {"bins": []}
+         location_x: int = 0
+         location_y: int = 0
+         location_usrn: str = ""

-         collection_day = kwargs.get("paon")
-         collection_week = kwargs.get("postcode")
-         bindata = {"bins": []}
+         # Ensure any cookies set are maintained in a requests session
+         s = requests.session()
+
+         # Ask for a new SessionId from the server
+         session_id_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/"\
+             "RequestSession?userName=guest+CBC&password=&"\
+             "script=%5CAurora%5CCBC+Waste+Streets.AuroraScript%24"
+         session_id_response = s.get(session_id_url)
+         session_id_response.raise_for_status()
+         session_id = session_id_response.json().get("Session").get("SessionId")
+
+         # Ask what tasks we can do within the session
+         tasks_url = f"https://maps.cheltenham.gov.uk/map/Aurora.svc/"\
+             f"GetWorkflow?sessionId={session_id}&workflowId=wastestreet"
+         tasks_response = s.get(tasks_url)
+         tasks_response.raise_for_status()
+         # JSON response contained a BOM marker
+         tasks = json.loads(tasks_response.text[1:])
+         retrieve_results_task_id, initialise_map_task_id, drilldown_task_id = None, None, None
+         # Pull out the IDs of the tasks we will need
+         for task in tasks.get("Tasks"):
+             if task.get("$type") == "StatMap.Aurora.FetchResultSetTask, StatMapService":
+                 retrieve_results_task_id = task.get("Id")
+             elif task.get("$type") == "StatMap.Aurora.SaveStateTask, StatMapService":
+                 initialise_map_task_id = task.get("Id")
+             elif task.get("$type") == "StatMap.Aurora.DrillDownTask, StatMapService":
+                 drilldown_task_id = task.get("Id")
+         if not all([retrieve_results_task_id, initialise_map_task_id, drilldown_task_id]):
+             raise ValueError("Not all task IDs found")
+
+         # Find the X / Y coordinates for the requested postcode
+         postcode_search_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/FindLocation?"\
+             f"sessionId={session_id}&address={postcode}&limit=1000"
+         postcode_search_response = s.get(postcode_search_url)
+         postcode_search_response.raise_for_status()
+         if len(locations_list := postcode_search_response.json().get("Locations")) == 0:
+             raise ValueError("Address locations empty")
+         for location in locations_list:
+             location_search_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/FindLocation?"\
+                 f"sessionId={session_id}&locationId={location.get('Id')}"
+             location_search_response = s.get(location_search_url)
+             location_search_response.raise_for_status()
+             if not (location_list := location_search_response.json().get("Locations")):
+                 raise KeyError("Locations wasn't present in results")
+             if not (location_detail := location_list[0].get("Details")):
+                 raise KeyError("Details wasn't present in location")
+             location_uprn = [detail.get("Value") for detail in location_detail
+                              if detail.get("Name") == "UPRN"][0]
+             if str(location_uprn) == uprn:
+                 location_usrn = str([detail.get("Value") for detail in location_detail
+                                      if detail.get("Name") == "USRN"][0])
+                 location_x = location_list[0].get("X")
+                 location_y = location_list[0].get("Y")
+                 break
+
+         # Needed to initialise the server to allow the follow-on call
+         open_map_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/OpenScriptMap?"\
+             f"sessionId={session_id}"
+         if res := s.get(open_map_url):
+             res.raise_for_status()
+
+         # Needed to initialise the server to allow the follow-on call
+         save_state_map_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"\
+             f"sessionId={session_id}&taskId={initialise_map_task_id}&job="\
+             "%7BTask%3A+%7B+%24type%3A+%27StatMap.Aurora.SaveStateTask%2C"\
+             "+StatMapService%27+%7D%7D"
+         if res := s.get(save_state_map_url):
+             res.raise_for_status()
+
+         # Start search for address given by x / y coord
+         drilldown_map_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"\
+             f"sessionId={session_id}&taskId={drilldown_task_id}&job=%7B%22"\
+             f"QueryX%22%3A{location_x}%2C%22QueryY%22%3A{location_y}%2C%22"\
+             "Task%22%3A%7B%22Type%22%3A%22StatMap.Aurora.DrillDownTask%2C"\
+             "+StatMapService%22%7D%7D"
+         if res := s.get(drilldown_map_url):
+             res.raise_for_status()
+
+         # Get results from search for address given by x / y coord
+         address_details_url = "https://maps.cheltenham.gov.uk/map/Aurora.svc/ExecuteTaskJob?"\
+             f"sessionId={session_id}&taskId={retrieve_results_task_id}"\
+             f"&job=%7B%22QueryX%22%3A{location_x}%2C%22QueryY%22%3A"\
+             f"{location_y}%2C%22Task%22%3A%7B%22Type%22%3A%22"\
+             "StatMap.Aurora.FetchResultSetTask%2C+StatMapService"\
+             "%22%2C%22ResultSetName%22%3A%22inspection%22%7D%7D"
+         address_details_response = s.get(address_details_url)
+         address_details_response.raise_for_status()
+         # JSON response contained a BOM marker, skip first character
+         address_details = json.loads(address_details_response.text[1:])
+         if not (task_results := address_details.get("TaskResult")):
+             raise KeyError("TaskResult wasn't present in results")
+         if not (distance_export_set := task_results.get("DistanceOrderedSet")):
+             raise KeyError("DistanceOrderedSet wasn't present in TaskResult")
+         if not (result_set := distance_export_set.get("ResultSet")):
+             raise KeyError("ResultSet wasn't present in DistanceOrderedSet")
+         if not (result_tables := result_set.get("Tables")):
+             raise KeyError("Tables wasn't present in ResultSet")
+         result = result_tables[0]
+         column_names: dict[int, str] = {}
+         result_dict: dict[str, str | int] = {}
+         for column in result.get("ColumnDefinitions"):
+             column_names[column.get("ColumnIndex")] = column.get("ColumnName")
+         for r in result.get("Records"):
+             result_dict = {}
+             for idx, column_value in enumerate(r):
+                 if not (column_name := column_names.get(idx)):
+                     raise IndexError("Column index out of range")
+                 result_dict[column_name.upper()] = column_value
+             # Validate the street against the USRN. Some locations can return
+             # multiple results; break on the first match of the USRN.
+             # TODO: Need to select the correct option out of all available options
+             if location_usrn == str(result_dict.get("USRN")):
+                 break
+
+         refuse_week, recycling_week, garden_week = 0, 0, 0
+         # After we've got the correct result, pull out the week number each bin type is taken on
+         if (refuse_week_raw := result_dict.get("New_Refuse_Week".upper())) is not None:
+             refuse_week = int(refuse_week_raw)
+         if (recycling_week_raw := result_dict.get("New_Recycling_Week".upper())) is not None:
+             recycling_week = int(recycling_week_raw)
+         if (garden_week_raw := result_dict.get("Garden_Bin_Week".upper())) is not None:
+             garden_week = int(garden_week_raw)
+
+         if not all([refuse_week, recycling_week, garden_week]):
+             raise KeyError("Not all week numbers found")

          days_of_week = [
-             "Monday",
-             "Tuesday",
-             "Wednesday",
-             "Thursday",
-             "Friday",
-             "Saturday",
-             "Sunday",
+             "MON",
+             "TUE",
+             "WED",
+             "THU",
+             "FRI",
+             "SAT",
+             "SUN",
          ]

-         week = ["Week 1", "Week 2"]
+         refuse_day_offset = days_of_week.index(
+             str(result_dict.get("New_Refuse_Day_internal".upper())).upper())
+         recycling_day_offset = days_of_week.index(
+             str(result_dict.get("New_Recycling_Day".upper())).upper())
+         garden_day_offset = days_of_week.index(
+             str(result_dict.get("New_Garden_Day".upper())).upper())
+         food_day_offset = days_of_week.index(
+             str(result_dict.get("New_Food_Day".upper())).upper())

-         offset_days = days_of_week.index(collection_day)
-         collection_week = week.index(collection_week)
+         # Initialise WEEK-1/WEEK-2 based on known details
+         week_1_epoch = datetime(2025, 1, 13)

-         if collection_week == 0:
-             refusestartDate = datetime(2024, 11, 25)
-             recyclingstartDate = datetime(2024, 11, 18)
-         else:
-             refusestartDate = datetime(2024, 11, 18)
-             recyclingstartDate = datetime(2024, 11, 25)
+         # Start of this week
+         this_week = datetime.now() - timedelta(days=datetime.now().weekday())

-         refuse_dates = get_dates_every_x_days(refusestartDate, 14, 28)
-         recycling_dates = get_dates_every_x_days(recyclingstartDate, 14, 28)
-         food_dates = get_dates_every_x_days(recyclingstartDate, 7, 56)
+         # If there's an even number of weeks between the week-1
+         # epoch and this week, then this week is of type week-1
+         if ((this_week - week_1_epoch).days // 7) % 2 == 0:
+             week = {
+                 1: this_week,
+                 2: this_week + timedelta(days=7),
+             }
+         else:
+             week = {
+                 1: this_week - timedelta(days=7),
+                 2: this_week,
+             }

-         for refuseDate in refuse_dates:
+         refuse_dates: list[str] = get_dates_every_x_days(
+             week[refuse_week], 14, 28)
+         recycling_dates: list[str] = get_dates_every_x_days(
+             week[recycling_week], 14, 28)
+         garden_dates: list[str] = get_dates_every_x_days(
+             week[garden_week], 14, 28)

+         for refuse_date in refuse_dates:
              collection_date = (
-                 datetime.strptime(refuseDate, "%d/%m/%Y") + timedelta(days=offset_days)
+                 datetime.strptime(refuse_date, "%d/%m/%Y") +
+                 timedelta(days=refuse_day_offset)
              ).strftime("%d/%m/%Y")

              dict_data = {
@@ -64,11 +215,11 @@ class CouncilClass(AbstractGetBinDataClass):
              }
              bindata["bins"].append(dict_data)

-         for recyclingDate in recycling_dates:
+         for recycling_date in recycling_dates:

              collection_date = (
-                 datetime.strptime(recyclingDate, "%d/%m/%Y")
-                 + timedelta(days=offset_days)
+                 datetime.strptime(recycling_date, "%d/%m/%Y") +
+                 timedelta(days=recycling_day_offset)
              ).strftime("%d/%m/%Y")

              dict_data = {
@@ -77,26 +228,81 @@ class CouncilClass(AbstractGetBinDataClass):
              }
              bindata["bins"].append(dict_data)

-         dict_data = {
-             "type": "Garden Waste Bin",
-             "collectionDate": collection_date,
-         }
-         bindata["bins"].append(dict_data)
-
-         for food_date in food_dates:
+         for garden_date in garden_dates:

              collection_date = (
-                 datetime.strptime(food_date, "%d/%m/%Y") + timedelta(days=offset_days)
+                 datetime.strptime(garden_date, "%d/%m/%Y") +
+                 timedelta(days=garden_day_offset)
              ).strftime("%d/%m/%Y")

              dict_data = {
-                 "type": "Food Waste Bin",
+                 "type": "Garden Waste Bin",
                  "collectionDate": collection_date,
              }
              bindata["bins"].append(dict_data)

+         if ((food_waste_week := str(result_dict.get("FOOD_WASTE_WEEK_EXTERNAL", "")).upper())
+                 == "weekly".upper()):
+             food_dates: list[str] = get_dates_every_x_days(week[1], 7, 56)
+
+             for food_date in food_dates:
+
+                 collection_date = (
+                     datetime.strptime(food_date, "%d/%m/%Y") +
+                     timedelta(days=food_day_offset)
+                 ).strftime("%d/%m/%Y")
+
+                 dict_data = {
+                     "type": "Food Waste Bin",
+                     "collectionDate": collection_date,
+                 }
+                 bindata["bins"].append(dict_data)
+         # Sometimes the food bin is collected on different days between
+         # week-1 and week-2
+         else:
+             first_week: str | int
+             second_week_detail: str
+             first_week, _, second_week_detail = food_waste_week.partition("&")
+             first_week = int(first_week.strip())
+
+             second_week_day, _, second_week_number = second_week_detail.partition(
+                 "WEEK")
+             second_week_number = int(second_week_number.strip())
+             second_week_day: str = second_week_day.strip()[:3]
+
+             food_dates_first: list[str] = get_dates_every_x_days(
+                 week[first_week], 14, 28)
+             food_dates_second: list[str] = get_dates_every_x_days(
+                 week[second_week_number], 14, 28)
+             second_week_offset = days_of_week.index(second_week_day)
+
+             for food_date in food_dates_first:
+
+                 collection_date = (
+                     datetime.strptime(food_date, "%d/%m/%Y") +
+                     timedelta(days=food_day_offset)
+                 ).strftime("%d/%m/%Y")
+
+                 dict_data = {
+                     "type": "Food Waste Bin",
+                     "collectionDate": collection_date,
+                 }
+                 bindata["bins"].append(dict_data)
+             for food_date in food_dates_second:
+
+                 collection_date = (
+                     datetime.strptime(food_date, "%d/%m/%Y") +
+                     timedelta(days=second_week_offset)
+                 ).strftime("%d/%m/%Y")
+
+                 dict_data = {
+                     "type": "Food Waste Bin",
+                     "collectionDate": collection_date,
+                 }
+                 bindata["bins"].append(dict_data)
+
          bindata["bins"].sort(
-             key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
+             key=lambda x: datetime.strptime(
+                 x.get("collectionDate", ""), "%d/%m/%Y")
          )
-
          return bindata
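
Two pieces of the rewritten scraper are worth isolating. The fortnight is anchored to a fixed "week 1" Monday (13 January 2025) and classified by week parity, and a non-weekly `FOOD_WASTE_WEEK_EXTERNAL` value is split out of a compound string. A standalone sketch of both follows, stdlib only; the sample string "1 & THU WEEK 2" is an assumed format inferred from the partition calls above:

```python
# Standalone sketch of the week-parity rule and the food-waste string parsing
# used in the new CheltenhamBoroughCouncil.parse_data (stdlib only; the sample
# input string below is an assumption inferred from the partition logic).
from datetime import datetime, timedelta

WEEK_1_EPOCH = datetime(2025, 1, 13)  # a Monday known to start a "week 1"

def fortnight_weeks(today: datetime) -> dict[int, datetime]:
    """Map week number (1 or 2) to the Monday starting that week."""
    this_week = today - timedelta(days=today.weekday())  # Monday of this week
    if ((this_week - WEEK_1_EPOCH).days // 7) % 2 == 0:
        return {1: this_week, 2: this_week + timedelta(days=7)}
    return {1: this_week - timedelta(days=7), 2: this_week}

def parse_food_waste_week(raw: str) -> tuple[int, str, int]:
    """Split e.g. '1 & THU WEEK 2' into (first week, second day, second week)."""
    first_week, _, second_detail = raw.upper().partition("&")
    second_day, _, second_week = second_detail.partition("WEEK")
    return int(first_week.strip()), second_day.strip()[:3], int(second_week.strip())

print(fortnight_weeks(datetime(2025, 1, 20)))   # {1: 2025-01-13, 2: 2025-01-20}
print(parse_food_waste_week("1 & THU WEEK 2"))  # (1, 'THU', 2)
```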
uk_bin_collection-0.134.0.dist-info/METADATA → uk_bin_collection-0.134.2.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: uk_bin_collection
- Version: 0.134.0
+ Version: 0.134.2
  Summary: Python Lib to collect UK Bin Data
  Author: Robert Bradley
  Author-email: robbrad182@gmail.com
uk_bin_collection-0.134.0.dist-info/RECORD → uk_bin_collection-0.134.2.dist-info/RECORD
@@ -2,7 +2,7 @@ uk_bin_collection/README.rst,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
  uk_bin_collection/tests/council_feature_input_parity.py,sha256=DO6Mk4ImYgM5ZCZ-cutwz5RoYYWZRLYx2tr6zIs_9Rc,3843
  uk_bin_collection/tests/features/environment.py,sha256=VQZjJdJI_kZn08M0j5cUgvKT4k3iTw8icJge1DGOkoA,127
  uk_bin_collection/tests/features/validate_council_outputs.feature,sha256=SJK-Vc737hrf03tssxxbeg_JIvAH-ddB8f6gU1LTbuQ,251
- uk_bin_collection/tests/input.json,sha256=b5kS_5XsRt-cTRw-YdKTLciyPZHAk1mzHXPSqNgUbJ0,114203
+ uk_bin_collection/tests/input.json,sha256=93b85jhY57G2qIx1RZh-DL_J3kia3UJZ_oo9d-Sq_OI,114121
  uk_bin_collection/tests/output.schema,sha256=ZwKQBwYyTDEM4G2hJwfLUVM-5v1vKRvRK9W9SS1sd18,1086
  uk_bin_collection/tests/step_defs/step_helpers/file_handler.py,sha256=Ygzi4V0S1MIHqbdstUlIqtRIwnynvhu4UtpweJ6-5N8,1474
  uk_bin_collection/tests/step_defs/test_validate_council.py,sha256=VZ0a81sioJULD7syAYHjvK_-nT_Rd36tUyzPetSA0gk,3475
@@ -62,7 +62,7 @@ uk_bin_collection/uk_bin_collection/councils/CarmarthenshireCountyCouncil.py,sha
  uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py,sha256=JVPYUIlU2ISgbUSr5AOOXNK6IFQFtQmhZyYIMAOedD4,3858
  uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py,sha256=tXfzMetN6wxahuGGRp2mIyCCDSL4F2aG61HhUxw6COQ,2172
  uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py,sha256=EB88D0MNJwuDZ2GX1ENc5maGYx17mnHTCtNl6s-v11E,5090
- uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py,sha256=z59g9h6QwdeCNzonM5AI-6NvpLKocD-DT8eSvqk8RwU,3198
+ uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py,sha256=fo9n2AuD8a174h3YMa9awa3S4YO2XU1KaInQ4ULz6s4,13686
  uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py,sha256=K95UaS67nxGSFzLoyqMSZcLmfhwjDImVXznMZC_bFa4,1680
  uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py,sha256=5mKZf22NgdyBY-SqV0c2q8b8IJobkoZrsfGEVUcxUyM,3544
  uk_bin_collection/uk_bin_collection/councils/ChesterfieldBoroughCouncil.py,sha256=mZiM8Ugm_OP0JkC5pLaQmi4i79mAp4SNNrcIdsREjHw,7198
@@ -307,8 +307,8 @@ uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py,sha256=I2kBYMlsD4bId
  uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py,sha256=EQWRhZ2pEejlvm0fPyOTsOHKvUZmPnxEYO_OWRGKTjs,1158
  uk_bin_collection/uk_bin_collection/create_new_council.py,sha256=m-IhmWmeWQlFsTZC4OxuFvtw5ZtB8EAJHxJTH4O59lQ,1536
  uk_bin_collection/uk_bin_collection/get_bin_data.py,sha256=YvmHfZqanwrJ8ToGch34x-L-7yPe31nB_x77_Mgl_vo,4545
- uk_bin_collection-0.134.0.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
- uk_bin_collection-0.134.0.dist-info/METADATA,sha256=Svl74e_j58YAsZNRQkgc2gzL1rrJNozpkXcz-nwQWSo,19741
- uk_bin_collection-0.134.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- uk_bin_collection-0.134.0.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
- uk_bin_collection-0.134.0.dist-info/RECORD,,
+ uk_bin_collection-0.134.2.dist-info/LICENSE,sha256=vABBUOzcrgfaTKpzeo-si9YVEun6juDkndqA8RKdKGs,1071
+ uk_bin_collection-0.134.2.dist-info/METADATA,sha256=AJh8NQGTC8kAz_sSl8NK8gd1ksOeXucIsEIyX11Eazg,19741
+ uk_bin_collection-0.134.2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ uk_bin_collection-0.134.2.dist-info/entry_points.txt,sha256=36WCSGMWSc916S3Hi1ZkazzDKHaJ6CD-4fCEFm5MIao,90
+ uk_bin_collection-0.134.2.dist-info/RECORD,,