sunholo 0.132.0__py3-none-any.whl → 0.134.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sunholo/custom_logging.py +20 -0
- sunholo/database/alloydb_client.py +59 -0
- sunholo/discovery_engine/cli.py +20 -10
- sunholo/discovery_engine/discovery_engine_client.py +36 -16
- {sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/METADATA +1 -1
- {sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/RECORD +10 -10
- {sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/WHEEL +0 -0
- {sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/entry_points.txt +0 -0
- {sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/licenses/LICENSE.txt +0 -0
- {sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/top_level.txt +0 -0
sunholo/custom_logging.py
CHANGED
@@ -193,6 +193,26 @@ class GoogleCloudLogging:
             except Exception as text_err:
                 print(f"Even fallback text logging failed: {text_err}")
 
+    def log(self, message, *args, **kwargs):
+        """
+        Some weird bug keeps calling this method - do not use normally
+
+        A catch-all method to handle unexpected .log() calls on this class.
+        Routes the call to the appropriate logging method based on severity level.
+        """
+        severity = kwargs.get('severity', 'INFO')
+        # Remove severity from kwargs if it exists to avoid passing it twice
+        if 'severity' in kwargs:
+            del kwargs['severity']
+
+        # Determine if this is a structured log or simple message
+        if isinstance(message, dict):
+            # Assume this is a structured log
+            return self.structured_log(log_struct=message, severity=severity)
+        else:
+            # Assume this is a text log
+            return self.structured_log(log_text=message, severity=severity)
+
     def debug(self, log_text=None, log_struct=None):
 
         """
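The new `GoogleCloudLogging.log()` above is a catch-all that routes stray `.log()` calls to `structured_log()`, choosing structured or text logging from the message type. A minimal, self-contained sketch of that dispatch pattern (the `StubLogger` class and its print-based `structured_log` are illustrative stand-ins, not sunholo code):

```python
class StubLogger:
    """Illustrative stand-in for GoogleCloudLogging; only the routing logic matters."""

    def structured_log(self, log_text=None, log_struct=None, severity="INFO"):
        # The real method writes to Google Cloud Logging; here we just print.
        print(f"[{severity}] {log_struct if log_struct is not None else log_text}")

    def log(self, message, *args, **kwargs):
        # Pop severity so it is not passed twice to structured_log()
        # (equivalent to the get/del pair in the diff).
        severity = kwargs.pop("severity", "INFO")
        if isinstance(message, dict):
            return self.structured_log(log_struct=message, severity=severity)
        return self.structured_log(log_text=message, severity=severity)


logger = StubLogger()
logger.log("plain text message")                                 # routed as a text log
logger.log({"event": "startup", "ok": True}, severity="DEBUG")   # routed as a structured log
```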
sunholo/database/alloydb_client.py
CHANGED
@@ -941,6 +941,65 @@ class AlloyDBClient:
         log.info(f"Updated row in {table_name} with {primary_key_column}={primary_key_value}")
 
         return result
+
+    async def check_row(self, table_name: str, primary_key_column: str, primary_key_value: str,
+                        columns: list = None, condition: str = None):
+        """
+        Retrieves a row from the specified table based on the primary key.
+
+        Args:
+            table_name (str): Name of the table to query
+            primary_key_column (str): Name of the primary key column (e.g., 'id')
+            primary_key_value (str): Value of the primary key for the row to retrieve
+            columns (list, optional): List of column names to retrieve. If None, retrieves all columns
+            condition (str, optional): Additional condition for the WHERE clause
+
+        Returns:
+            The row data if found, None otherwise
+        """
+        # Determine which columns to select
+        if columns and isinstance(columns, list):
+            columns_str = ", ".join([f'"{col}"' for col in columns])
+        else:
+            columns_str = "*"  # Select all columns if none specified
+
+        # Create the WHERE clause
+        where_clause = f'"{primary_key_column}" = :pk_value'
+        values = {'pk_value': primary_key_value}
+
+        if condition:
+            where_clause += f" AND ({condition})"
+
+        # Construct the SQL statement
+        sql = f'SELECT {columns_str} FROM "{table_name}" WHERE {where_clause} LIMIT 1'
+
+        log.info(f"Checking row in {table_name} with {primary_key_column}={primary_key_value}")
+
+        # Execute SQL based on engine type
+        try:
+            if self.engine_type == "pg8000":
+                # Use the synchronous method for pg8000
+                result = self._execute_sql_pg8000(sql, values)
+                # Extract the row data from the result
+                if result and hasattr(result, 'fetchone'):
+                    row = result.fetchone()
+                    if row:
+                        # If we have column names, convert to dictionary
+                        if hasattr(result, 'keys'):
+                            column_names = result.keys()
+                            return dict(zip(column_names, row))
+                        return row
+                    return None
+            else:
+                # Use the async method for langchain
+                result = await self._execute_sql_async_langchain(sql, values)
+                # For langchain engine, check result format and return first row if exists
+                if result and len(result) > 0:
+                    return result[0]
+                return None
+        except Exception as e:
+            log.error(f"Error checking row: {e}")
+            return None
 
     async def get_table_columns(self, table_name, schema="public"):
         """
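Because `check_row` is a coroutine that builds a parameterised `SELECT ... LIMIT 1`, a call site looks roughly like the sketch below, assuming an already-configured `AlloyDBClient` instance; the table, column names, and the extra `condition` are illustrative only:

```python
import asyncio

async def fetch_user_row(client, user_id: str):
    # `client` is assumed to be an already-configured AlloyDBClient; the
    # table/column names and the extra condition below are illustrative.
    row = await client.check_row(
        table_name="users",
        primary_key_column="id",
        primary_key_value=user_id,
        columns=["id", "email"],          # omit to SELECT *
        condition="deleted_at IS NULL",   # appended to the WHERE clause
    )
    return row  # dict or row tuple if found, None otherwise

# With a real client instance:
# asyncio.run(fetch_user_row(client, "1234"))
```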
sunholo/discovery_engine/cli.py
CHANGED
@@ -277,11 +277,25 @@ def search_by_id_and_or_date_command(args):
     elif results_data:
         console.print("\n[bold magenta]--- Individual Chunks (Filtered) ---[/bold magenta]")
         chunk_count = 0
-        # ... (pager iteration identical to search_command) ...
         try:
+            # Iterate through the pager returned by get_chunks
             for page in results_data.pages:
-
-
+                if not hasattr(page, 'results') or not page.results: continue
+                for result in page.results:
+                    # Ensure the result structure is as expected by get_chunks
+                    if hasattr(result, 'chunk'):
+                        chunk_count += 1
+                        console.print(f"\n[bold]Chunk {chunk_count}:[/bold]")
+                        # Use the client's formatter if available
+                        console.print(client.chunk_format(result.chunk))
+                    elif hasattr(result, 'document') and hasattr(result.document, 'chunks'):
+                        # Fallback if structure is different (e.g., document with chunks)
+                        for chunk in result.document.chunks:
+                            chunk_count += 1
+                            console.print(f"\n[bold]Chunk {chunk_count} (from doc {result.document.id}):[/bold]")
+                            console.print(f"  Content: {getattr(chunk, 'content', 'N/A')}")
+                            console.print(f"  Doc Name: {getattr(chunk, 'document_metadata', {}).get('name', 'N/A')}") # Example access
+
             if chunk_count == 0:
                 console.print("[yellow]No chunks found in the filtered results.[/yellow]")
         except Exception as page_err:
@@ -306,9 +320,7 @@ def search_engine_command(args):
     try:
        client = DiscoveryEngineClient(
            project_id=args.project,
-
-           # Provide a default or the primary one associated with the project/engine.
-           data_store_id=args.data_store_id_for_init,
+           engine_id=args.engine_id,
            location=args.location
        )
 
@@ -404,8 +416,8 @@ def search_engine_command(args):
            try:
                struct_dict = convert_composite_to_native(doc.struct_data)
                metadata_output = struct_dict.get("structData", {})
-               console.print(f
-               console.print(f
+               console.print(f' Title: {struct_dict.get("title", "")}')
+               console.print(f' Content: {struct_dict.get("content", "")}')
            except Exception as json_err:
                console.print(f"[yellow] Warning: Could not convert metadata Struct to JSON: {json_err}[/yellow]")
                metadata_output = doc.struct_data
@@ -525,8 +537,6 @@ def setup_discovery_engine_subparser(subparsers):
    search_engine_parser = discovery_engine_subparsers.add_parser('search-engine', help='Search a Discovery Engine (fetches documents/summary)')
    search_engine_parser.add_argument('--query', required=True, help='The search query')
    search_engine_parser.add_argument('--engine-id', required=True, help='Engine ID to search')
-   # Add data_store_id needed for client init, maybe make it optional if client handles it?
-   search_engine_parser.add_argument('--data-store-id-for-init', required=True, help='A primary data store ID associated with the project/engine (for client init)')
    search_engine_parser.add_argument('--serving-config-id', default='default_config', help='Serving config ID for the engine')
    search_engine_parser.add_argument('--collection-id', default='default_collection', help='Collection ID for the engine path')
    search_engine_parser.add_argument('--page-size', type=int, default=10, help='Max results per page')
sunholo/discovery_engine/discovery_engine_client.py
CHANGED
@@ -67,12 +67,19 @@ class DiscoveryEngineClient:
        print(f"Document Name: {chunk_document_name}")
        ```
    """
-   def __init__(self, data_store_id, project_id, location="eu"):
+   def __init__(self, data_store_id=None, engine_id=None, project_id=None, location="eu"):
        if not discoveryengine:
            raise ImportError("Google Cloud Discovery Engine not available, install via `pip install sunholo[gcp]`")
+
+       if project_id is None:
+           raise ValueError("Must specify project_id")
+
+       if data_store_id is None and engine_id is None:
+           raise ValueError("Must specify at least one of data_store_id or engine_id")
 
        self.project_id = project_id
        self.data_store_id = data_store_id
+       self.engine_id = engine_id
        self.location = location
        client_options = (
            ClientOptions(api_endpoint=f"{location}-discoveryengine.googleapis.com")
@@ -469,7 +476,7 @@ class DiscoveryEngineClient:
            operation = self.engine_client.create_engine(request=request)
        except AlreadyExists as err:
            log.info(f"Engine already exists: - {str(err)}")
-
+           self.engine_id = engine_id
            return engine_id
 
        log.info(f"Waiting for create vertex ai search operation to complete: {operation.operation.name}")
@@ -481,7 +488,7 @@ class DiscoveryEngineClient:
 
        # Handle the response
        log.info(f"{response=} {metadata=}")
-
+       self.engine_id = engine_id
        return operation.operation.name
 
    def _import_document_request(self,
@@ -862,7 +869,7 @@ class DiscoveryEngineClient:
    def search_engine(
        self,
        search_query: str,
-       engine_id: str,
+       engine_id: str = None,
        serving_config_id: str = "default_config",
        page_size: int = 10,
        return_snippet: bool = True,
@@ -890,7 +897,7 @@ class DiscoveryEngineClient:
 
        Args:
            search_query: The user's search query string.
-           engine_id: The ID of the search engine to query.
+           engine_id: The ID of the search engine to query or uses class engine_id it init with.
            serving_config_id: The ID of the specific serving config for the engine.
            page_size: Maximum number of results per page.
            return_snippet: Whether to request snippets in the results.
@@ -961,13 +968,20 @@ class DiscoveryEngineClient:
            log.error("Discovery Engine library not available at runtime.")
            return None
 
+       if engine_id:
+           self.engine_id = engine_id
+
+       if engine_id is None and self.engine_id is None:
+           raise ValueError("Could not find self.engine_id")
+
+
        try:
            # Construct the serving config path for an ENGINE
            # Note: The client library path helper is for data stores/serving configs within them.
            # We need the path for an engine's serving config.
            serving_config_path = (
                f"projects/{self.project_id}/locations/{self.location}/"
-               f"collections/{collection_id}/engines/{engine_id}/"
+               f"collections/{collection_id}/engines/{self.engine_id}/"
                f"servingConfigs/{serving_config_id}"
            )
            log.info(f"Using Engine Serving Config Path: {serving_config_path}")
@@ -1029,22 +1043,22 @@ class DiscoveryEngineClient:
                # Add other relevant fields like facet_specs if needed
            )
 
-           log.info(f"Searching engine '{engine_id}' with request: {request}")
+           log.info(f"Searching engine '{self.engine_id}' with request: {request}")
            response_pager = self.search_client.search(request)
-           log.info(f"Search successful for query '{search_query}' against engine '{engine_id}'.")
+           log.info(f"Search successful for query '{search_query}' against engine '{self.engine_id}'.")
            return response_pager
 
        except GoogleAPIError as e:
-           log.error(f"API error searching engine '{engine_id}': {e}")
+           log.error(f"API error searching engine '{self.engine_id}': {e}")
            return None
        except Exception as e:
-           log.error(f"Unexpected error searching engine '{engine_id}': {e}\n{traceback.format_exc()}")
+           log.error(f"Unexpected error searching engine '{self.engine_id}': {e}\n{traceback.format_exc()}")
            return None
 
    async def async_search_engine(
        self,
        search_query: str,
-       engine_id: str,
+       engine_id: str=None,
        serving_config_id: str = "default_config",
        page_size: int = 10,
        return_snippet: bool = True,
@@ -1077,6 +1091,12 @@ class DiscoveryEngineClient:
            An SearchAsyncPager object to iterate through results asynchronously,
            or None if an error occurs or the async client is not available.
        """
+       if engine_id:
+           self.engine_id = engine_id
+
+       if engine_id is None and self.engine_id is None:
+           raise ValueError("Could not find self.engine_id")
+
        if not self.async_search_client:
            log.error("Cannot call async_search_engine: Async client not initialized.")
            raise RuntimeError("Async client not initialized. Ensure class is instantiated within an async context.")
@@ -1085,7 +1105,7 @@ class DiscoveryEngineClient:
            # Construct the serving config path for an ENGINE (same as sync)
            serving_config_path = (
                f"projects/{self.project_id}/locations/{self.location}/"
-               f"collections/{collection_id}/engines/{engine_id}/"
+               f"collections/{collection_id}/engines/{self.engine_id}/"
                f"servingConfigs/{serving_config_id}"
            )
            log.info(f"Using Async Engine Serving Config Path: {serving_config_path}")
@@ -1138,16 +1158,16 @@ class DiscoveryEngineClient:
                user_pseudo_id=user_pseudo_id,
            )
 
-           log.info(f"Async searching engine '{engine_id}' with request: {request}")
+           log.info(f"Async searching engine '{self.engine_id}' with request: {request}")
            response_pager = await self.async_search_client.search(request)
-           log.info(f"Async search successful for query '{search_query}' against engine '{engine_id}'.")
+           log.info(f"Async search successful for query '{search_query}' against engine '{self.engine_id}'.")
            return response_pager
 
        except GoogleAPIError as e:
-           log.error(f"Async API error searching engine '{engine_id}': {e}")
+           log.error(f"Async API error searching engine '{self.engine_id}': {e}")
            return None
        except Exception as e:
-           log.error(f"Async unexpected error searching engine '{engine_id}': {e}\n{traceback.format_exc()}")
+           log.error(f"Async unexpected error searching engine '{self.engine_id}': {e}\n{traceback.format_exc()}")
            return None
 
    # --- End of DiscoveryEngineClient class ---
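Taken together, the constructor and `search_engine`/`async_search_engine` changes let callers work purely at the engine level: pass `engine_id` once at init and omit it (and any data store ID) on each search. A hedged usage sketch, with placeholder project, location, and engine values:

```python
from sunholo.discovery_engine.discovery_engine_client import DiscoveryEngineClient

# Placeholder identifiers; the constructor now raises if project_id is missing
# or if neither data_store_id nor engine_id is supplied.
client = DiscoveryEngineClient(
    project_id="my-gcp-project",
    engine_id="my-search-engine",
    location="eu",
)

# engine_id can be omitted here because __init__ stored it on self.engine_id;
# the serving config path is built as
# projects/{project}/locations/{location}/collections/{collection}/engines/{engine}/servingConfigs/{config}
pager = client.search_engine(search_query="quarterly revenue", page_size=5)

if pager:
    for page in pager.pages:          # page-wise iteration over search results
        for result in page.results:
            print(result.document.id)
```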
{sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 sunholo/__init__.py,sha256=InRbX4V0-qdNHo9zYH3GEye7ASLR6LX8-SMvPV4Jsaw,1212
-sunholo/custom_logging.py,sha256
+sunholo/custom_logging.py,sha256=PrUXreBF0nZJMfa8A8cbFhTWoaOWWB_i6qwZfnjvRlo,18296
 sunholo/langchain_types.py,sha256=uZ4zvgej_f7pLqjtu4YP7qMC_eZD5ym_5x4pyvA1Ih4,1834
 sunholo/agents/__init__.py,sha256=X2I3pPkGeKWjc3d0QgSpkTyqD8J8JtrEWqwrumf1MMc,391
 sunholo/agents/chat_history.py,sha256=Gph_CdlP2otYnNdR1q1Umyyyvcad2F6K3LxU5yBQ9l0,5387
@@ -60,7 +60,7 @@ sunholo/components/retriever.py,sha256=Wmchv3huAM4w7DIS-a5Lp9Hi7M8pE6vZdxgseiT9S
 sunholo/components/vectorstore.py,sha256=k7GS1Y5c6ZGXSDAJvyCes6dTjhDAi0fjGbVLqpyfzBc,5918
 sunholo/database/__init__.py,sha256=bpB5Nk21kwqYj-qdVnvNgXjLsbflnH4g-San7OHMqR4,283
 sunholo/database/alloydb.py,sha256=x1zUMB-EVWbE2Zvp4nAs2Z-tB_kOZmS45H2lwVHdYnk,11678
-sunholo/database/alloydb_client.py,sha256=
+sunholo/database/alloydb_client.py,sha256=gDYlg3sD88Nd5llGRpO7maRQ1KOnFT9XBFvS7Tx1fXM,56026
 sunholo/database/database.py,sha256=VqhZdkXUNdvWn8sUcUV3YNby1JDVf7IykPVXWBtxo9U,7361
 sunholo/database/lancedb.py,sha256=DyfZntiFKBlVPaFooNN1Z6Pl-LAs4nxWKKuq8GBqN58,715
 sunholo/database/static_dbs.py,sha256=8cvcMwUK6c32AS2e_WguKXWMkFf5iN3g9WHzsh0C07Q,442
@@ -73,9 +73,9 @@ sunholo/database/sql/sb/return_sources.sql,sha256=89KAnxfK8n_qGK9jy1OQT8f9n4uYUt
 sunholo/database/sql/sb/setup.sql,sha256=CvoFvZQev2uWjmFa3aj3m3iuPFzAAJZ0S7Qi3L3-zZI,89
 sunholo/discovery_engine/__init__.py,sha256=hLgqRDJ22Aov9o2QjAEfsVgnL3kMdM-g5p8RJ9OyKdQ,130
 sunholo/discovery_engine/chunker_handler.py,sha256=wkvXl4rFtYfN6AZUKdW9_QD49Whf77BukDbO82UwlAg,7480
-sunholo/discovery_engine/cli.py,sha256=
+sunholo/discovery_engine/cli.py,sha256=ZqP6a1bgZvNdPAytkQdC2cN1U-KEc3fNtv8Ynqay0vY,33055
 sunholo/discovery_engine/create_new.py,sha256=WUi4_xh_dFaGX3xA9jkNKZhaR6LCELjMPeRb0hyj4FU,1226
-sunholo/discovery_engine/discovery_engine_client.py,sha256=
+sunholo/discovery_engine/discovery_engine_client.py,sha256=lB6D05ZOXm9Avl6hM6vJZvPZD_TzNroyBl-E5cJYWAk,52661
 sunholo/discovery_engine/get_ai_search_chunks.py,sha256=I6Dt1CznqEvE7XIZ2PkLqopmjpO96iVEWJJqL5cJjOU,5554
 sunholo/embedder/__init__.py,sha256=sI4N_CqgEVcrMDxXgxKp1FsfsB4FpjoXgPGkl4N_u4I,44
 sunholo/embedder/embed_chunk.py,sha256=did2pKkWM2o0KkRcb0H9l2x_WjCq6OyuHDxGbITFKPM,6530
@@ -168,9 +168,9 @@ sunholo/vertex/init.py,sha256=1OQwcPBKZYBTDPdyU7IM4X4OmiXLdsNV30C-fee2scQ,2875
 sunholo/vertex/memory_tools.py,sha256=tBZxqVZ4InTmdBvLlOYwoSEWu4-kGquc-gxDwZCC4FA,7667
 sunholo/vertex/safety.py,sha256=S9PgQT1O_BQAkcqauWncRJaydiP8Q_Jzmu9gxYfy1VA,2482
 sunholo/vertex/type_dict_to_json.py,sha256=uTzL4o9tJRao4u-gJOFcACgWGkBOtqACmb6ihvCErL8,4694
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
+sunholo-0.134.1.dist-info/licenses/LICENSE.txt,sha256=SdE3QjnD3GEmqqg9EX3TM9f7WmtOzqS1KJve8rhbYmU,11345
+sunholo-0.134.1.dist-info/METADATA,sha256=HV6eQJwKUSgKpPz2VE9Uv01WNtoy1FU8eAMNpBpDXjA,10067
+sunholo-0.134.1.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
+sunholo-0.134.1.dist-info/entry_points.txt,sha256=bZuN5AIHingMPt4Ro1b_T-FnQvZ3teBes-3OyO0asl4,49
+sunholo-0.134.1.dist-info/top_level.txt,sha256=wt5tadn5--5JrZsjJz2LceoUvcrIvxjHJe-RxuudxAk,8
+sunholo-0.134.1.dist-info/RECORD,,
{sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/WHEEL
File without changes
{sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/entry_points.txt
File without changes
{sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/licenses/LICENSE.txt
File without changes
{sunholo-0.132.0.dist-info → sunholo-0.134.1.dist-info}/top_level.txt
File without changes