sunholo 0.126.4__py3-none-any.whl → 0.127.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- sunholo/agents/dispatch_to_qa.py +5 -6
- sunholo/database/alloydb_client.py +399 -1
- sunholo/discovery_engine/discovery_engine_client.py +1 -0
- sunholo/utils/config_class.py +19 -12
- {sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/METADATA +3 -1
- {sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/RECORD +10 -10
- {sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/WHEEL +0 -0
- {sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/entry_points.txt +0 -0
- {sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/licenses/LICENSE.txt +0 -0
- {sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/top_level.txt +0 -0
sunholo/agents/dispatch_to_qa.py
CHANGED
@@ -21,11 +21,6 @@ import traceback
 from .route import route_endpoint
 import os
 
-try:
-    from langfuse import Langfuse
-    langfuse = Langfuse()
-except ImportError:
-    langfuse = None
 
 def prep_request_payload(user_input, chat_history, vector_name, stream, **kwargs):
     """
@@ -93,7 +88,11 @@ def prep_request_payload(user_input, chat_history, vector_name, stream, **kwargs
     return qna_endpoint, qna_data
 
 def add_langfuse_trace(qna_endpoint):
-
+    try:
+        from langfuse import Langfuse
+        langfuse = Langfuse()
+    except Exception as err:
+        log.error(err)
         return None
 
     trace = langfuse.trace(name = f'dispatch/{os.path.basename(qna_endpoint)}')
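Net effect of the two hunks above: the Langfuse client is no longer constructed at import time; `add_langfuse_trace` now imports and instantiates it lazily and logs (rather than raises) when the optional dependency is unavailable. A minimal sketch of that lazy optional-dependency pattern, with an illustrative function name and logger rather than sunholo's own code:

```python
# Sketch of the lazy optional-dependency pattern adopted above (illustrative names).
import logging

log = logging.getLogger(__name__)

def get_tracer():
    """Create a Langfuse client only when tracing is actually requested."""
    try:
        from langfuse import Langfuse   # optional dependency, imported lazily
        return Langfuse()               # typically configured via LANGFUSE_* env vars
    except Exception as err:            # package missing or client init failed
        log.error(err)
        return None
```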
sunholo/database/alloydb_client.py
CHANGED
@@ -882,4 +882,402 @@ class AlloyDBClient:
 
         log.info(f"Inserted data into table {table_name}")
 
-        return result
+        return result
+
+    async def get_table_columns(self, table_name, schema="public"):
+        """
+        Fetch column information for an existing table.
+
+        Args:
+            table_name (str): The table name to get columns for
+            schema (str): Database schema, defaults to "public"
+
+        Returns:
+            List[dict]: List of column information dictionaries with keys:
+                - name: column name
+                - type: PostgreSQL data type
+                - is_nullable: whether the column allows NULL values
+                - default: default value if any
+        """
+        try:
+            query = f"""
+            SELECT
+                column_name,
+                data_type,
+                is_nullable,
+                column_default,
+                character_maximum_length
+            FROM
+                information_schema.columns
+            WHERE
+                table_name = '{table_name}'
+                AND table_schema = '{schema}'
+            ORDER BY
+                ordinal_position;
+            """
+
+            if self.engine_type == "pg8000":
+                result = self._execute_sql_pg8000(query)
+                rows = result.fetchall() if hasattr(result, 'fetchall') else result
+            else:
+                rows = await self._execute_sql_async_langchain(query)
+
+            columns = []
+            for row in rows:
+                column_info = {
+                    "name": row[0],
+                    "type": row[1],
+                    "is_nullable": row[2] == "YES",
+                    "default": row[3],
+                    "max_length": row[4]
+                }
+                columns.append(column_info)
+
+            log.info(f"Retrieved {len(columns)} columns for table '{table_name}'")
+            return columns
+
+        except Exception as e:
+            log.error(f"Error getting table columns: {e}")
+            return []
+
+    def map_data_to_columns(self, data, column_info, case_sensitive=False):
+        """
+        Map data dictionary to available table columns, handling case sensitivity.
+
+        Args:
+            data (dict): Dictionary of data to map
+            column_info (list): List of column information dictionaries from get_table_columns
+            case_sensitive (bool): Whether to match column names case-sensitively
+
+        Returns:
+            dict: Filtered data dictionary with only columns that exist in the table
+        """
+        if not column_info:
+            return data  # No column info, return original data
+
+        # Create lookup dictionaries for columns
+        columns = {}
+        columns_lower = {}
+
+        for col in column_info:
+            col_name = col["name"]
+            columns[col_name] = col
+            columns_lower[col_name.lower()] = col_name
+
+        # Filter and map the data
+        filtered_data = {}
+        for key, value in data.items():
+            if case_sensitive:
+                # Case-sensitive matching
+                if key in columns:
+                    filtered_data[key] = value
+            else:
+                # Case-insensitive matching
+                key_lower = key.lower()
+                if key_lower in columns_lower:
+                    # Use the original column name from the database
+                    original_key = columns_lower[key_lower]
+                    filtered_data[original_key] = value
+
+        return filtered_data
+
+    def safe_convert_value(self, value, target_type):
+        """
+        Safely convert a value to the target PostgreSQL type.
+        Handles various formats and placeholder values.
+
+        Args:
+            value: The value to convert
+            target_type (str): PostgreSQL data type name
+
+        Returns:
+            The converted value appropriate for the target type, or None if conversion fails
+        """
+        if value is None:
+            return None
+
+        # Handle placeholder values
+        if isinstance(value, str):
+            if value.startswith("No ") or value.lower() in ("none", "n/a", "null", ""):
+                # Special placeholders are converted to None for most types
+                return None
+
+        try:
+            # Handle different target types
+            if target_type in ("integer", "bigint", "smallint"):
+                if isinstance(value, (int, float)):
+                    return int(value)
+                elif isinstance(value, str) and value.strip():
+                    # Try to extract a number from the string
+                    cleaned = value.replace(',', '')
+                    # Extract the first number if there's text
+                    import re
+                    match = re.search(r'[-+]?\d+', cleaned)
+                    if match:
+                        return int(match.group())
+                return None
+
+            elif target_type in ("numeric", "decimal", "real", "double precision"):
+                if isinstance(value, (int, float)):
+                    return float(value)
+                elif isinstance(value, str) and value.strip():
+                    # Remove currency symbols and try to convert
+                    cleaned = value.replace('$', '').replace('€', '').replace('£', '')
+                    cleaned = cleaned.replace(',', '.')
+                    # Extract the first number if there's text
+                    import re
+                    match = re.search(r'[-+]?\d+(\.\d+)?', cleaned)
+                    if match:
+                        return float(match.group())
+                return None
+
+            elif target_type == "boolean":
+                if isinstance(value, bool):
+                    return value
+                elif isinstance(value, (int, float)):
+                    return bool(value)
+                elif isinstance(value, str):
+                    value_lower = value.lower()
+                    if value_lower in ("true", "t", "yes", "y", "1"):
+                        return True
+                    elif value_lower in ("false", "f", "no", "n", "0"):
+                        return False
+                return None
+
+            elif target_type.startswith("timestamp"):
+                if isinstance(value, str):
+                    # For dates, keep the string format - DB driver will handle conversion
+                    return value
+                # Other types, just return as is
+                return value
+
+            elif target_type == "jsonb" or target_type == "json":
+                if isinstance(value, (dict, list)):
+                    return json.dumps(value)
+                elif isinstance(value, str):
+                    # Validate it's valid JSON
+                    try:
+                        json.loads(value)
+                        return value
+                    except:
+                        return None
+                return None
+
+            else:
+                # For text and other types, convert to string
+                if isinstance(value, (dict, list)):
+                    return json.dumps(value)
+                elif value is not None:
+                    return str(value)
+                return None
+
+        except Exception as e:
+            log.debug(f"Conversion error for value '{value}' to {target_type}: {e}")
+            return None
+
+    async def insert_rows_safely(self, table_name, rows, metadata=None, continue_on_error=False):
+        """
+        Insert multiple rows into a table with error handling for individual rows.
+
+        Args:
+            table_name (str): The table to insert into
+            rows (list): List of dictionaries containing row data
+            metadata (dict, optional): Additional metadata to include in each row
+            continue_on_error (bool): Whether to continue if some rows fail
+
+        Returns:
+            dict: {
+                'success': bool,
+                'total_rows': int,
+                'inserted_rows': int,
+                'failed_rows': int,
+                'errors': list of errors with row data
+            }
+        """
+        if not rows:
+            return {'success': True, 'total_rows': 0, 'inserted_rows': 0, 'failed_rows': 0, 'errors': []}
+
+        # Get table columns for mapping and type conversion
+        columns = await self.get_table_columns(table_name)
+        column_map = {col['name']: col for col in columns}
+        column_map_lower = {col['name'].lower(): col for col in columns}
+
+        results = {
+            'success': True,
+            'total_rows': len(rows),
+            'inserted_rows': 0,
+            'failed_rows': 0,
+            'errors': []
+        }
+
+        for i, row in enumerate(rows):
+            try:
+                # Map row data to actual table columns
+                filtered_row = {}
+
+                # First, do case-insensitive mapping
+                for key, value in row.items():
+                    key_lower = key.lower()
+                    if key_lower in column_map_lower:
+                        col_info = column_map_lower[key_lower]
+                        col_name = col_info['name']  # Use the correct case from DB
+                        col_type = col_info['type']
+
+                        # Try to convert value to the appropriate type
+                        converted_value = self.safe_convert_value(value, col_type)
+                        filtered_row[col_name] = converted_value
+
+                # Add metadata if provided
+                if metadata:
+                    for key, value in metadata.items():
+                        key_lower = key.lower()
+                        if key_lower in column_map_lower:
+                            col_name = column_map_lower[key_lower]['name']
+                            filtered_row[col_name] = value
+
+                # Insert the row
+                result = await self._insert_single_row(table_name, filtered_row)
+                results['inserted_rows'] += 1
+
+            except Exception as e:
+                error_info = {
+                    'row_index': i,
+                    'error': str(e),
+                    'row_data': row
+                }
+                results['errors'].append(error_info)
+                results['failed_rows'] += 1
+
+                log.error(f"Error inserting row {i}: {e}")
+
+                if not continue_on_error:
+                    results['success'] = False
+                    return results
+
+        # Overall success is true if any rows were inserted successfully
+        results['success'] = results['inserted_rows'] > 0
+        return results
+
+    async def create_table_with_columns(self, table_name, column_definitions, if_not_exists=True):
+        """
+        Create a table with explicit column definitions.
+
+        Args:
+            table_name (str): The name of the table to create
+            column_definitions (list): List of column definition dictionaries:
+                - name: Column name
+                - type: PostgreSQL data type
+                - nullable: Whether column allows NULL (default True)
+                - default: Default value expression (optional)
+                - primary_key: Whether this is a primary key (default False)
+            if_not_exists (bool): Whether to use IF NOT EXISTS clause
+
+        Returns:
+            Result of the execution
+        """
+        if not column_definitions:
+            raise ValueError("No column definitions provided")
+
+        # Generate column definition strings
+        column_strs = []
+
+        # Check if we need to add a serial primary key
+        has_primary_key = any(col.get('primary_key', False) for col in column_definitions)
+
+        if not has_primary_key:
+            # Add an ID column as primary key
+            column_strs.append("id SERIAL PRIMARY KEY")
+
+        for col in column_definitions:
+            col_name = col.get('name')
+            col_type = col.get('type', 'TEXT')
+            nullable = col.get('nullable', True)
+            default = col.get('default')
+            primary_key = col.get('primary_key', False)
+
+            if not col_name:
+                continue
+
+            # Build the column definition
+            col_def = f'"{col_name}" {col_type}'
+
+            if primary_key:
+                col_def += " PRIMARY KEY"
+
+            if not nullable:
+                col_def += " NOT NULL"
+
+            if default is not None:
+                col_def += f" DEFAULT {default}"
+
+            column_strs.append(col_def)
+
+        # Create the SQL statement
+        exists_clause = "IF NOT EXISTS " if if_not_exists else ""
+        columns_sql = ",\n    ".join(column_strs)
+
+        create_table_sql = f"""
+        CREATE TABLE {exists_clause}"{table_name}" (
+            {columns_sql}
+        )
+        """
+
+        # Execute the SQL based on engine type
+        log.info(f"Creating table '{table_name}' with explicit column definitions")
+        try:
+            if self.engine_type == "pg8000":
+                result = self._execute_sql_pg8000(create_table_sql)
+            else:
+                result = await self._execute_sql_async_langchain(create_table_sql)
+
+            log.info(f"Table '{table_name}' created successfully")
+            return result
+        except Exception as e:
+            log.error(f"Error creating table: {e}")
+            raise
+
+    def _get_sql_type_safe(self, value):
+        """
+        Enhanced version of _get_sql_type with better type detection.
+        Handles placeholder values and common patterns.
+
+        Args:
+            value: The value to determine the column type
+
+        Returns:
+            str: SQL type
+        """
+        if value is None:
+            return "TEXT"
+
+        # Handle placeholder values
+        if isinstance(value, str) and (value.startswith("No ") or value.lower() in ("none", "n/a", "null", "")):
+            return "TEXT"  # Always use TEXT for placeholder values
+
+        if isinstance(value, dict):
+            return "JSONB"
+        elif isinstance(value, list):
+            return "JSONB"
+        elif isinstance(value, bool):
+            return "BOOLEAN"
+        elif isinstance(value, int):
+            return "INTEGER"
+        elif isinstance(value, float):
+            return "NUMERIC"
+        else:
+            # Check if it's a date string
+            if isinstance(value, str):
+                # Try to detect date formats
+                value_lower = value.lower()
+                if len(value) in (8, 10) and ('-' in value or '/' in value):
+                    # Likely a date (YYYY-MM-DD or MM/DD/YYYY)
+                    return "DATE"
+                elif 'date' in value_lower or 'time' in value_lower:
+                    # Column name hint suggests it's a date
+                    return "TIMESTAMP"
+                elif any(currency in value for currency in ('$', '€', '£')):
+                    # Likely a monetary value
+                    return "NUMERIC"
+
+            # Default to TEXT
+            return "TEXT"
sunholo/discovery_engine/discovery_engine_client.py
CHANGED
@@ -614,6 +614,7 @@ class DiscoveryEngineClient:
             reconciliation_mode=discoveryengine.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL,
         )
 
+        log.debug(f"Making import_document_request: {request}")
         return self._import_document_request(request)
 
     def _create_unique_gsuri_docid(self, gcs_uri:str):
sunholo/utils/config_class.py
CHANGED
@@ -112,18 +112,6 @@ class ConfigManager:
         """
         Helper function to load a config file and update the cache.
 
-        Args:
-            config_file (str): The path to the configuration file.
-            filename (str): The name of the configuration file.
-            is_local (bool): Indicates if the config file is from the local folder.
-
-        Returns:
-            dict: The loaded configuration.
-        """
-    def _reload_config_file(self, config_file, filename, is_local=False):
-        """
-        Helper function to load a config file and update the cache.
-
         Args:
             config_file (str): The path to the configuration file.
             filename (str): The name of the configuration file.
@@ -244,4 +232,23 @@
             return agents[key]
         else:
            return agents.get("default")
+
+    def permissionConfig(self, key: str):
+        """
+        Fetch a key from 'permissionConfig' kind configuration.
 
+        Args:
+            key (str): The key to fetch from the configuration.
+
+        Returns:
+            str: The value associated with the specified key.
+        """
+        self._check_and_reload_configs()
+        config = self.configs_by_kind.get('permissionConfig')
+        if not config:
+            return None
+        agents = config.get('permissions')
+        if key in agents:
+            return agents[key]
+        else:
+            return None
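Besides removing a duplicated docstring and a stray nested `def _reload_config_file` stub, `ConfigManager` gains `permissionConfig(key)`, which reloads configs if stale, looks the key up under the `permissions` map of the config with kind `permissionConfig`, and returns `None` when the kind or key is absent. A hedged sketch of how it might be called; the constructor argument and the YAML layout beyond `kind`/`permissions` are assumptions, not shown in this diff:

```python
# Illustrative only: assumes ConfigManager is keyed by a name as in existing sunholo
# usage, and a permissionConfig-kind YAML roughly like:
#
#   kind: permissionConfig
#   permissions:
#     read_users:
#       - alice@example.com
#
from sunholo.utils.config_class import ConfigManager

config = ConfigManager("my_vac")  # constructor argument is an assumption
read_users = config.permissionConfig("read_users")
if read_users is None:
    print("No 'read_users' entry under permissions (or no permissionConfig loaded)")
else:
    print(f"read_users: {read_users}")
```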
{sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sunholo
-Version: 0.126.4
+Version: 0.127.1
 Summary: Large Language Model DevOps - a package to help deploy LLMs to the Cloud.
 Author-email: Holosun ApS <multivac@sunholo.com>
 License: Apache License, Version 2.0
@@ -142,6 +142,7 @@ Requires-Dist: google-cloud-discoveryengine>=0.13.4; extra == "gcp"
 Requires-Dist: google-cloud-texttospeech; extra == "gcp"
 Requires-Dist: google-genai>=0.2.2; extra == "gcp"
 Requires-Dist: google-generativeai>=0.8.3; extra == "gcp"
+Requires-Dist: langchain; extra == "gcp"
 Requires-Dist: langchain-google-genai>=2.0.0; extra == "gcp"
 Requires-Dist: langchain_google_alloydb_pg>=0.2.2; extra == "gcp"
 Requires-Dist: langchain-google-vertexai; extra == "gcp"
@@ -164,6 +165,7 @@ Requires-Dist: flask; extra == "http"
 Requires-Dist: gunicorn; extra == "http"
 Requires-Dist: httpcore; extra == "http"
 Requires-Dist: httpx; extra == "http"
+Requires-Dist: langchain; extra == "http"
 Requires-Dist: langfuse; extra == "http"
 Requires-Dist: python-socketio; extra == "http"
 Requires-Dist: requests; extra == "http"
{sunholo-0.126.4.dist-info → sunholo-0.127.1.dist-info}/RECORD
CHANGED
@@ -3,7 +3,7 @@ sunholo/custom_logging.py,sha256=YfIN1oP3dOEkkYkyRBU8BGS3uJFGwUDsFCl8mIVbwvE,122
 sunholo/langchain_types.py,sha256=uZ4zvgej_f7pLqjtu4YP7qMC_eZD5ym_5x4pyvA1Ih4,1834
 sunholo/agents/__init__.py,sha256=X2I3pPkGeKWjc3d0QgSpkTyqD8J8JtrEWqwrumf1MMc,391
 sunholo/agents/chat_history.py,sha256=Gph_CdlP2otYnNdR1q1Umyyyvcad2F6K3LxU5yBQ9l0,5387
-sunholo/agents/dispatch_to_qa.py,sha256=
+sunholo/agents/dispatch_to_qa.py,sha256=NHihwAoCJ5_Lk11e_jZnucVUGQyZHCB-YpkfMHBCpQk,8882
 sunholo/agents/langserve.py,sha256=C46ph2mnygr6bdHijYWYyfQDI9ylAF0_9Kx2PfcCJpU,4414
 sunholo/agents/pubsub.py,sha256=TscZN_6am6DfaQkC-Yl18ZIBOoLE-0nDSiil6GpQEh4,1344
 sunholo/agents/route.py,sha256=mV8tGABbSqcg3PQL02MgQOs41gKEHLMyIJJJcTuFdbE,2988
@@ -60,7 +60,7 @@ sunholo/components/retriever.py,sha256=Wmchv3huAM4w7DIS-a5Lp9Hi7M8pE6vZdxgseiT9S
 sunholo/components/vectorstore.py,sha256=k7GS1Y5c6ZGXSDAJvyCes6dTjhDAi0fjGbVLqpyfzBc,5918
 sunholo/database/__init__.py,sha256=bpB5Nk21kwqYj-qdVnvNgXjLsbflnH4g-San7OHMqR4,283
 sunholo/database/alloydb.py,sha256=x1zUMB-EVWbE2Zvp4nAs2Z-tB_kOZmS45H2lwVHdYnk,11678
-sunholo/database/alloydb_client.py,sha256=
+sunholo/database/alloydb_client.py,sha256=B_vCN9d2wQj77TGoyHAMryCNKljKt0ehtXNTdASqTIk,50297
 sunholo/database/database.py,sha256=VqhZdkXUNdvWn8sUcUV3YNby1JDVf7IykPVXWBtxo9U,7361
 sunholo/database/lancedb.py,sha256=DyfZntiFKBlVPaFooNN1Z6Pl-LAs4nxWKKuq8GBqN58,715
 sunholo/database/static_dbs.py,sha256=8cvcMwUK6c32AS2e_WguKXWMkFf5iN3g9WHzsh0C07Q,442
@@ -75,7 +75,7 @@ sunholo/discovery_engine/__init__.py,sha256=hLgqRDJ22Aov9o2QjAEfsVgnL3kMdM-g5p8R
 sunholo/discovery_engine/chunker_handler.py,sha256=44qlTpdtz2GKzrhoQrxVMk-RPVFp7vQDPJoe9KmCcsw,7517
 sunholo/discovery_engine/cli.py,sha256=KGVle5rkLL49oF9TQhrGI--8017IvvLOEoYur545Qb0,12790
 sunholo/discovery_engine/create_new.py,sha256=WUi4_xh_dFaGX3xA9jkNKZhaR6LCELjMPeRb0hyj4FU,1226
-sunholo/discovery_engine/discovery_engine_client.py,sha256=
+sunholo/discovery_engine/discovery_engine_client.py,sha256=0KhKRFKCvqvtkUOIrCXNk5353t9duuEtUQDhQnN2B24,37335
 sunholo/discovery_engine/get_ai_search_chunks.py,sha256=I6Dt1CznqEvE7XIZ2PkLqopmjpO96iVEWJJqL5cJjOU,5554
 sunholo/embedder/__init__.py,sha256=sI4N_CqgEVcrMDxXgxKp1FsfsB4FpjoXgPGkl4N_u4I,44
 sunholo/embedder/embed_chunk.py,sha256=did2pKkWM2o0KkRcb0H9l2x_WjCq6OyuHDxGbITFKPM,6530
@@ -151,7 +151,7 @@ sunholo/utils/__init__.py,sha256=Hv02T5L2zYWvCso5hzzwm8FQogwBq0OgtUbN_7Quzqc,89
 sunholo/utils/api_key.py,sha256=Ct4bIAQZxzPEw14hP586LpVxBAVi_W9Serpy0BK-7KI,244
 sunholo/utils/big_context.py,sha256=HuP9_r_Nx1jvZHxjMEihgoZAXmnCh80zzsj1fq3mIOg,6021
 sunholo/utils/config.py,sha256=bz0ODJyqnoHQIsk4pmNpVxxq5WvwS0SfOq4cnCjQPJk,9105
-sunholo/utils/config_class.py,sha256=
+sunholo/utils/config_class.py,sha256=zhp71gNZb-t1cbkN2N3zwCAEiX0FSvKaKlpBxosgI4U,9750
 sunholo/utils/config_schema.py,sha256=Wv-ncitzljOhgbDaq9qnFqH5LCuxNv59dTGDWgd1qdk,4189
 sunholo/utils/gcp.py,sha256=lus1HH8YhFInw6QRKwfvKZq-Lz-2KQg4ips9v1I_3zE,4783
 sunholo/utils/gcp_project.py,sha256=Fa0IhCX12bZ1ctF_PKN8PNYd7hihEUfb90kilBfUDjg,1411
@@ -168,9 +168,9 @@ sunholo/vertex/init.py,sha256=1OQwcPBKZYBTDPdyU7IM4X4OmiXLdsNV30C-fee2scQ,2875
 sunholo/vertex/memory_tools.py,sha256=tBZxqVZ4InTmdBvLlOYwoSEWu4-kGquc-gxDwZCC4FA,7667
 sunholo/vertex/safety.py,sha256=S9PgQT1O_BQAkcqauWncRJaydiP8Q_Jzmu9gxYfy1VA,2482
 sunholo/vertex/type_dict_to_json.py,sha256=uTzL4o9tJRao4u-gJOFcACgWGkBOtqACmb6ihvCErL8,4694
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
+sunholo-0.127.1.dist-info/licenses/LICENSE.txt,sha256=SdE3QjnD3GEmqqg9EX3TM9f7WmtOzqS1KJve8rhbYmU,11345
+sunholo-0.127.1.dist-info/METADATA,sha256=kKvoiijhfyGLL7CfJOEYZGYiCuIAZ6m2hGagaNjfCAQ,10084
+sunholo-0.127.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+sunholo-0.127.1.dist-info/entry_points.txt,sha256=bZuN5AIHingMPt4Ro1b_T-FnQvZ3teBes-3OyO0asl4,49
+sunholo-0.127.1.dist-info/top_level.txt,sha256=wt5tadn5--5JrZsjJz2LceoUvcrIvxjHJe-RxuudxAk,8
+sunholo-0.127.1.dist-info/RECORD,,