sunholo 0.139.1__py3-none-any.whl → 0.140.2__py3-none-any.whl
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- sunholo/agents/chat_history.py +63 -0
- sunholo/agents/flask/vac_routes.py +117 -2
- {sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/METADATA +1 -1
- {sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/RECORD +8 -8
- {sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/WHEEL +1 -1
- {sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/entry_points.txt +0 -0
- {sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/licenses/LICENSE.txt +0 -0
- {sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/top_level.txt +0 -0
sunholo/agents/chat_history.py
CHANGED
@@ -1,6 +1,69 @@
 import json
 from ..custom_logging import log
 
+
+async def extract_chat_history_async(chat_history=None):
+    """
+    Extracts paired chat history between human and AI messages.
+
+    For this lightweight processing, we use a simpler approach that minimizes overhead.
+
+    Args:
+        chat_history (list): List of chat messages.
+
+    Returns:
+        list: List of tuples with paired human and AI messages.
+    """
+    if not chat_history:
+        log.info("No chat history found")
+        return []
+
+    log.info(f"Extracting chat history: {chat_history}")
+    paired_messages = []
+
+    # Handle special case of initial bot message
+    if chat_history and is_bot(chat_history[0]):
+        first_message = chat_history[0]
+        log.info(f"Extracting first_message: {first_message}")
+        blank_human_message = {"name": "Human", "content": "", "embeds": []}
+
+        # Since create_message_element is so lightweight, we don't need async here
+        blank_element = create_message_element(blank_human_message)
+        bot_element = create_message_element(first_message)
+
+        paired_messages.append((blank_element, bot_element))
+        chat_history = chat_history[1:]
+
+    # Pre-process all messages in one batch (more efficient than one-by-one)
+    message_types = []
+    message_contents = []
+
+    for message in chat_history:
+        is_human_msg = is_human(message)
+        is_bot_msg = is_bot(message)
+
+        # Extract content for all messages at once
+        content = create_message_element(message)
+
+        message_types.append((is_human_msg, is_bot_msg))
+        message_contents.append(content)
+
+    # Pair messages efficiently
+    last_human_message = ""
+    for i, ((is_human_msg, is_bot_msg), content) in enumerate(zip(message_types, message_contents)):
+        if is_human_msg:
+            last_human_message = content
+            log.info(f"Extracted human message: {last_human_message}")
+        elif is_bot_msg:
+            ai_message = content
+            log.info(f"Extracted AI message: {ai_message}")
+            paired_messages.append((last_human_message, ai_message))
+            last_human_message = ""
+
+    log.info(f"Paired messages: {paired_messages}")
+    return paired_messages
+
+
 def extract_chat_history(chat_history=None):
     """
     Extracts paired chat history between human and AI messages.
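For orientation, here is a minimal usage sketch of the new helper (not part of the diff). It assumes chat messages are dicts with `name`/`content` keys, as the `blank_human_message` placeholder above suggests, and that the module's existing `is_bot`/`is_human`/`create_message_element` helpers handle that shape; the sample values and the expected pairing are illustrative only.

```python
import asyncio

from sunholo.agents.chat_history import extract_chat_history_async

# Illustrative message shape; the real parsing is done by the module's own
# is_bot / is_human / create_message_element helpers, which are not shown in this diff.
history = [
    {"name": "AI", "content": "Hello! How can I help?", "embeds": []},
    {"name": "Human", "content": "What changed in 0.140.2?", "embeds": []},
    {"name": "AI", "content": "An async chat-history extractor and async VAC streaming.", "embeds": []},
]

async def main():
    pairs = await extract_chat_history_async(history)
    # Expected shape (roughly): a leading ("", first-bot-message) pair, then
    # (human, ai) tuples for each exchange.
    print(pairs)

asyncio.run(main())
```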
sunholo/agents/flask/vac_routes.py
CHANGED
@@ -8,6 +8,7 @@ import inspect
 import asyncio
 
 from ...agents import extract_chat_history, handle_special_commands
+from ..chat_history import extract_chat_history_async
 from ...qna.parsers import parse_output
 from ...streaming import start_streaming_chat, start_streaming_chat_async
 from ...archive import archive_qa
@@ -57,11 +58,12 @@ if __name__ == "__main__":
     ```
 
     """
-    def __init__(self, app, stream_interpreter, vac_interpreter=None, additional_routes=None):
+    def __init__(self, app, stream_interpreter: callable, vac_interpreter:callable=None, additional_routes:dict=None, async_stream:bool=False):
         self.app = app
         self.stream_interpreter = stream_interpreter
         self.vac_interpreter = vac_interpreter or partial(self.vac_interpreter_default)
         self.additional_routes = additional_routes if additional_routes is not None else []
+        self.async_stream = async_stream
         self.register_routes()
 
 
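A sketch of how the new `async_stream` flag might be used when wiring up the routes (not part of the diff). The class name `VACRoutes` and the interpreter signature below are assumptions; the exact contract expected by `start_streaming_chat_async` is defined elsewhere in the package and not shown here.

```python
from flask import Flask

# Assumed import path and class name for the routes class defined in vac_routes.py.
from sunholo.agents.flask.vac_routes import VACRoutes

app = Flask(__name__)

async def my_stream_interpreter(question, vector_name, chat_history, **kwargs):
    # Hypothetical async interpreter; the real signature and return contract are
    # set by start_streaming_chat_async and are not shown in this diff.
    return {"answer": f"You asked {vector_name}: {question}"}

# async_stream=True makes register_routes() bind /vac/streaming/<vector_name>
# to handle_stream_vac_async, which requires a coroutine-function interpreter.
VACRoutes(app, stream_interpreter=my_stream_interpreter, async_stream=True)
```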
@@ -99,7 +101,15 @@ if __name__ == "__main__":
         self.app.route('/vac/streaming/<vector_name>',
                        methods=['POST'],
                        provide_automatic_options=False)(self.handle_stream_vac)
-
+
+        if self.async_stream: # Use async treatment
+            self.app.route('/vac/streaming/<vector_name>',
+                           methods=['POST'],
+                           provide_automatic_options=False)(self.handle_stream_vac_async)
+        else:
+            self.app.route('/vac/streaming/<vector_name>',
+                           methods=['POST'],
+                           provide_automatic_options=False)(self.handle_stream_vac)
         # Static VAC
         self.app.route('/vac/<vector_name>',
                        methods=['POST'],
@@ -332,6 +342,51 @@ if __name__ == "__main__":
 
         return response
 
+    async def handle_stream_vac_async(self, vector_name):
+        observed_stream_interpreter = self.stream_interpreter
+        is_async = inspect.iscoroutinefunction(self.stream_interpreter)
+
+        if not is_async:
+            raise ValueError(f"Stream interpreter must be async: {observed_stream_interpreter}")
+
+        # Use the async version of prep_vac
+        prep = await self.prep_vac_async(request, vector_name)
+        log.info(f"Processing prep: {prep}")
+        all_input = prep["all_input"]
+
+        log.info(f'Streaming data with: {all_input}')
+
+        async def generate_response_content():
+            try:
+                # Direct async handling without the queue/thread approach
+                async_gen = start_streaming_chat_async(
+                    question=all_input["user_input"],
+                    vector_name=vector_name,
+                    qna_func_async=observed_stream_interpreter,
+                    chat_history=all_input["chat_history"],
+                    wait_time=all_input["stream_wait_time"],
+                    timeout=all_input["stream_timeout"],
+                    **all_input["kwargs"]
+                )
+
+                log.info(f"{async_gen=}")
+                async for chunk in async_gen:
+                    if isinstance(chunk, dict) and 'answer' in chunk:
+                        await archive_qa(chunk, vector_name)
+                        yield json.dumps(chunk)
+                    else:
+                        yield chunk
+
+            except Exception as e:
+                yield f"Streaming Error: {str(e)} {traceback.format_exc()}"
+
+        response = Response(generate_response_content(), content_type='text/plain; charset=utf-8')
+        response.headers['Transfer-Encoding'] = 'chunked'
+
+        log.debug(f"streaming response: {response}")
+
+        return response
+
     @staticmethod
     async def _async_generator_to_stream(async_gen_func):
         """Helper function to stream the async generator's values to the client."""
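A hedged client-side sketch for consuming the async streaming endpoint (not part of the diff). The host, port, and VAC name are placeholders; the payload keys mirror what `prep_vac_async` pops in the hunk below, and the plain-text chunked response matches the `Response` built in `handle_stream_vac_async` above.

```python
import requests

# Placeholder host/port and VAC name; the route path comes from register_routes().
url = "http://localhost:8080/vac/streaming/my_vac"
payload = {
    "user_input": "Summarise the 0.140.2 changes",
    "chat_history": [],       # optional; paired server-side by extract_chat_history_async
    "stream_wait_time": 7,    # defaults mirrored from prep_vac_async
    "stream_timeout": 120,
}

with requests.post(url, json=payload, stream=True) as resp:
    for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
        # Intermediate chunks are plain text; a final chunk may be a JSON object
        # containing an "answer" key (see generate_response_content above).
        print(chunk, end="", flush=True)
```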
@@ -699,6 +754,66 @@ if __name__ == "__main__":
             "vac_config": vac_config
         }
 
+    async def prep_vac_async(self, request, vector_name):
+        """Async version of prep_vac."""
+        # Parse request data
+        if request.content_type.startswith('application/json'):
+            data = request.get_json()
+        elif request.content_type.startswith('multipart/form-data'):
+            data = request.form.to_dict()
+            if 'file' in request.files:
+                file = request.files['file']
+                if file.filename != '':
+                    log.info(f"Found file: {file.filename} to upload to GCS")
+                    try:
+                        # Make file upload async if possible
+                        image_uri, mime_type = await self.handle_file_upload_async(file, vector_name)
+                        data["image_uri"] = image_uri
+                        data["mime"] = mime_type
+                    except Exception as e:
+                        log.error(traceback.format_exc())
+                        return jsonify({'error': str(e), 'traceback': traceback.format_exc()}), 500
+                else:
+                    log.error("No file selected")
+                    return jsonify({"error": "No file selected"}), 400
+        else:
+            return jsonify({"error": "Unsupported content type"}), 400
+
+        log.info(f"vac/{vector_name} got data: {data}")
+
+        # Run these operations concurrently
+        tasks = []
+
+        # Extract other data while configs load
+        user_input = data.pop('user_input').strip()
+        stream_wait_time = data.pop('stream_wait_time', 7)
+        stream_timeout = data.pop('stream_timeout', 120)
+        chat_history = data.pop('chat_history', None)
+        vector_name_param = data.pop('vector_name', vector_name)
+        data.pop('trace_id', None) # to ensure not in kwargs
+
+        # Task 3: Process chat history
+        chat_history_task = asyncio.create_task(extract_chat_history_async(chat_history))
+        tasks.append(chat_history_task)
+
+        # Await all tasks concurrently
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+
+        paired_messages = results[0] if not isinstance(results[0], Exception) else []
+
+        # Only create span after we have trace
+        all_input = {
+            'user_input': user_input,
+            'vector_name': vector_name_param,
+            'chat_history': paired_messages,
+            'stream_wait_time': stream_wait_time,
+            'stream_timeout': stream_timeout,
+            'kwargs': data
+        }
+
+        return {
+            "all_input": all_input
+        }
+
     def handle_file_upload(self, file, vector_name):
         try:
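A worked sketch of how `prep_vac_async` reshapes a request body, with illustrative values only (the chat-history pairing depends on the `is_human`/`is_bot` helpers, and the exact strings are not guaranteed):

```python
# Hypothetical JSON POST body sent to /vac/streaming/my_vac:
data = {
    "user_input": "  What changed?  ",
    "chat_history": [
        {"name": "Human", "content": "hi"},
        {"name": "AI", "content": "hello"},
    ],
    "stream_wait_time": 5,
    "temperature": 0.2,      # unrecognised keys fall through into kwargs
    "trace_id": "abc123",    # popped and discarded so it never reaches kwargs
}

# prep_vac_async would reduce this to roughly:
all_input = {
    "user_input": "What changed?",       # .strip() applied
    "vector_name": "my_vac",             # from the URL unless a vector_name field overrides it
    "chat_history": [("hi", "hello")],   # paired by extract_chat_history_async
    "stream_wait_time": 5,
    "stream_timeout": 120,               # default when not supplied
    "kwargs": {"temperature": 0.2},
}
```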
{sunholo-0.139.1.dist-info → sunholo-0.140.2.dist-info}/RECORD
CHANGED
@@ -2,7 +2,7 @@ sunholo/__init__.py,sha256=InRbX4V0-qdNHo9zYH3GEye7ASLR6LX8-SMvPV4Jsaw,1212
 sunholo/custom_logging.py,sha256=JXZTnXp_DixP3jwYfKw4LYRDS9IuTq7ctCgfZbI2rxA,22023
 sunholo/langchain_types.py,sha256=uZ4zvgej_f7pLqjtu4YP7qMC_eZD5ym_5x4pyvA1Ih4,1834
 sunholo/agents/__init__.py,sha256=X2I3pPkGeKWjc3d0QgSpkTyqD8J8JtrEWqwrumf1MMc,391
-sunholo/agents/chat_history.py,sha256=
+sunholo/agents/chat_history.py,sha256=4jGCHBP8dZfUjSJPxgKyh6nOqhnHRn1x9U3CnGb0I5E,7624
 sunholo/agents/dispatch_to_qa.py,sha256=NHihwAoCJ5_Lk11e_jZnucVUGQyZHCB-YpkfMHBCpQk,8882
 sunholo/agents/langserve.py,sha256=C46ph2mnygr6bdHijYWYyfQDI9ylAF0_9Kx2PfcCJpU,4414
 sunholo/agents/pubsub.py,sha256=TscZN_6am6DfaQkC-Yl18ZIBOoLE-0nDSiil6GpQEh4,1344
@@ -15,7 +15,7 @@ sunholo/agents/fastapi/qna_routes.py,sha256=lKHkXPmwltu9EH3RMwmD153-J6pE7kWQ4BhB
 sunholo/agents/flask/__init__.py,sha256=poJDKMr2qj8qMb99JqCvCPSiEt1tj2tLQ3hKW3f2aVw,107
 sunholo/agents/flask/base.py,sha256=HLz3Z5efWaewTwSFEM6JH48NA9otoJBoVFJlARGk9L8,788
 sunholo/agents/flask/qna_routes.py,sha256=uwUD1yrzOPH27m2AXpiQrPk_2VfJOQOM6dAynOWQtoQ,22532
-sunholo/agents/flask/vac_routes.py,sha256=
+sunholo/agents/flask/vac_routes.py,sha256=RprhFJje5gTNU3ePGbCCPdBAdYs417VyXNlGe5UlR-g,33370
 sunholo/archive/__init__.py,sha256=qNHWm5rGPVOlxZBZCpA1wTYPbalizRT7f8X4rs2t290,31
 sunholo/archive/archive.py,sha256=PxVfDtO2_2ZEEbnhXSCbXLdeoHoQVImo4y3Jr2XkCFY,1204
 sunholo/auth/__init__.py,sha256=TeP-OY0XGxYV_8AQcVGoh35bvyWhNUcMRfhuD5l44Sk,91
@@ -169,9 +169,9 @@ sunholo/vertex/init.py,sha256=1OQwcPBKZYBTDPdyU7IM4X4OmiXLdsNV30C-fee2scQ,2875
 sunholo/vertex/memory_tools.py,sha256=tBZxqVZ4InTmdBvLlOYwoSEWu4-kGquc-gxDwZCC4FA,7667
 sunholo/vertex/safety.py,sha256=S9PgQT1O_BQAkcqauWncRJaydiP8Q_Jzmu9gxYfy1VA,2482
 sunholo/vertex/type_dict_to_json.py,sha256=uTzL4o9tJRao4u-gJOFcACgWGkBOtqACmb6ihvCErL8,4694
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
-sunholo-0.
+sunholo-0.140.2.dist-info/licenses/LICENSE.txt,sha256=SdE3QjnD3GEmqqg9EX3TM9f7WmtOzqS1KJve8rhbYmU,11345
+sunholo-0.140.2.dist-info/METADATA,sha256=nMq7_Z9VKq4kPXXTvyeirWinNf5tEMmUe0HnIiFhYAc,10067
+sunholo-0.140.2.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
+sunholo-0.140.2.dist-info/entry_points.txt,sha256=bZuN5AIHingMPt4Ro1b_T-FnQvZ3teBes-3OyO0asl4,49
+sunholo-0.140.2.dist-info/top_level.txt,sha256=wt5tadn5--5JrZsjJz2LceoUvcrIvxjHJe-RxuudxAk,8
+sunholo-0.140.2.dist-info/RECORD,,